diff --git a/.github/workflows/test_packages.yml b/.github/workflows/test_packages.yml index e2eb247bd2..ffebaab256 100644 --- a/.github/workflows/test_packages.yml +++ b/.github/workflows/test_packages.yml @@ -30,41 +30,50 @@ jobs: - perception/face_recognition - perception/heart_anomaly_detection - perception/multimodal_human_centric - - perception/object_tracking_2d - perception/pose_estimation + - perception/fall_detection - perception/speech_recognition - - perception/skeleton_based_action_recognition + - perception/skeleton_based_action_recognition/costgcn + - perception/skeleton_based_action_recognition/pstgcn + - perception/skeleton_based_action_recognition/stbln + - perception/skeleton_based_action_recognition/stgcn + - perception/skeleton_based_action_recognition/tagcn - perception/semantic_segmentation + - perception/object_tracking_2d + # - perception/object_tracking_3d # passes, but disabled due to free() crash - perception/object_detection_2d/centernet - perception/object_detection_2d/detr - perception/object_detection_2d/gem - perception/object_detection_2d/ssd + - perception/object_detection_2d/nanodet - perception/object_detection_2d/yolov3 + - perception/object_detection_2d/yolov5 - perception/object_detection_2d/retinaface - perception/object_detection_2d/nms + # - perception/object_detection_3d # passes, but disabled due to free() crash - perception/facial_expression_recognition - - perception/object_detection_3d - # - control/mobile_manipulation - # - simulation/human_model_generation - # - control/single_demo_grasp - - perception/object_tracking_3d + - simulation/human_model_generation + #- control/mobile_manipulation + #- control/single_demo_grasp + #- planning/end_to_end_planning runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python 3.8 - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.8 - - name: Test Wheel + - name: Test Wheel Separately run: | export DISABLE_BCOLZ_AVX2=true - sudo apt -y install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev python3-dev + sudo apt -y install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev libeigen3-dev python3 -m venv venv source venv/bin/activate - wget https://raw.githubusercontent.com/opendr-eu/opendr/master/dependencies/pip_requirements.txt - cat pip_requirements.txt | xargs -n 1 -L 1 pip install - pip install opendr-toolkit - python -m unittest discover -s tests/sources/tools/${{ matrix.package }} + python3 -m pip install --upgrade pip + python3 -m pip install wheel + python3 -m pip install opendr-toolkit + # run the test + python3 -m unittest discover -s tests/sources/tools/${{ matrix.package }} test-docker: if: ${{ contains(github.event.pull_request.labels.*.name, 'test packages') || github.event_name == 'schedule' }} strategy: @@ -78,32 +87,40 @@ jobs: - perception/face_recognition - perception/heart_anomaly_detection - perception/multimodal_human_centric - - perception/object_tracking_2d - perception/pose_estimation + - perception/fall_detection - perception/speech_recognition - - perception/skeleton_based_action_recognition + - perception/skeleton_based_action_recognition/costgcn + - perception/skeleton_based_action_recognition/pstgcn + - perception/skeleton_based_action_recognition/stbln + - perception/skeleton_based_action_recognition/stgcn + - 
perception/skeleton_based_action_recognition/tagcn - perception/semantic_segmentation + - perception/object_tracking_2d + # - perception/object_tracking_3d # passes, but disabled due to free() crash - perception/object_detection_2d/centernet - perception/object_detection_2d/detr - perception/object_detection_2d/gem - perception/object_detection_2d/ssd + - perception/object_detection_2d/nanodet - perception/object_detection_2d/yolov3 + - perception/object_detection_2d/yolov5 - perception/object_detection_2d/retinaface - perception/object_detection_2d/nms + # - perception/object_detection_3d # passes, but disabled due to free() crash - perception/facial_expression_recognition - - perception/object_detection_3d - - control/mobile_manipulation - simulation/human_model_generation + - control/mobile_manipulation - control/single_demo_grasp - - perception/object_tracking_3d + - planning/end_to_end_planning runs-on: ${{ matrix.os }} steps: - name: Set up Python 3.8 - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.8 - name: Test Docker run: | - docker run --name toolkit -i opendr/opendr-toolkit:cpu_v1.1.1 bash + docker run --name toolkit -i opendr/opendr-toolkit:cpu_v2.0.0 bash docker start toolkit - docker exec -i toolkit bash -c "source bin/activate.sh && source tests/sources/tools/control/mobile_manipulation/run_ros.sh && python -m unittest discover -s tests/sources/tools/${{ matrix.package }}" + docker exec -i toolkit bash -c "source bin/activate.sh && source tests/sources/tools/control/mobile_manipulation/run_ros.sh && python3 -m unittest discover -s tests/sources/tools/${{ matrix.package }}" diff --git a/.github/workflows/tests_suite.yml b/.github/workflows/tests_suite.yml index bbf9f68b68..afdc1a0e9f 100644 --- a/.github/workflows/tests_suite.yml +++ b/.github/workflows/tests_suite.yml @@ -29,13 +29,13 @@ jobs: DEPENDENCIES_INSTALLATION: "brew install clang-format@11 cppcheck; ln /usr/local/bin/clang-format-11 /usr/local/bin/clang-format" runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 if: matrix.os == 'ubuntu-20.04' || github.event.pull_request.draft == false with: submodules: true - name: Set up Python 3.8 if: matrix.os == 'ubuntu-20.04' || github.event.pull_request.draft == false - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.8 - name: Test Sources @@ -60,30 +60,32 @@ jobs: - perception/face_recognition - perception/heart_anomaly_detection - perception/multimodal_human_centric - - perception/object_tracking_2d - - perception/object_detection_3d - perception/pose_estimation - perception/fall_detection - perception/speech_recognition - - perception/skeleton_based_action_recognition + - perception/skeleton_based_action_recognition/costgcn + - perception/skeleton_based_action_recognition/pstgcn + - perception/skeleton_based_action_recognition/stbln + - perception/skeleton_based_action_recognition/stgcn + - perception/skeleton_based_action_recognition/tagcn - perception/semantic_segmentation - - control/mobile_manipulation + - perception/object_tracking_2d + # - perception/object_tracking_3d # passes, but disabled due to free() crash - perception/object_detection_2d/centernet - perception/object_detection_2d/detr - perception/object_detection_2d/gem - perception/object_detection_2d/ssd + - perception/object_detection_2d/nanodet - perception/object_detection_2d/yolov3 + - perception/object_detection_2d/yolov5 - perception/object_detection_2d/retinaface - 
perception/object_detection_2d/nms - - simulation/human_model_generation + # - perception/object_detection_3d # passes, but disabled due to free() crash - perception/facial_expression_recognition + - simulation/human_model_generation + - control/mobile_manipulation - control/single_demo_grasp - planning/end_to_end_planning - # - perception/object_tracking_3d - include: - - os: ubuntu-20.04 - DEPENDENCIES_INSTALLATION: "sudo sh -c 'echo \"deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main\" > /etc/apt/sources.list.d/ros-latest.list' \ - && curl -s https://raw.githubusercontent.com/ros/rosdistro/master/ros.asc | sudo apt-key add -" runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v2 @@ -95,26 +97,22 @@ jobs: python-version: 3.8 - name: Test Tools run: | - ${{ matrix.DEPENDENCIES_INSTALLATION }} - export OPENDR_HOME=$PWD - export OPENDR_DEVICE=cpu - export PYTHONPATH=$OPENDR_HOME/src:$PYTHONPATH - export DISABLE_BCOLZ_AVX2=true export ROS_DISTRO=noetic - make install_compilation_dependencies - make install_runtime_dependencies - pip install -r tests/sources/requirements.txt + export OPENDR_HOME=$PWD + ./bin/install.sh + source bin/activate.sh + python3 -m pip install -r tests/sources/requirements.txt if [ ${{ matrix.package }} = "ctests" ]; then - make ctests + make ctests else - source tests/sources/tools/control/mobile_manipulation/run_ros.sh - python -m unittest discover -s tests/sources/tools/${{ matrix.package }} + source tests/sources/tools/control/mobile_manipulation/run_ros.sh + python3 -m unittest discover -s tests/sources/tools/${{ matrix.package }} fi build-wheel: if: ${{ contains(github.event.pull_request.labels.*.name, 'test release') || github.event_name == 'schedule' }} runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true - name: Set up Python 3.8 @@ -123,8 +121,8 @@ jobs: python-version: 3.8 - name: Install prerequisites run: | - python -m pip install --upgrade pip - pip install setuptools wheel twine + python3 -m pip install --upgrade pip + python3 -m pip install setuptools wheel twine - name: Build Wheel run: ./bin/build_wheel.sh @@ -138,7 +136,7 @@ jobs: if: ${{ contains(github.event.pull_request.labels.*.name, 'test release') || github.event_name == 'schedule' }} runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true - name: Get branch name @@ -146,7 +144,7 @@ jobs: uses: tj-actions/branch-names@v5.1 - name: Build image run: | - docker build --tag opendr/opendr-toolkit:cpu_test --build-arg branch=${{ steps.branch-name.outputs.current_branch }} --file Dockerfile . + docker build --no-cache --tag opendr/opendr-toolkit:cpu_test --build-arg branch=${{ steps.branch-name.outputs.current_branch }} --build-arg ros_distro=noetic --file Dockerfile . 
docker save opendr/opendr-toolkit:cpu_test > cpu_test.zip - name: Upload image artifact uses: actions/upload-artifact@v2 @@ -169,32 +167,39 @@ jobs: - perception/face_recognition - perception/heart_anomaly_detection - perception/multimodal_human_centric - - perception/object_tracking_2d - perception/pose_estimation - perception/fall_detection - perception/speech_recognition - - perception/skeleton_based_action_recognition + - perception/skeleton_based_action_recognition/costgcn + - perception/skeleton_based_action_recognition/pstgcn + - perception/skeleton_based_action_recognition/stbln + - perception/skeleton_based_action_recognition/stgcn + - perception/skeleton_based_action_recognition/tagcn - perception/semantic_segmentation + - perception/object_tracking_2d + # - perception/object_tracking_3d # passes, but disabled due to free() crash - perception/object_detection_2d/centernet - perception/object_detection_2d/detr - perception/object_detection_2d/gem - perception/object_detection_2d/ssd + - perception/object_detection_2d/nanodet - perception/object_detection_2d/yolov3 + - perception/object_detection_2d/yolov5 - perception/object_detection_2d/retinaface - perception/object_detection_2d/nms + # - perception/object_detection_3d # passes, but disabled due to free() crash - perception/facial_expression_recognition - # - perception/object_detection_3d - # - control/mobile_manipulation - # - simulation/human_model_generation - # - control/single_demo_grasp - # - perception/object_tracking_3d + - simulation/human_model_generation + #- control/mobile_manipulation + #- control/single_demo_grasp + #- planning/end_to_end_planning runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true - name: Set up Python 3.8 - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.8 - name: Download artifact @@ -206,26 +211,31 @@ jobs: uses: tj-actions/branch-names@v5.1 - name: Test Wheel run: | + export OPENDR_HOME=$PWD export DISABLE_BCOLZ_AVX2=true + export OPENDR_DEVICE=cpu + export ROS_DISTRO=noetic + sudo apt -y install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev python3-dev + python3 -m venv venv source venv/bin/activate - pip install wheel + python3 -m pip install --upgrade pip + python3 -m pip install wheel + + # install all tools one at a time while read f; do package=$(sed "s/_/-/g" <<< $f) - arr=(${package//// }) - # Get the tool name (if there is a base path before) - if [ ! -z ${arr[1]} ]; then - package=${arr[1]} - fi - echo "Installing $package package" - if [ "$package" == "opendr" ]; then - pip install ./artifact/wheel-artifact/opendr-toolkit-*.tar.gz - else - pip install ./artifact/wheel-artifact/opendr-toolkit-$package-*.tar.gz + package=(${package//// }) + if [ ! 
-z ${package[1]} ]; then package=${package[1]}; fi + + if [ "$package" != "opendr" ]; then + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-$package-*.tar.gz fi done < packages.txt - python -m unittest discover -s tests/sources/tools/${{ matrix.package }} + + # run the test + python3 -m unittest discover -s tests/sources/tools/${{ matrix.package }} test-wheel-separate: needs: build-wheel if: ${{ contains(github.event.pull_request.labels.*.name, 'test release') || github.event_name == 'schedule' }} @@ -241,32 +251,39 @@ jobs: - perception/face_recognition - perception/heart_anomaly_detection - perception/multimodal_human_centric - - perception/object_tracking_2d - perception/pose_estimation - perception/fall_detection - perception/speech_recognition - - perception/skeleton_based_action_recognition + - perception/skeleton_based_action_recognition/costgcn + - perception/skeleton_based_action_recognition/pstgcn + - perception/skeleton_based_action_recognition/stbln + - perception/skeleton_based_action_recognition/stgcn + - perception/skeleton_based_action_recognition/tagcn - perception/semantic_segmentation + - perception/object_tracking_2d + # - perception/object_tracking_3d # passes, but disabled due to free() crash - perception/object_detection_2d/centernet - perception/object_detection_2d/detr - perception/object_detection_2d/gem - perception/object_detection_2d/ssd + - perception/object_detection_2d/nanodet - perception/object_detection_2d/yolov3 + - perception/object_detection_2d/yolov5 - perception/object_detection_2d/retinaface - perception/object_detection_2d/nms + # - perception/object_detection_3d # passes, but disabled due to free() crash - perception/facial_expression_recognition - # - perception/object_detection_3d - # - control/mobile_manipulation - # - simulation/human_model_generation - # - control/single_demo_grasp - # - perception/object_tracking_3d + - simulation/human_model_generation + #- control/mobile_manipulation + #- control/single_demo_grasp + #- planning/end_to_end_planning runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true - name: Set up Python 3.8 - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.8 - name: Download artifact @@ -276,34 +293,51 @@ jobs: - name: Get branch name id: branch-name uses: tj-actions/branch-names@v5.1 - - name: Test Wheel + - name: Test Wheel Separate run: | + export OPENDR_HOME=$PWD export DISABLE_BCOLZ_AVX2=true + export OPENDR_DEVICE=cpu + export ROS_DISTRO=noetic + sudo apt -y install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev libeigen3-dev + python3 -m venv venv source venv/bin/activate - pip install wheel - # Install engine and requirements for other packages - pip install ./artifact/wheel-artifact/opendr-toolkit-engine-*.tar.gz - # The following two are dependecies for some other packages and pip cannot automatically install them if they are not on a repo - pip install ./artifact/wheel-artifact/opendr-toolkit-compressive-learning-*.tar.gz - pip install ./artifact/wheel-artifact/opendr-toolkit-object-detection-2d-*.tar.gz - pip install ./artifact/wheel-artifact/opendr-toolkit-pose-estimation-*.tar.gz + python3 -m pip install --upgrade pip + python3 -m pip install wheel - # Install specific package for testing + # get the name of the wheel to install based on the test being run package=$(sed "s/_/-/g" <<< ${{ matrix.package }}) - arr=(${package//// 
}) - if [ ! -z ${arr[1]} ]; then - package=${arr[1]} - fi - echo "Installing $package package" - # Utils contains hyperparameter tuning - if [ "$package" == "utils" ]; then - pip install ./artifact/wheel-artifact/opendr-toolkit-hyperparameter-tuner-*.tar.gz + package=(${package//// }) + if [ ! -z ${package[1]} ]; then package=${package[1]}; fi + + # all packages require the engine + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-engine-*.tar.gz + + # install specific package + if [ ${{ matrix.package }} == "utils" ]; then + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-hyperparameter-tuner-*.tar.gz + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-ambiguity-measure-*.tar.gz else - pip install ./artifact/wheel-artifact/opendr-toolkit-$package-*.tar.gz + # install required dependencies for derived tools, we do so manually to ensure the local one is used + if [ ${{ matrix.package }} == "perception/fall_detection" ]; then + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-pose-estimation-*.tar.gz + elif [ ${{ matrix.package }} == "perception/heart_anomaly_detection" ]; then + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-compressive-learning-*.tar.gz + elif [ ${{ matrix.package }} == "perception/multimodal_human_centric" ]; then + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-compressive-learning-*.tar.gz + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-object-detection-2d-*.tar.gz + elif [ ${{ matrix.package }} == "perception/object_tracking_3d" ]; then + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-object-detection-3d-*.tar.gz + fi + + # install the package itself + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-$package-*.tar.gz fi - python -m unittest discover -s tests/sources/tools/${{ matrix.package }} + + # run the test + python3 -m unittest discover -s tests/sources/tools/${{ matrix.package }} test-docker: needs: build-docker if: ${{ contains(github.event.pull_request.labels.*.name, 'test release') || github.event_name == 'schedule' }} @@ -319,26 +353,32 @@ jobs: - perception/face_recognition - perception/heart_anomaly_detection - perception/multimodal_human_centric - - perception/object_tracking_2d - perception/pose_estimation - perception/fall_detection - perception/speech_recognition - - perception/skeleton_based_action_recognition + - perception/skeleton_based_action_recognition/costgcn + - perception/skeleton_based_action_recognition/pstgcn + - perception/skeleton_based_action_recognition/stbln + - perception/skeleton_based_action_recognition/stgcn + - perception/skeleton_based_action_recognition/tagcn - perception/semantic_segmentation + - perception/object_tracking_2d + # - perception/object_tracking_3d # passes, but disabled due to free() crash - perception/object_detection_2d/centernet - perception/object_detection_2d/detr - perception/object_detection_2d/gem - perception/object_detection_2d/ssd + - perception/object_detection_2d/nanodet - perception/object_detection_2d/yolov3 + - perception/object_detection_2d/yolov5 - perception/object_detection_2d/retinaface - perception/object_detection_2d/nms + # - perception/object_detection_3d # passes, but disabled due to free() crash - perception/facial_expression_recognition - - perception/object_detection_3d - - control/mobile_manipulation - simulation/human_model_generation + - control/mobile_manipulation - control/single_demo_grasp - planning/end_to_end_planning - # - 
perception/object_tracking_3d runs-on: ubuntu-20.04 steps: - name: Download artifact @@ -350,7 +390,7 @@ jobs: docker load < ./artifact/docker-artifact/cpu_test.zip docker run --name toolkit -i opendr/opendr-toolkit:cpu_test bash docker start toolkit - docker exec -i toolkit bash -c "source bin/activate.sh && source tests/sources/tools/control/mobile_manipulation/run_ros.sh && python -m unittest discover -s tests/sources/tools/${{ matrix.package }}" + docker exec -i toolkit bash -c "source bin/activate.sh && source tests/sources/tools/control/mobile_manipulation/run_ros.sh && python3 -m unittest discover -s tests/sources/tools/${{ matrix.package }}" delete-docker-artifacts: needs: [build-docker, test-docker] if: ${{ always() }} diff --git a/.github/workflows/tests_suite_develop.yml b/.github/workflows/tests_suite_develop.yml index 4d345563af..e27c069f2f 100644 --- a/.github/workflows/tests_suite_develop.yml +++ b/.github/workflows/tests_suite_develop.yml @@ -29,14 +29,14 @@ jobs: DEPENDENCIES_INSTALLATION: "brew install clang-format@11 cppcheck; ln /usr/local/bin/clang-format-11 /usr/local/bin/clang-format" runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 if: matrix.os == 'ubuntu-20.04' || github.event.pull_request.draft == false with: submodules: true ref: develop - name: Set up Python 3.8 if: matrix.os == 'ubuntu-20.04' || github.event.pull_request.draft == false - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.8 - name: Test Sources @@ -61,30 +61,32 @@ jobs: - perception/face_recognition - perception/heart_anomaly_detection - perception/multimodal_human_centric - - perception/object_tracking_2d - - perception/object_detection_3d - perception/pose_estimation - perception/fall_detection - perception/speech_recognition - - perception/skeleton_based_action_recognition + - perception/skeleton_based_action_recognition/costgcn + - perception/skeleton_based_action_recognition/pstgcn + - perception/skeleton_based_action_recognition/stbln + - perception/skeleton_based_action_recognition/stgcn + - perception/skeleton_based_action_recognition/tagcn - perception/semantic_segmentation - - control/mobile_manipulation + - perception/object_tracking_2d + # - perception/object_tracking_3d # passes, but disabled due to free() crash - perception/object_detection_2d/centernet - perception/object_detection_2d/detr - perception/object_detection_2d/gem - perception/object_detection_2d/ssd + - perception/object_detection_2d/nanodet - perception/object_detection_2d/yolov3 + - perception/object_detection_2d/yolov5 - perception/object_detection_2d/retinaface - perception/object_detection_2d/nms - - simulation/human_model_generation + # - perception/object_detection_3d # passes, but disabled due to free() crash - perception/facial_expression_recognition + - simulation/human_model_generation + - control/mobile_manipulation - control/single_demo_grasp - planning/end_to_end_planning - # - perception/object_tracking_3d - include: - - os: ubuntu-20.04 - DEPENDENCIES_INSTALLATION: "sudo sh -c 'echo \"deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main\" > /etc/apt/sources.list.d/ros-latest.list' \ - && curl -s https://raw.githubusercontent.com/ros/rosdistro/master/ros.asc | sudo apt-key add -" runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v2 @@ -97,26 +99,22 @@ jobs: python-version: 3.8 - name: Test Tools run: | - ${{ matrix.DEPENDENCIES_INSTALLATION }} - export OPENDR_HOME=$PWD - export OPENDR_DEVICE=cpu - export 
PYTHONPATH=$OPENDR_HOME/src:$PYTHONPATH - export DISABLE_BCOLZ_AVX2=true export ROS_DISTRO=noetic - make install_compilation_dependencies - make install_runtime_dependencies - pip install -r tests/sources/requirements.txt + export OPENDR_HOME=$PWD + ./bin/install.sh + source bin/activate.sh + python3 -m pip install -r tests/sources/requirements.txt if [ ${{ matrix.package }} = "ctests" ]; then - make ctests + make ctests else - source tests/sources/tools/control/mobile_manipulation/run_ros.sh - python -m unittest discover -s tests/sources/tools/${{ matrix.package }} + source tests/sources/tools/control/mobile_manipulation/run_ros.sh + python3 -m unittest discover -s tests/sources/tools/${{ matrix.package }} fi build-wheel: if: ${{ contains(github.event.pull_request.labels.*.name, 'test release') || github.event_name == 'schedule' }} runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true ref: develop @@ -126,8 +124,8 @@ jobs: python-version: 3.8 - name: Install prerequisites run: | - python -m pip install --upgrade pip - pip install setuptools wheel twine + python3 -m pip install --upgrade pip + python3 -m pip install setuptools wheel twine - name: Build Wheel run: ./bin/build_wheel.sh @@ -141,7 +139,7 @@ jobs: if: ${{ contains(github.event.pull_request.labels.*.name, 'test release') || github.event_name == 'schedule' }} runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true ref: develop @@ -150,7 +148,7 @@ jobs: uses: tj-actions/branch-names@v5.1 - name: Build image run: | - docker build --tag opendr/opendr-toolkit:cpu_test --build-arg branch=${{ steps.branch-name.outputs.current_branch }} --file Dockerfile . + docker build --no-cache --tag opendr/opendr-toolkit:cpu_test --build-arg branch=${{ steps.branch-name.outputs.current_branch }} --build-arg ros_distro=noetic --file Dockerfile . 
docker save opendr/opendr-toolkit:cpu_test > cpu_test.zip - name: Upload image artifact uses: actions/upload-artifact@v2 @@ -173,33 +171,40 @@ jobs: - perception/face_recognition - perception/heart_anomaly_detection - perception/multimodal_human_centric - - perception/object_tracking_2d - perception/pose_estimation - perception/fall_detection - perception/speech_recognition - - perception/skeleton_based_action_recognition + - perception/skeleton_based_action_recognition/costgcn + - perception/skeleton_based_action_recognition/pstgcn + - perception/skeleton_based_action_recognition/stbln + - perception/skeleton_based_action_recognition/stgcn + - perception/skeleton_based_action_recognition/tagcn - perception/semantic_segmentation + - perception/object_tracking_2d + # - perception/object_tracking_3d # passes, but disabled due to free() crash - perception/object_detection_2d/centernet - perception/object_detection_2d/detr - perception/object_detection_2d/gem - perception/object_detection_2d/ssd + - perception/object_detection_2d/nanodet - perception/object_detection_2d/yolov3 + - perception/object_detection_2d/yolov5 - perception/object_detection_2d/retinaface - perception/object_detection_2d/nms + # - perception/object_detection_3d # passes, but disabled due to free() crash - perception/facial_expression_recognition - # - perception/object_detection_3d - # - control/mobile_manipulation - # - simulation/human_model_generation - # - control/single_demo_grasp - # - perception/object_tracking_3d + - simulation/human_model_generation + #- control/mobile_manipulation + #- control/single_demo_grasp + #- planning/end_to_end_planning runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true ref: develop - name: Set up Python 3.8 - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.8 - name: Download artifact @@ -211,26 +216,34 @@ jobs: uses: tj-actions/branch-names@v5.1 - name: Test Wheel run: | + export OPENDR_HOME=$PWD export DISABLE_BCOLZ_AVX2=true - sudo apt -y install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev libeigen3-dev + export OPENDR_DEVICE=cpu + export PYTHONPATH=$OPENDR_HOME/src:$PYTHONPATH + export ROS_DISTRO=noetic + + sudo apt -y install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev python3-dev + python3 -m venv venv source venv/bin/activate - pip install wheel + python3 -m pip install --upgrade pip + python3 -m pip install wheel + + # install all tools one at a time while read f; do package=$(sed "s/_/-/g" <<< $f) - arr=(${package//// }) - # Get the tool name (if there is a base path before) - if [ ! -z ${arr[1]} ]; then - package=${arr[1]} - fi - echo "Installing $package package" - if [ "$package" == "opendr" ]; then - pip install ./artifact/wheel-artifact/opendr-toolkit-*.tar.gz - else - pip install ./artifact/wheel-artifact/opendr-toolkit-$package-*.tar.gz + package=(${package//// }) + if [ ! 
-z ${package[1]} ]; then package=${package[1]}; fi + + if [ "$package" != "opendr" ]; then + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-$package-*.tar.gz fi done < packages.txt - python -m unittest discover -s tests/sources/tools/${{ matrix.package }} + + cd src/opendr/perception/object_detection_2d/retinaface; make; cd $OPENDR_HOME + + # run the test + python3 -m unittest discover -s tests/sources/tools/${{ matrix.package }} test-wheel-separate: needs: build-wheel if: ${{ contains(github.event.pull_request.labels.*.name, 'test release') || github.event_name == 'schedule' }} @@ -246,33 +259,40 @@ jobs: - perception/face_recognition - perception/heart_anomaly_detection - perception/multimodal_human_centric - - perception/object_tracking_2d - perception/pose_estimation - perception/fall_detection - perception/speech_recognition - - perception/skeleton_based_action_recognition + - perception/skeleton_based_action_recognition/costgcn + - perception/skeleton_based_action_recognition/pstgcn + - perception/skeleton_based_action_recognition/stbln + - perception/skeleton_based_action_recognition/stgcn + - perception/skeleton_based_action_recognition/tagcn - perception/semantic_segmentation + - perception/object_tracking_2d + # - perception/object_tracking_3d # passes, but disabled due to free() crash - perception/object_detection_2d/centernet - perception/object_detection_2d/detr - perception/object_detection_2d/gem - perception/object_detection_2d/ssd + - perception/object_detection_2d/nanodet - perception/object_detection_2d/yolov3 + - perception/object_detection_2d/yolov5 - perception/object_detection_2d/retinaface - perception/object_detection_2d/nms + # - perception/object_detection_3d # passes, but disabled due to free() crash - perception/facial_expression_recognition - # - perception/object_detection_3d - # - control/mobile_manipulation - # - simulation/human_model_generation - # - control/single_demo_grasp - # - perception/object_tracking_3d + - simulation/human_model_generation + #- control/mobile_manipulation + #- control/single_demo_grasp + #- planning/end_to_end_planning runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: submodules: true ref: develop - name: Set up Python 3.8 - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.8 - name: Download artifact @@ -282,33 +302,54 @@ jobs: - name: Get branch name id: branch-name uses: tj-actions/branch-names@v5.1 - - name: Test Wheel + - name: Test Wheel Separate run: | + export OPENDR_HOME=$PWD export DISABLE_BCOLZ_AVX2=true + export OPENDR_DEVICE=cpu + export PYTHONPATH=$OPENDR_HOME/src:$PYTHONPATH + export ROS_DISTRO=noetic + sudo apt -y install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev libeigen3-dev + python3 -m venv venv source venv/bin/activate - pip install wheel - # Install engine and requirements for other packages - pip install ./artifact/wheel-artifact/opendr-toolkit-engine-*.tar.gz - # The following two are dependecies for some other packages and pip cannot automatically install them if they are not on a repo - pip install ./artifact/wheel-artifact/opendr-toolkit-compressive-learning-*.tar.gz - pip install ./artifact/wheel-artifact/opendr-toolkit-object-detection-2d-*.tar.gz - pip install ./artifact/wheel-artifact/opendr-toolkit-pose-estimation-*.tar.gz - # Install specific package for testing + python3 -m pip install --upgrade pip + python3 -m pip install 
wheel + + # get the name of the wheel to install based on the test being run package=$(sed "s/_/-/g" <<< ${{ matrix.package }}) - arr=(${package//// }) - if [ ! -z ${arr[1]} ]; then - package=${arr[1]} - fi - echo "Installing $package package" - # Utils contains hyperparameter tuning - if [ "$package" == "utils" ]; then - pip install ./artifact/wheel-artifact/opendr-toolkit-hyperparameter-tuner-*.tar.gz + package=(${package//// }) + if [ ! -z ${package[1]} ]; then package=${package[1]}; fi + + # all packages require the engine + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-engine-*.tar.gz + + # install specific package + if [ ${{ matrix.package }} == "utils" ]; then + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-hyperparameter-tuner-*.tar.gz + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-ambiguity-measure-*.tar.gz else - pip install ./artifact/wheel-artifact/opendr-toolkit-$package-*.tar.gz + # install required dependencies for derived tools, we do so manually to ensure the local one is used + if [ ${{ matrix.package }} == "perception/fall_detection" ]; then + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-pose-estimation-*.tar.gz + elif [ ${{ matrix.package }} == "perception/heart_anomaly_detection" ]; then + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-compressive-learning-*.tar.gz + elif [ ${{ matrix.package }} == "perception/multimodal_human_centric" ]; then + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-compressive-learning-*.tar.gz + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-object-detection-2d-*.tar.gz + elif [ ${{ matrix.package }} == "perception/object_tracking_3d" ]; then + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-object-detection-3d-*.tar.gz + fi + + # install the package itself + python3 -m pip install ./artifact/wheel-artifact/opendr-toolkit-$package-*.tar.gz fi - python -m unittest discover -s tests/sources/tools/${{ matrix.package }} + + cd src/opendr/perception/object_detection_2d/retinaface; make; cd $OPENDR_HOME + + # run the test + python3 -m unittest discover -s tests/sources/tools/${{ matrix.package }} test-docker: needs: build-docker if: ${{ contains(github.event.pull_request.labels.*.name, 'test release') || github.event_name == 'schedule' }} @@ -324,26 +365,32 @@ jobs: - perception/face_recognition - perception/heart_anomaly_detection - perception/multimodal_human_centric - - perception/object_tracking_2d - perception/pose_estimation - perception/fall_detection - perception/speech_recognition - - perception/skeleton_based_action_recognition + - perception/skeleton_based_action_recognition/costgcn + - perception/skeleton_based_action_recognition/pstgcn + - perception/skeleton_based_action_recognition/stbln + - perception/skeleton_based_action_recognition/stgcn + - perception/skeleton_based_action_recognition/tagcn - perception/semantic_segmentation + - perception/object_tracking_2d + # - perception/object_tracking_3d # passes, but disabled due to free() crash - perception/object_detection_2d/centernet - perception/object_detection_2d/detr - perception/object_detection_2d/gem - perception/object_detection_2d/ssd + - perception/object_detection_2d/nanodet - perception/object_detection_2d/yolov3 + - perception/object_detection_2d/yolov5 - perception/object_detection_2d/retinaface - perception/object_detection_2d/nms + # - perception/object_detection_3d # passes, but disabled due to free() crash - 
perception/facial_expression_recognition - - perception/object_detection_3d - - control/mobile_manipulation - simulation/human_model_generation + - control/mobile_manipulation - control/single_demo_grasp - planning/end_to_end_planning - # - perception/object_tracking_3d runs-on: ubuntu-20.04 steps: - name: Download artifact @@ -355,7 +402,7 @@ jobs: docker load < ./artifact/docker-artifact/cpu_test.zip docker run --name toolkit -i opendr/opendr-toolkit:cpu_test bash docker start toolkit - docker exec -i toolkit bash -c "source bin/activate.sh && source tests/sources/tools/control/mobile_manipulation/run_ros.sh && python -m unittest discover -s tests/sources/tools/${{ matrix.package }}" + docker exec -i toolkit bash -c "source bin/activate.sh && source tests/sources/tools/control/mobile_manipulation/run_ros.sh && python3 -m unittest discover -s tests/sources/tools/${{ matrix.package }}" delete-docker-artifacts: needs: [build-docker, test-docker] if: ${{ always() }} diff --git a/.gitignore b/.gitignore index af7ac1235e..7c6a85f652 100644 --- a/.gitignore +++ b/.gitignore @@ -70,4 +70,13 @@ temp/ # ROS interface projects/opendr_ws/.catkin_workspace projects/opendr_ws/devel/ -projects/control/eagerx/eagerx_ws/ +projects/python/control/eagerx/eagerx_ws/ + +projects/opendr_ws_2/build +projects/opendr_ws_2/install +projects/opendr_ws_2/log +projects/opendr_ws_2/MOT +projects/opendr_ws_2/KITTI + +# packages +src/opendr_toolkit*.egg-info/ diff --git a/CHANGELOG.md b/CHANGELOG.md index dbc3a1ab95..500b3b8434 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,24 @@ # OpenDR Toolkit Change Log +## Version 2.0.0 +Released on December, 31st, 2022. + + - New Features: + - Added YOLOv5 as an inference-only tool ([#360](https://github.com/opendr-eu/opendr/pull/360)). + - Added Continual Transformer Encoders ([#317](https://github.com/opendr-eu/opendr/pull/317)). + - Added Continual Spatio-Temporal Graph Convolutional Networks tool ([#370](https://github.com/opendr-eu/opendr/pull/370)). + - Added AmbiguityMeasure utility tool ([#361](https://github.com/opendr-eu/opendr/pull/361)). + - Added SiamRPN 2D tracking tool ([#367](https://github.com/opendr-eu/opendr/pull/367)). + - Added Facial Emotion Estimation tool ([#264](https://github.com/opendr-eu/opendr/pull/264)). + - Added High resolution pose estimation tool ([#356](https://github.com/opendr-eu/opendr/pull/356)). + - Added ROS2 nodes for all included tools ([#256](https://github.com/opendr-eu/opendr/pull/256)). + - Added missing ROS nodes and homogenized the interface across the tools ([#305](https://github.com/opendr-eu/opendr/issues/305)). + - Bug Fixes: + - Fixed `BoundingBoxList`, `TrackingAnnotationList`, `BoundingBoxList3D` and `TrackingAnnotationList3D` confidence warnings ([#365](https://github.com/opendr-eu/opendr/pull/365)). + - Fixed undefined `image_id` and `segmentation` for COCO `BoundingBoxList` ([#365](https://github.com/opendr-eu/opendr/pull/365)). + - Fixed Continual X3D ONNX support ([#372](https://github.com/opendr-eu/opendr/pull/372)). + - Fixed several issues with ROS nodes and improved performance ([#305](https://github.com/opendr-eu/opendr/issues/305)). + ## Version 1.1.1 Released on June, 30th, 2022. 
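The workflow changes above consolidate every per-package job into the same recipe: create a virtual environment, install the toolkit wheel, and run unittest discovery on the package's test folder. A rough local equivalent is sketched below (illustration only, assuming Ubuntu 20.04 with Python 3.8 and an opendr checkout as the working directory; `perception/pose_estimation` stands in for any matrix entry):

```bash
# Local sketch of the "Test Wheel" step above; not part of the CI itself.
export DISABLE_BCOLZ_AVX2=true
sudo apt -y install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev \
  wget libopenblas-dev libsndfile1 libboost-dev libeigen3-dev

python3 -m venv venv
source venv/bin/activate
python3 -m pip install --upgrade pip
python3 -m pip install wheel
python3 -m pip install opendr-toolkit

# Run the tests of a single package, e.g. perception/pose_estimation
python3 -m unittest discover -s tests/sources/tools/perception/pose_estimation
```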
diff --git a/Dockerfile b/Dockerfile index 82c44286f4..321e686b52 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,11 +1,12 @@ FROM ubuntu:20.04 -ARG branch +ARG branch=master +ARG ros_distro=noetic # Install dependencies RUN apt-get update && \ - apt-get --yes install git sudo -RUN DEBIAN_FRONTEND="noninteractive" apt-get -y install tzdata + apt-get --yes install git sudo && \ + DEBIAN_FRONTEND="noninteractive" apt-get -y install tzdata # Add Tini ENV TINI_VERSION v0.19.0 @@ -16,12 +17,13 @@ ENTRYPOINT ["/tini", "--"] # Clone the repo and install the toolkit RUN git clone --depth 1 --recurse-submodules -j8 https://github.com/opendr-eu/opendr -b $branch WORKDIR "/opendr" -RUN ./bin/install.sh +ENV ROS_DISTRO=$ros_distro +RUN chmod +x ./bin/install.sh && ./bin/install.sh && rm -rf /root/.cache/* && apt-get clean # Create script for starting Jupyter Notebook -RUN /bin/bash -c "source ./bin/activate.sh; pip3 install jupyter" -RUN echo "#!/bin/bash\n source ./bin/activate.sh\n ./venv/bin/jupyter notebook --port=8888 --no-browser --ip 0.0.0.0 --allow-root" > start.sh -RUN chmod +x start.sh +RUN /bin/bash -c "source ./bin/activate.sh; pip3 install jupyter" && \ + echo "#!/bin/bash\n source ./bin/activate.sh\n ./venv/bin/jupyter notebook --port=8888 --no-browser --ip 0.0.0.0 --allow-root" > start.sh && \ + chmod +x start.sh # Start Jupyter Notebook inside OpenDR CMD ["./start.sh"] diff --git a/Dockerfile-cuda b/Dockerfile-cuda index 81d99f4dd8..c60b5dca4a 100644 --- a/Dockerfile-cuda +++ b/Dockerfile-cuda @@ -1,19 +1,16 @@ FROM nvidia/cuda:11.2.0-cudnn8-devel-ubuntu20.04 -ARG branch +ARG branch=master +ARG ros_distro=noetic # Fix NVIDIA CUDA Linux repository key rotation ENV APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=1 RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu$(cat /etc/os-release | grep VERSION_ID | awk '{print substr($0,13,5)}' | awk -F'.' 
'{print $1$2}')/x86_64/3bf863cc.pub -ARG branch - -ARG branch - # Install dependencies RUN apt-get update && \ - apt-get --yes install git sudo apt-utils -RUN DEBIAN_FRONTEND="noninteractive" apt-get -y install tzdata + apt-get --yes install git sudo apt-utils && \ + DEBIAN_FRONTEND="noninteractive" apt-get -y install tzdata # Add Tini ENV TINI_VERSION v0.19.0 @@ -24,15 +21,16 @@ ENTRYPOINT ["/tini", "--"] RUN sudo apt-get --yes install build-essential # Clone the repo and install the toolkit -ENV OPENDR_DEVICE gpu +ENV OPENDR_DEVICE=gpu +ENV ROS_DISTRO=$ros_distro RUN git clone --depth 1 --recurse-submodules -j8 https://github.com/opendr-eu/opendr -b $branch WORKDIR "/opendr" -RUN ./bin/install.sh +RUN chmod +x ./bin/install.sh && ./bin/install.sh && rm -rf /root/.cache/* && apt-get clean # Create script for starting Jupyter Notebook -RUN /bin/bash -c "source ./bin/activate.sh; pip3 install jupyter" -RUN echo "#!/bin/bash\n source ./bin/activate.sh\n ./venv/bin/jupyter notebook --port=8888 --no-browser --ip 0.0.0.0 --allow-root" > start.sh -RUN chmod +x start.sh +RUN /bin/bash -c "source ./bin/activate.sh; pip3 install jupyter" && \ + echo "#!/bin/bash\n source ./bin/activate.sh\n ./venv/bin/jupyter notebook --port=8888 --no-browser --ip 0.0.0.0 --allow-root" > start.sh && \ + chmod +x start.sh # Start Jupyter Notebook inside OpenDR CMD ["./start.sh"] diff --git a/Dockerfile-embedded b/Dockerfile-embedded new file mode 100644 index 0000000000..f5b6253917 --- /dev/null +++ b/Dockerfile-embedded @@ -0,0 +1,201 @@ +# +# This dockerfile roughly follows the 'Installing from source' ROS instructions from: +# http://wiki.ros.org/noetic/Installation/Source +# +ARG BASE_IMAGE=nvcr.io/nvidia/l4t-base:r32.6.1 +FROM ${BASE_IMAGE} +ARG device=nx + +ARG ROS_PKG=ros_base +ENV ROS_DISTRO=noetic +ENV ROS_ROOT=/opt/ros/${ROS_DISTRO} +ENV ROS_PYTHON_VERSION=3 + +ENV DEBIAN_FRONTEND=noninteractive + +ENV CUDA_HOME="/usr/local/cuda" +ENV PATH="/usr/local/cuda/bin:${PATH}" +ENV LD_LIBRARY_PATH="/usr/local/cuda/lib64:${LD_LIBRARY_PATH}" + +WORKDIR /workspace + +# +# OpenCV - https://github.com/mdegans/nano_build_opencv/blob/master/build_opencv.sh +# +ARG OPENCV_VERSION="4.4.0" + +# install build dependencies +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + build-essential \ + gfortran \ + cmake \ + git \ + file \ + tar \ + libatlas-base-dev \ + libavcodec-dev \ + libavformat-dev \ + libavresample-dev \ + libcanberra-gtk3-module \ + libdc1394-22-dev \ + libeigen3-dev \ + libglew-dev \ + libgstreamer-plugins-base1.0-dev \ + libgstreamer-plugins-good1.0-dev \ + libgstreamer1.0-dev \ + libgtk-3-dev \ + libjpeg-dev \ + libjpeg8-dev \ + libjpeg-turbo8-dev \ + liblapack-dev \ + liblapacke-dev \ + libopenblas-dev \ + libpng-dev \ + libpostproc-dev \ + libswscale-dev \ + libtbb-dev \ + libtbb2 \ + libtesseract-dev \ + libtiff-dev \ + libv4l-dev \ + libxine2-dev \ + libxvidcore-dev \ + libx264-dev \ + libgtkglext1 \ + libgtkglext1-dev \ + pkg-config \ + qv4l2 \ + v4l-utils \ + zlib1g-dev + +RUN apt-get install ca-certificates -y + +# OpenCV looks for the cuDNN version in cudnn_version.h, but it's been renamed to cudnn_version_v8.h +RUN ln -s /usr/include/$(uname -i)-linux-gnu/cudnn_version_v8.h /usr/include/$(uname -i)-linux-gnu/cudnn_version.h + +# Architecture-specific build options +ARG CUDA_ARCH_BIN="" +ARG ENABLE_NEON="OFF" + +# Clone and configure OpenCV repo +RUN git clone --depth 1 --branch ${OPENCV_VERSION} https://github.com/opencv/opencv.git && \ + git clone --depth 1 --branch 
${OPENCV_VERSION} https://github.com/opencv/opencv_contrib.git && \ + cd opencv && \ + mkdir build && \ + cd build && \ + echo "configuring OpenCV ${OPENCV_VERSION}, CUDA_ARCH_BIN=${CUDA_ARCH_BIN}, ENABLE_NEON=${ENABLE_NEON}" && \ + cmake \ + -D CPACK_BINARY_DEB=ON \ + -D BUILD_EXAMPLES=OFF \ + -D BUILD_opencv_python2=OFF \ + -D BUILD_opencv_python3=ON \ + -D BUILD_opencv_java=OFF \ + -D CMAKE_BUILD_TYPE=RELEASE \ + -D CMAKE_INSTALL_PREFIX=/usr/local \ + -D CUDA_ARCH_BIN=${CUDA_ARCH_BIN} \ + -D CUDA_ARCH_PTX= \ + -D CUDA_FAST_MATH=ON \ + -D CUDNN_INCLUDE_DIR=/usr/include/$(uname -i)-linux-gnu \ + -D EIGEN_INCLUDE_PATH=/usr/include/eigen3 \ + -D WITH_EIGEN=ON \ + -D ENABLE_NEON=${ENABLE_NEON} \ + -D OPENCV_DNN_CUDA=ON \ + -D OPENCV_ENABLE_NONFREE=ON \ + -D OPENCV_EXTRA_MODULES_PATH=/workspace/opencv_contrib/modules \ + -D OPENCV_GENERATE_PKGCONFIG=ON \ + -D WITH_CUBLAS=ON \ + -D WITH_CUDA=ON \ + -D WITH_CUDNN=ON \ + -D WITH_GSTREAMER=ON \ + -D WITH_LIBV4L=ON \ + -D WITH_OPENGL=ON \ + -D WITH_OPENCL=OFF \ + -D WITH_IPP=OFF \ + -D WITH_TBB=ON \ + -D BUILD_TIFF=ON \ + -D BUILD_PERF_TESTS=OFF \ + -D BUILD_TESTS=OFF \ + ../ + +RUN cd opencv/build && make -j$(nproc) +RUN cd opencv/build && make install +RUN cd opencv/build && make package + +RUN cd opencv/build && tar -czvf OpenCV-${OPENCV_VERSION}-$(uname -i).tar.gz *.deb + +RUN apt-get update -y +RUN apt-get install software-properties-common -y && \ + apt-get update + +# Add the ROS deb repo to the apt sources list + +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + git \ + cmake \ + build-essential \ + curl \ + wget \ + gnupg2 \ + lsb-release \ + ca-certificates \ + && rm -rf /var/lib/apt/lists/* + +RUN sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main" > /etc/apt/sources.list.d/ros-latest.list' +RUN curl -s https://raw.githubusercontent.com/ros/rosdistro/master/ros.asc | apt-key add - + +# +# Install bootstrap dependencies +# +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + libpython3-dev \ + python3-rosdep \ + python3-rosinstall-generator \ + python3-vcstool \ + libyaml-cpp-dev \ + build-essential && \ + rosdep init && \ + rosdep update && \ + rm -rf /var/lib/apt/lists/* + +# +# Download/build the ROS source +# +RUN mkdir ros_catkin_ws && \ + cd ros_catkin_ws && \ + # try below for ros-noetic-vision-msgs ros-noetic-geometry-msgs ros-noetic-sensor-msgs ros-noetic-audio-common-msgs \ + rosinstall_generator ${ROS_PKG} audio_common_msgs --rosdistro ${ROS_DISTRO} --deps --tar > ${ROS_DISTRO}-${ROS_PKG}.rosinstall && \ + rosinstall_generator ${ROS_PKG} sensor_msgs --rosdistro ${ROS_DISTRO} --deps --tar > ${ROS_DISTRO}-${ROS_PKG}.rosinstall && \ + rosinstall_generator ${ROS_PKG} geometry_msgs --rosdistro ${ROS_DISTRO} --deps --tar > ${ROS_DISTRO}-${ROS_PKG}.rosinstall && \ + rosinstall_generator ${ROS_PKG} vision_msgs --rosdistro ${ROS_DISTRO} --deps --tar > ${ROS_DISTRO}-${ROS_PKG}.rosinstall && \ + mkdir src && \ + cd src && \ + git clone https://github.com/ros/resource_retriever && \ + cd ../ && \ + vcs import --input ${ROS_DISTRO}-${ROS_PKG}.rosinstall ./src && \ + apt-get update && \ + apt-get install -y gir1.2-gstreamer-1.0 && \ + rosdep install --from-paths ./src --ignore-packages-from-source --rosdistro ${ROS_DISTRO} --skip-keys python3-pykdl -y && \ + python3 ./src/catkin/bin/catkin_make_isolated --install --install-space ${ROS_ROOT} -DCMAKE_BUILD_TYPE=Release && \ + rm -rf /var/lib/apt/lists/* + + +RUN echo 'source /opt/ros/${ROS_DISTRO}/setup.bash' >> 
/root/.bashrc +CMD ["bash"] +WORKDIR / + +RUN git clone --depth 1 --recurse-submodules -j8 --branch master https://github.com/opendr-eu/opendr +RUN apt-get update +RUN cd ./opendr && ./bin/install_nvidia.sh $device +RUN cd ./opendr/projects/opendr_ws/src && \ + git clone --branch noetic https://github.com/ros-perception/vision_opencv && \ + git clone --branch develop https://github.com/bosch-ros-pkg/usb_cam.git && \ + cd ./usb_cam && git reset --hard 3ce8ee1 && cd ../ && \ + git clone https://github.com/ros-perception/image_common.git && \ + git clone https://github.com/ros-drivers/audio_common && \ + sed -i 's/(Boost REQUIRED python37)/(Boost REQUIRED python3)/' ./vision_opencv/cv_bridge/CMakeLists.txt && \ + cd ../ + +RUN /bin/bash -c '. /opt/ros/noetic/setup.bash; cd /opendr/projects/opendr_ws; catkin_make' diff --git a/README.md b/README.md index 9a9c7c53b3..00baafe795 100644 --- a/README.md +++ b/README.md @@ -7,10 +7,13 @@ ______________________________________________________________________

Website • - AboutInstallation • - Using OpenDR toolkit • - Examples • + Python Examples • + ROS1 • + ROS2 • + C API • + Customization • + Known IssuesRoadmapChangelogLicense @@ -33,19 +36,40 @@ OpenDR focuses on the **AI and Cognition core technology** in order to provide t As a result, the developed OpenDR toolkit will also enable cooperative human-robot interaction as well as the development of cognitive mechatronics where sensing and actuation are closely coupled with cognitive systems thus contributing to another two core technologies beyond AI and Cognition. OpenDR aims to develop, train, deploy and evaluate deep learning models that improve the technical capabilities of the core technologies beyond the current state of the art. -## Installing OpenDR Toolkit +## Where to start? + +You can start by [installing](docs/reference/installation.md) the OpenDR toolkit. OpenDR can be installed in the following ways: 1. By *cloning* this repository (CPU/GPU support) 2. Using *pip* (CPU/GPU support only) 3. Using *docker* (CPU/GPU support) -You can find detailed installation instruction in the [documentation](docs/reference/installation.md). -## Using OpenDR toolkit +## What OpenDR provides? + OpenDR provides an intuitive and easy to use **[Python interface](src/opendr)**, a **[C API](src/c_api) for performance critical application**, a wealth of **[usage examples and supporting tools](projects)**, as well as **ready-to-use [ROS nodes](projects/opendr_ws)**. OpenDR is built to support [Webots Open Source Robot Simulator](https://cyberbotics.com/), while it also extensively follows industry standards, such as [ONNX model format](https://onnx.ai/) and [OpenAI Gym Interface](https://gym.openai.com/). -You can find detailed documentation in OpenDR [wiki](https://github.com/tasostefas/opendr_internal/wiki), as well as in the [tools index](docs/reference/index.md). + +## How can I start using OpenDR? + +You can find detailed documentation in OpenDR [wiki](https://github.com/opendr-eu/opendr/wiki). +The main point of reference after installing the toolkit is the [tools index](docs/reference/index.md). +Starting from there, you can find detailed documentation for all the tools included in OpenDR. + +- If you are interested in ready-to-use ROS nodes, then you can directly jump to our [ROS1](projects/opendr_ws) and [ROS2](projects/opendr_ws_2) workspaces. +- If you are interested for ready-to-use examples, then you can checkout the [projects](projects/python) folder, which contains examples and tutorials for [perception](projects/python/perception), [control](projects/python/control), [simulation](projects/python/simulation) and [hyperparameter tuning](projects/python/utils) tools. +- If you want to explore our C API, then you explore the provided [C demos](projects/c_api). + +## How can I interface OpenDR? + +OpenDR is built upon Python. +Therefore, the main OpenDR interface is written in Python and it is available through the [opendr](src/opendr) package. +Furthermore, OpenDR provides [ROS1](projects/opendr_ws) and [ROS2](projects/opendr_ws_2) interfaces, as well as a [C interface](projects/c_api). +Note that you can use as many tools as you wish at the same time, since there is no hardware limitation on the number of tools that can run at the same time. +However, hardware limitations (e.g., GPU memory) might restrict the number of tools that can run at any given moment. 
+ + ## Roadmap OpenDR has the following roadmap: @@ -54,15 +78,15 @@ OpenDR has the following roadmap: - **v3.0 (2023)**: Active perception-enabled deep learning tools for improved robotic perception ## How to contribute -Please follow the instructions provided in the [wiki](https://github.com/tasostefas/opendr_internal/wiki). +Please follow the instructions provided in the [wiki](https://github.com/opendr-eu/opendr/wiki). ## How to cite us If you use OpenDR for your research, please cite the following paper that introduces OpenDR architecture and design:

-@article{opendr2022,
+@inproceedings{opendr2022,
   title={OpenDR: An Open Toolkit for Enabling High Performance, Low Footprint Deep Learning for Robotics},
   author={Passalis, Nikolaos and Pedrazzi, Stefania and Babuska, Robert and Burgard, Wolfram and Dias, Daniel and Ferro, Francesco and Gabbouj, Moncef and Green, Ole and Iosifidis, Alexandros and Kayacan, Erdal and Kober, Jens and Michel, Olivier and Nikolaidis, Nikos and Nousi, Paraskevi and Pieters, Roel and Tzelepi, Maria and Valada, Abhinav and Tefas, Anastasios},
-  journal={arXiv preprint arXiv:2203.00403},
+  booktitle={Proceedings of the 2022 IEEE/RSJ International Conference on Intelligent Robots and Systems (to appear)},
   year={2022}
 }
 
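The README hunk above lists three ways of installing the toolkit (cloning the repository, pip, or docker). Stated as plain commands, and only as an illustrative sketch (the docker tag mirrors the `cpu_v2.0.0` image referenced by the workflows above; see docs/reference/installation.md for the authoritative steps):

```bash
# 1. From source (CPU/GPU), as also done in the Dockerfiles above
git clone --depth 1 --recurse-submodules https://github.com/opendr-eu/opendr
cd opendr && ./bin/install.sh

# 2. Via pip (CPU/GPU support only)
pip install opendr-toolkit

# 3. Via docker, exposing the bundled Jupyter Notebook on port 8888
docker run -p 8888:8888 opendr/opendr-toolkit:cpu_v2.0.0
```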
diff --git a/bin/activate_nvidia.sh b/bin/activate_nvidia.sh new file mode 100755 index 0000000000..15df6b6870 --- /dev/null +++ b/bin/activate_nvidia.sh @@ -0,0 +1,12 @@ +#!/bin/sh +export OPENDR_HOME=$PWD +export PYTHONPATH=$OPENDR_HOME/src:$PYTHONPATH +alias python=python3 +export LD_LIBRARY_PATH=$OPENDR_HOME/lib:$LD_LIBRARY_PATH + +export PATH=/usr/local/cuda/bin:$PATH +export MXNET_HOME=$OPENDR_HOME/mxnet/ +export PYTHONPATH=$MXNET_HOME/python:$PYTHONPATH +export MXNET_CUDNN_AUTOTUNE_DEFAULT=0 +export LC_ALL="C.UTF-8" +export MPLBACKEND=TkAgg diff --git a/bin/build_wheel.sh b/bin/build_wheel.sh index acfa33680c..42a184d4c2 100755 --- a/bin/build_wheel.sh +++ b/bin/build_wheel.sh @@ -7,7 +7,7 @@ git submodule update --init --recursive rm dist/* rm src/*egg-info -rf -pip install cython numpy +python3 -m pip install cython numpy # Build OpenDR packages while read p; do @@ -17,5 +17,7 @@ while read p; do python3 setup.py sdist done < packages.txt +# Cleanup +rm src/*egg-info -rf rm setup.py rm MANIFEST.in diff --git a/bin/install.sh b/bin/install.sh index d6a75fe65a..ddced59961 100755 --- a/bin/install.sh +++ b/bin/install.sh @@ -9,6 +9,15 @@ if [[ -z "${OPENDR_DEVICE}" ]]; then export OPENDR_DEVICE=cpu fi +if [[ -z "${ROS_DISTRO}" ]]; then + echo "[INFO] No ROS_DISTRO is specified. The modules relying on ROS/ROS2 might not work." +else + if ! ([[ ${ROS_DISTRO} == "noetic" || ${ROS_DISTRO} == "melodic" || ${ROS_DISTRO} == "foxy" || ${ROS_DISTRO} == "humble" ]]); then + echo "[ERROR] ${ROS_DISTRO} is not a supported ROS_DISTRO. Please use 'noetic' or 'melodic' for ROS and 'foxy' or 'humble' for ROS2." + exit 1 + fi +fi + # Install base ubuntu deps sudo apt-get install --yes libfreetype6-dev lsb-release git python3-pip curl wget python3.8-venv @@ -16,42 +25,47 @@ sudo apt-get install --yes libfreetype6-dev lsb-release git python3-pip curl wge git submodule init git submodule update -case $(lsb_release -r |cut -f2) in - "18.04") - export ROS_DISTRO=melodic;; - "20.04") - export ROS_DISTRO=noetic;; - *) - echo "Not tested for this ubuntu version" && exit 1;; -esac - # Create a virtual environment and update python3 -m venv venv source venv/bin/activate python3 -m pip install -U pip -pip3 install setuptools configparser +python3 -m pip install setuptools configparser # Add repositories for ROS sudo sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main" > /etc/apt/sources.list.d/ros-latest.list' \ - && curl -s https://raw.githubusercontent.com/ros/rosdistro/master/ros.asc | sudo apt-key add - + && curl -s https://raw.githubusercontent.com/ros/rosdistro/master/ros.asc | sudo apt-key add - # Build OpenDR make install_compilation_dependencies make install_runtime_dependencies -# Install additional ROS packages -sudo apt-get install ros-noetic-vision-msgs ros-noetic-audio-common-msgs +# ROS package dependencies +if [[ ${ROS_DISTRO} == "noetic" || ${ROS_DISTRO} == "melodic" ]]; then + echo "Installing ROS dependencies" + sudo apt-get -y install ros-$ROS_DISTRO-vision-msgs ros-$ROS_DISTRO-geometry-msgs ros-$ROS_DISTRO-sensor-msgs ros-$ROS_DISTRO-audio-common-msgs ros-$ROS_DISTRO-usb-cam ros-$ROS_DISTRO-webots-ros +fi + +# ROS2 package dependencies +if [[ ${ROS_DISTRO} == "foxy" || ${ROS_DISTRO} == "humble" ]]; then + echo "Installing ROS2 dependencies" + sudo apt-get -y install python3-lark ros-$ROS_DISTRO-usb-cam ros-$ROS_DISTRO-webots-ros2 python3-colcon-common-extensions ros-$ROS_DISTRO-vision-msgs + LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/ros/$ROS_DISTRO/lib/controller + cd 
$OPENDR_HOME/projects/opendr_ws_2/ + git clone --depth 1 --branch ros2 https://github.com/ros-drivers/audio_common src/audio_common + rosdep install -i --from-path src/audio_common --rosdistro $ROS_DISTRO -y + cd $OPENDR_HOME +fi # If working on GPU install GPU dependencies as needed if [[ "${OPENDR_DEVICE}" == "gpu" ]]; then - pip3 uninstall -y mxnet - pip3 uninstall -y torch + python3 -m pip uninstall -y mxnet + python3 -m pip uninstall -y torch echo "[INFO] Replacing mxnet-cu112==1.8.0post0 to enable CUDA acceleration." - pip3 install mxnet-cu112==1.8.0post0 + python3 -m pip install mxnet-cu112==1.8.0post0 echo "[INFO] Replacing torch==1.9.0+cu111 to enable CUDA acceleration." - pip3 install torch==1.9.0+cu111 torchvision==0.10.0+cu111 torchaudio==0.9.0 -f https://download.pytorch.org/whl/torch_stable.html + python3 -m pip install torch==1.9.0+cu111 torchvision==0.10.0+cu111 torchaudio==0.9.0 -f https://download.pytorch.org/whl/torch_stable.html echo "[INFO] Reinstalling detectronv2." - pip3 install 'git+https://github.com/facebookresearch/detectron2.git@5aeb252b194b93dc2879b4ac34bc51a31b5aee13' + python3 -m pip install 'git+https://github.com/facebookresearch/detectron2.git@5aeb252b194b93dc2879b4ac34bc51a31b5aee13' fi make libopendr diff --git a/bin/install_nvidia.sh b/bin/install_nvidia.sh new file mode 100755 index 0000000000..f0f2901b05 --- /dev/null +++ b/bin/install_nvidia.sh @@ -0,0 +1,310 @@ +#!/bin/bash + +if [[ $1 = "tx2" ]]; +then + echo "Installing OpenDR on Nvidia TX2" +elif [[ $1 = "agx" ]] || [[ $1 = "nx" ]] +then + echo "Installing OpenDR on Nvidia AGX/NX" +else + echo "Wrong argument, supported inputs are 'tx2', 'agx' and 'nx'" + exit 1 +fi + +# export OpenDR related paths +export OPENDR_HOME=$PWD +export PYTHONPATH=$OPENDR_HOME/src:$PYTHONPATH +export PYTHON=python3 +export LD_LIBRARY_PATH=$OPENDR_HOME/src:$LD_LIBRARY_PATH + +# Install mxnet +cd $OPENDR_HOME + +sudo apt-get install -y gfortran build-essential git python3-pip python-numpy libopencv-dev graphviz libopenblas-dev libopenblas-base libatlas-base-dev python-numpy + +pip3 install --upgrade pip +pip3 install setuptools==59.5.0 +pip3 install numpy==1.19.4 + +git clone --recursive -b v1.8.x https://github.com/apache/incubator-mxnet.git mxnet + +export PATH=/usr/local/cuda/bin:$PATH +export MXNET_HOME=$OPENDR_HOME/mxnet/ +export PYTHONPATH=$MXNET_HOME/python:$PYTHONPATH + +sudo rm /usr/local/cuda +sudo ln -s /usr/local/cuda-10.2 /usr/local/cuda + +cd $MXNET_HOME +cp $MXNET_HOME/make/config_jetson.mk config.mk +sed -i 's/USE_CUDA = 0/USE_CUDA = 1/' config.mk +sed -i 's/USE_CUDA_PATH = NONE/USE_CUDA_PATH = \/usr\/local\/cuda/' config.mk +# CUDA_ARCH setting +sed -i 's/CUDA_ARCH = -gencode arch=compute_53,code=sm_53 -gencode arch=compute_62,code=sm_62 -gencode arch=compute_72,code=sm_72/ /' config.mk +sed -i 's/USE_CUDNN = 0/USE_CUDNN = 1/' config.mk + +if [[ $1 = "tx2" ]]; +then + sed -i '/USE_CUDNN/a CUDA_ARCH = -gencode arch=compute_62,code=sm_62' config.mk +elif [[ $1 = "agx" ]] || [[ $1 = "nx" ]] +then + echo "AGX or nx" + sed -i '/USE_CUDNN/a CUDA_ARCH = -gencode arch=compute_72,code=sm_72' config.mk +else + echo "Wrong argument, supported inputs are 'tx2', 'agx' and 'nx'" +fi + +make -j $(nproc) NVCC=/usr/local/cuda/bin/nvcc + +cd $MXNET_HOME/python +sudo pip3 install -e . 
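+
+# Optional sanity check: make sure the freshly built mxnet can be imported before
+# continuing with the rest of the setup (prints the installed version on success).
+python3 -c "import mxnet as mx; print('mxnet', mx.__version__)" || echo "[WARN] mxnet import failed, check the build output above"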
+ +cd $OPENDR_HOME +chmod a+rwx ./mxnet + +sudo apt-get update +sudo apt-get install --yes libfreetype6-dev lsb-release curl wget + +git submodule init +git submodule update + +pip3 install configparser + +# Install Torch +sudo apt-get install --yes libopenblas-dev cmake ninja-build +TORCH=torch-1.9.0-cp36-cp36m-linux_aarch64.whl +wget https://nvidia.box.com/shared/static/h1z9sw4bb1ybi0rm3tu8qdj8hs05ljbm.whl -O torch-1.9.0-cp36-cp36m-linux_aarch64.whl + +pip3 install Cython +pip3 install $TORCH +rm ./torch-1.9.0-cp36-cp36m-linux_aarch64.whl + +# Install Torchvision +TORCH_VISION=0.10.0 +sudo apt-get install --yes libjpeg-dev zlib1g-dev libpython3-dev libavcodec-dev libavformat-dev libswscale-dev +git clone --branch v0.10.0 https://github.com/pytorch/vision torchvision +cd torchvision +export BUILD_VERSION=0.10.0 +sudo python3 setup.py install +cd ../ +rm -r torchvision/ + +# Install dlib +wget http://dlib.net/files/dlib-19.21.tar.bz2 +tar jxvf dlib-19.21.tar.bz2 +cd dlib-19.21/ +mkdir build +cd build/ +cmake .. +cmake --build . +cd ../ +sudo python3 setup.py install +cd $OPENDR_HOME +rm dlib-19.21.tar.bz2 + +apt-get install -y libprotobuf-dev protobuf-compiler +apt-get install -y python3-tk +# For AV +apt-get update && apt-get install -y software-properties-common &&\ + add-apt-repository -y ppa:jonathonf/ffmpeg-4 + +apt-get update && apt-get install -y \ + ffmpeg \ + libavformat-dev \ + libavcodec-dev \ + libavdevice-dev \ + libavutil-dev \ + libswscale-dev \ + libswresample-dev \ + libavfilter-dev \ + libeigen3-dev + +pip3 install av==8.0.1 + +# Install rest of the dependencies of OpenDR + +pip3 install absl-py==1.0.0 +pip3 install aiohttp==3.8.1 +pip3 install aiosignal==1.2.0 +pip3 install alembic==1.7.5 +pip3 install appdirs==1.4.4 +pip3 install async-timeout==4.0.1 +pip3 install attrs==21.2.0 +pip3 install audioread==2.1.9 +pip3 install autocfg==0.0.8 +pip3 install Automat==20.2.0 +pip3 install autopage==0.4.0 +pip3 install bcolz==1.2.1 +pip3 cache purge +pip3 install scikit-build==0.16.3 +pip3 install cachetools==4.2.4 +pip3 install catkin-pkg==0.4.24 +pip3 install catkin-tools==0.8.2 +pip3 install certifi==2021.10.8 +pip3 install cityscapesscripts==2.2.0 +pip3 install charset-normalizer==2.0.9 +pip3 install cliff==3.10.0 +pip3 install cloudpickle==1.5.0 +pip3 install cmaes==0.8.2 +pip3 install cmd2==2.3.3 +pip3 install colorlog==6.6.0 +pip3 install configparser==5.2.0 +pip3 install constantly==15.1.0 +pip3 install cycler==0.11.0 +pip3 install Cython==0.29.22 +pip3 install cython-bbox==0.1.3 +pip3 install decorator==5.1.0 +pip3 install defusedxml==0.7.1 +pip3 install distro==1.6.0 +pip3 install docutils==0.18.1 +pip3 install easydict==1.9 +pip3 install empy==3.3.4 +pip3 install filterpy==1.4.5 +pip3 install flake8==4.0.1 +pip3 install flake8-import-order==0.18.1 +pip3 install flask +pip3 cache purge +pip3 install frozenlist==1.2.0 +pip3 install fsspec==2021.11.1 +pip3 install future==0.18.2 +pip3 install gdown +pip3 install gluoncv==0.11.0b20210908 +pip3 install google-auth==1.35.0 +pip3 install google-auth-oauthlib==0.4.6 +pip3 install graphviz==0.8.4 +pip3 install greenlet==1.1.2 +pip3 install grpcio==1.42.0 +pip3 install gym==0.21.0 +pip3 install hyperlink==21.0.0 +pip3 install idna==3.3 +pip3 install idna-ssl==1.1.0 +pip3 install imageio==2.6.0 +pip3 install imantics==0.1.12 +pip3 install imgaug==0.4.0 +pip3 install importlib-metadata==4.8.2 +pip3 install importlib-resources==5.4.0 +pip3 install imutils==0.5.4 +pip3 install incremental==21.3.0 +pip3 install iniconfig==1.1.1 +pip3 
install ipython +pip3 install joblib==1.0.1 +pip3 install kiwisolver==1.3.1 +pip3 install lap==0.4.0 +pip3 cache purge +sudo apt-get install --yes llvm-10* +sudo ln -s /usr/bin/llvm-config-10 /usr/bin/llvm-config +pip3 install llvmlite==0.36.0 +sudo mv /usr/include/tbb/tbb.h /usr/include/tbb/tbb.h.bak +pip3 install numba==0.53.1 +LLVM_CONFIG=/usr/bin/llvm-config-10 pip3 install librosa==0.8.0 +pip3 install lxml==4.6.3 +pip3 install Mako==1.1.6 +pip3 install Markdown==3.3.6 +pip3 install MarkupSafe==2.0.1 +pip3 install matplotlib==2.2.2 +pip3 install mccabe==0.6.1 +pip3 install mmcv==0.5.9 +pip3 install motmetrics==1.2.0 +pip3 install multidict==5.2.0 +pip3 install munkres==1.1.4 +pip3 install netifaces==0.11.0 +pip3 install networkx==2.5.1 +pip3 install numpy==1.19.4 +pip3 install oauthlib==3.1.1 +pip3 install onnx==1.10.2 +pip3 install onnxruntime==1.3.0 +pip3 install opencv-python==4.5.4.60 +pip3 install opencv-contrib-python==4.5.4.60 +pip3 cache purge +pip3 install optuna==2.10.0 +pip3 install osrf-pycommon==1.0.0 +pip3 install packaging==21.3 +pip3 install pandas==1.1.5 +pip3 install pbr==5.8.0 +pip3 install Pillow==8.3.2 +pip3 install plotly==5.4.0 +pip3 install pluggy==1.0.0 +pip3 install pooch==1.5.2 +pip3 install portalocker==2.3.2 +pip3 install prettytable==2.4.0 +pip3 install progress==1.5 +pip3 install protobuf==3.19.6 +pip3 install py==1.11.0 +pip3 install py-cpuinfo==8.0.0 +pip3 install pyasn1==0.4.8 +pip3 install pyasn1-modules==0.2.8 +pip3 install pybind11==2.6.2 +pip3 install pycodestyle==2.8.0 +pip3 install pycparser==2.21 +pip3 install pyflakes==2.4.0 +pip3 install pyglet==1.5.16 +pip3 install pyparsing==3.0.6 +pip3 install pyperclip==1.8.2 +pip3 install pytest==6.2.5 +pip3 install pytest-benchmark==3.4.1 +pip3 install python-dateutil==2.8.2 +pip3 cache purge +pip3 install pytz==2021.3 +pip3 install PyWavelets==1.1.1 +pip3 install --ignore-installed PyYAML==5.3 +pip3 install requests==2.26.0 +pip3 install requests-oauthlib==1.3.0 +pip3 install resampy==0.2.2 +pip3 install rosdep==0.21.0 +pip3 install rosdistro==0.8.3 +pip3 install roslibpy==1.2.1 +pip3 install rospkg==1.3.0 +pip3 install rsa==4.8 +pip3 install scikit-image==0.16.2 +pip3 install scikit-learn==0.22 +pip3 install seaborn==0.11.2 +pip3 install setuptools-rust==1.1.2 +pip3 install scipy==1.5.4 +pip3 install Shapely==1.5.9 +pip3 install six==1.16.0 +pip3 install SoundFile==0.10.3.post1 +pip3 install SQLAlchemy==1.4.28 +pip3 install stable-baselines3==1.1.0 +pip3 install stevedore==3.5.0 +pip3 install tabulate==0.8.9 +pip3 install tenacity==8.0.1 +pip3 install tensorboard==2.4.1 +pip3 install tensorboard-plugin-wit==1.8.0 +pip3 install tensorboardX==2.0 +pip3 cache purge +pip3 install toml==0.10.2 +pip3 install tqdm==4.54.0 +pip3 install trimesh==3.5.23 +pip3 install Twisted==21.7.0 +pip3 install txaio==21.2.1 +pip3 install typing_extensions==4.0.1 +pip3 install urllib3==1.26.7 +pip3 install vcstool==0.3.0 +pip3 install wdwidth==0.2.5 +pip3 install Werkzeug==2.0.2 +pip3 install xmljson==0.2.1 +pip3 install xmltodict==0.12.0 +pip3 install yacs==0.1.8 +pip3 install yarl==1.7.2 +pip3 install zipp==3.6.0 +pip3 install zope.interface==5.4.0 +pip3 install wheel +pip3 install pytorch-lightning==1.2.3 +pip3 install omegaconf==2.3.0 +pip3 install ninja +pip3 install terminaltables +pip3 install psutil +pip3 install continual-inference>=1.0.2 +pip3 install git+https://github.com/waspinator/pycococreator.git@0.2.0 +pip3 install 
git+https://github.com/cidl-auth/cocoapi@03ee5a19844e253b8365dbbf35c1e5d8ca2e7281#subdirectory=PythonAPI +pip3 install git+https://github.com/cocodataset/panopticapi.git@7bb4655548f98f3fedc07bf37e9040a992b054b0 +pip3 install git+https://github.com/mapillary/inplace_abn.git +pip3 install git+https://github.com/facebookresearch/detectron2.git@4841e70ee48da72c32304f9ebf98138c2a70048d +pip3 install git+https://github.com/cidl-auth/DCNv2 +pip3 install ${OPENDR_HOME}/src/opendr/perception/panoptic_segmentation/efficient_ps/algorithm/EfficientPS +pip3 install ${OPENDR_HOME}/src/opendr/perception/panoptic_segmentation/efficient_ps/algorithm/EfficientPS/efficientNet +pip3 cache purge + +cd $OPENDR_HOME/src/opendr/perception/object_detection_2d/retinaface +make +cd $OPENDR_HOME diff --git a/dependencies/parse_dependencies.py b/dependencies/parse_dependencies.py index 31fdc20829..608abdcd51 100644 --- a/dependencies/parse_dependencies.py +++ b/dependencies/parse_dependencies.py @@ -65,7 +65,7 @@ def read_ini_key(key, summary_file): # Loop through tools and extract dependencies if not global_dependencies: opendr_home = os.environ.get('OPENDR_HOME') - for dir_to_walk in ['src', 'projects/control/eagerx']: + for dir_to_walk in ['src', 'projects/python/control/eagerx']: for subdir, dirs, files in os.walk(os.path.join(opendr_home, dir_to_walk)): for filename in files: if filename == 'dependencies.ini': diff --git a/dependencies/pip_requirements.txt b/dependencies/pip_requirements.txt deleted file mode 100644 index 76a12feebd..0000000000 --- a/dependencies/pip_requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -numpy==1.17.5 -Cython -torch==1.7.1 -wheel -git+https://github.com/cidl-auth/cocoapi@03ee5a19844e253b8365dbbf35c1e5d8ca2e7281#subdirectory=PythonAPI -git+https://github.com/cocodataset/panopticapi.git@7bb4655548f98f3fedc07bf37e9040a992b054b0 -git+https://github.com/MatthewHowe/DCNv2@194f5733c667cf13e5bd478a8c5bf27573ffa98c \ No newline at end of file diff --git a/docs/reference/activity-recognition.md b/docs/reference/activity-recognition.md index 733ba2207e..22214a87dc 100644 --- a/docs/reference/activity-recognition.md +++ b/docs/reference/activity-recognition.md @@ -2,6 +2,7 @@ The *activity_recognition* module contains the *X3DLearner* and *CoX3DLearner* classes, which inherit from the abstract class *Learner*. +You can find the classes and the corresponding IDs regarding activity recognition [here](https://github.com/opendr-eu/opendr/blob/master/src/opendr/perception/activity_recognition/datasets/kinetics400_classes.csv). ### Class X3DLearner Bases: `engine.learners.Learner` @@ -146,7 +147,6 @@ Parameters: Path to metadata file in json format or to weights path. - #### `X3DLearner.optimize` ```python X3DLearner.optimize(self, do_constant_folding) @@ -215,8 +215,6 @@ Parameters: ``` - - #### References [1] X3D: Expanding Architectures for Efficient Video Recognition, [arXiv](https://arxiv.org/abs/2004.04730). @@ -398,7 +396,6 @@ Inherited from [X3DLearner](/src/opendr/perception/activity_recognition/x3d/x3d_ ``` - #### Performance Evaluation TABLE-1: Input shapes, prediction accuracy on Kinetics 400, floating point operations (FLOPs), parameter count and maximum allocated memory of activity recognition learners at inference. @@ -426,7 +423,7 @@ TABLE-2: Speed (evaluations/second) of activity recognition learner inference on TABLE-3: Throughput (evaluations/second) of activity recognition learner inference on various computational devices. 
-The largest fitting power of two was used as batch size for each device. +The largest fitting power of two was used as batch size for each device. | Model | CPU | TX2 | Xavier | RTX 2080 Ti | | ------- | ----- | ---- | ------ | ----------- | | X3D-L | 0.22 | 0.21 | 1.73 | 3.55 | @@ -438,7 +435,7 @@ The largest fitting power of two was used as batch size for each device. | CoX3D-S | 11.60 | 8.22 | 64.91 | 196.54 | -TABLE-4: Energy (Joules) of activity recognition learner inference on embedded devices. +TABLE-4: Energy (Joules) of activity recognition learner inference on embedded devices. | Model | TX2 | Xavier | | ------- | ------ | ------ | | X3D-L | 187.89 | 23.54 | @@ -468,5 +465,6 @@ Model inference works as expected. #### References -[1] X3D: Expanding Architectures for Efficient Video Recognition, +[2] X3D: Expanding Architectures for Efficient Video Recognition, [arXiv](https://arxiv.org/abs/2004.04730). + diff --git a/docs/reference/ambiguity_measure.md b/docs/reference/ambiguity_measure.md new file mode 100644 index 0000000000..3b816628e6 --- /dev/null +++ b/docs/reference/ambiguity_measure.md @@ -0,0 +1,76 @@ +## ambiguity_measure module + +The *ambiguity_measure* module contains the *AmbiguityMeasure* class. + +### Class AmbiguityMeasure +Bases: `object` + +The *AmbiguityMeasure* class is a tool that allows to obtain an ambiguity measure of vision-based models that output pixel-wise value estimates. +This tool can be used in combination with vision-based manipulation models such as Transporter Nets [[1]](#transporter-paper). + +The [AmbiguityMeasure](../../src/opendr/utils/ambiguity_measure/ambiguity_measure.py) class has the following public methods: + +#### `AmbiguityMeasure` constructor +```python +AmbiguityMeasure(self, threshold, temperature) +``` + +Constructor parameters: + +- **threshold**: *float, default=0.5*\ + Ambiguity threshold, should be in [0, 1). +- **temperature**: *float, default=1.0*\ + Temperature of the sigmoid function. + Should be > 0. + Higher temperatures will result in higher ambiguity measures. + +#### `AmbiguityMeasure.get_ambiguity_measure` +```python +AmbiguityMeasure.get_ambiguity_measure(self, heatmap) +``` + +This method allows to obtain an ambiguity measure of the output of a model. + +Parameters: + +- **heatmap**: *np.ndarray*\ + Pixel-wise value estimates. + These can be obtained using from for example a Transporter Nets model [[1]](#transporter-paper). + +#### Demos and tutorial + +A demo showcasing the usage and functionality of the *AmbiguityMeasure* is available [here](https://colab.research.google.com/github/opendr-eu/opendr/blob/ambiguity_measure/projects/python/utils/ambiguity_measure/ambiguity_measure_tutorial.ipynb). + + +#### Examples + +* **Ambiguity measure example** + + This example shows how to obtain the ambiguity measure from pixel-wise value estimates. 
+ + ```python + import numpy as np + from opendr.utils.ambiguity_measure.ambiguity_measure import AmbiguityMeasure + + # Simulate image and pixel-wise value estimates (normally you would get this from a model such as Transporter) + img = 255 * np.random.random((128, 128, 3)) + img = np.asarray(img, dtype="uint8") + heatmap = 10 * np.random.random((128, 128)) + + # Initialize ambiguity measure + am = AmbiguityMeasure(threshold=0.1, temperature=3) + + # Get ambiguity measure of the heatmap + ambiguous, locs, maxima, probs = am.get_ambiguity_measure(heatmap) + + # Plot ambiguity measure + am.plot_ambiguity_measure(heatmap, locs, probs, img) + ``` + +#### References +[1] +Zeng, A., Florence, P., Tompson, J., Welker, S., Chien, J., Attarian, M., ... & Lee, J. (2021, October). +Transporter networks: Rearranging the visual world for robotic manipulation. +In Conference on Robot Learning (pp. 726-747). +PMLR. + diff --git a/docs/reference/continual-transformer-encoder.md b/docs/reference/continual-transformer-encoder.md new file mode 100644 index 0000000000..38ec084bae --- /dev/null +++ b/docs/reference/continual-transformer-encoder.md @@ -0,0 +1,211 @@ +## Continual Transformer Encoder module + + +### Class CoTransEncLearner +Bases: `engine.learners.Learner` + +The *CoTransEncLearner* class provides a Continual Transformer Encoder learner, which can be used for time-series processing of user-provided features. +This module was originally proposed by Hedegaard et al. in "Continual Transformers: Redundancy-Free Attention for Online Inference", 2022, https://arxiv.org/abs/2201.06268. + +The [CoTransEncLearner](src/opendr/perception/activity_recognition/continual_transformer_decoder/continual_transformer_decoder_learner.py) class has the following public methods: + +#### `CoTransEncLearner` constructor + +```python +CoTransEncLearner(self, lr, iters, batch_size, optimizer, lr_schedule, network_head, num_layers, input_dims, hidden_dims, sequence_len, num_heads, dropout, num_classes, positional_encoding_learned, checkpoint_after_iter, checkpoint_load_iter, temp_path, device, loss, weight_decay, momentum, drop_last, pin_memory, num_workers, seed) +``` + +Constructor parameters: + + - **lr**: *float, default=1e-2*\ + Learning rate during optimization. + - **iters**: *int, default=10*\ + Number of epochs to train for. + - **batch_size**: *int, default=64*\ + Dataloader batch size. Defaults to 64. + - **optimizer**: *str, default="sgd"*\ + Name of optimizer to use ("sgd" or "adam"). + - **lr_schedule**: *str, default=""*\ + Schedule for training the model. + - **network_head**: *str, default="classification"*\ + Head of network (only "classification" is currently available). + - **num_layers**: *int, default=1*\ + Number of Transformer Encoder layers (1 or 2). Defaults to 1. + - **input_dims**: *float, default=1024*\ + Input dimensions per token. + - **hidden_dims**: *float, default=1024*\ + Hidden projection dimension. + - **sequence_len**: *int, default=64*\ + Length of token sequence to consider. + - **num_heads**: *int, default=8*\ + Number of attention heads. + - **dropout**: *float, default=0.1*\ + Dropout probability. + - **num_classes**: *int, default=22*\ + Number of classes to predict among. + - **positional_encoding_learned**: *bool, default=False*\ + Positional encoding type. + - **checkpoint_after_iter**: *int, default=0*\ + Unused parameter. + - **checkpoint_load_iter**: *int, default=0*\ + Unused parameter. + - **temp_path**: *str, default=""*\ + Path in which to store temporary files.
+ - **device**: *str, default="cuda"*\ + Name of computational device ("cpu" or "cuda"). + - **loss**: *str, default="cross_entropy"*\ + Loss function used during optimization. + - **weight_decay**: *[type], default=1e-4*\ + Weight decay used for optimization. + - **momentum**: *float, default=0.9*\ + Momentum used for optimization. + - **drop_last**: *bool, default=True*\ + Drop last data point if a batch cannot be filled. + - **pin_memory**: *bool, default=False*\ + Pin memory in dataloader. + - **num_workers**: *int, default=0*\ + Number of workers in dataloader. + - **seed**: *int, default=123*\ + Random seed. + + +#### `CoTransEncLearner.fit` +```python +CoTransEncLearner.fit(self, dataset, val_dataset, epochs, steps) +``` + +This method is used for training the algorithm on a train dataset and validating on a val dataset. + +Parameters: + - **dataset**: *Dataset*: + Training dataset. + - **val_dataset**: *Dataset, default=None* + Validation dataset. If none is given, validation steps are skipped. + - **epochs**: *int, default=None* + Number of epochs. If none is supplied, self.iters will be used. + - **steps**: *int, default=None* + Number of training steps to conduct. If none, this is determined by epochs. + + +#### `CoTransEncLearner.eval` +```python +CoTransEncLearner.eval(self, dataset, steps) +``` +This method is used to evaluate a trained model on an evaluation dataset. +Returns a dictionary containing stats regarding evaluation. + +Parameters: + - **dataset**: *Dataset* + Dataset on which to evaluate model. + - **steps**: *int, default=None* + Number of validation batches to evaluate. If None, all batches are evaluated. + + +#### `CoTransEncLearner.infer` +```python +CoTransEncLearner.infer(x) +``` + +This method is used to perform classification of a video. +Returns a `engine.target.Category` objects, where each holds a category. + +Parameters: +- **x**: *Union[Timeseries, Vector, torch.Tensor]* + Either a single time instance (Vector) or a Timeseries. x can also be passed as a torch.Tensor. + + +#### `CoTransEncLearner.save` +```python +CoTransEncLearner.save(self, path) +``` + +Save model weights and metadata to path. +Provided with the path "/my/path/name" (absolute or relative), it creates the "name" directory, if it does not already exist. +Inside this folder, the model is saved as "model_name.pth" and the metadata file as "name.json". +If the files already exist, their names are versioned with a suffix. + +If `self.optimize` was run previously, it saves the optimized ONNX model in a similar fashion with an ".onnx" extension. + +Parameters: +- **path**: *str* + Directory in which to save model weights and meta data. + + +#### `CoTransEncLearner.load` +```python +CoTransEncLearner.load(self, path) +``` + +This method is used to load a previously saved model from its saved folder. + +Parameters: +- **path**: *str* + Path to metadata file in json format or to weights path. + + +#### `CoTransEncLearner.optimize` +```python +CoTransEncLearner.optimize(self, do_constant_folding) +``` + +Optimize model execution. This is accomplished by saving to the ONNX format and loading the optimized model. + +Parameters: +- **do_constant_folding**: *bool, default=False* + ONNX format optimization. + If True, the constant-folding optimization is applied to the model during export. + Constant-folding optimization will replace some of the ops that have all constant inputs, with pre-computed constant nodes. + + +#### Examples + +* **Fit model**. 
+ + ```python + from opendr.perception.activity_recognition import CoTransEncLearner + from opendr.perception.activity_recognition.datasets import DummyTimeseriesDataset + + learner = CoTransEncLearner( + batch_size=2, + device="cpu", + input_dims=8, + hidden_dims=32, + sequence_len=64, + num_heads=8, + num_classes=4, + ) + train_ds = DummyTimeseriesDataset( + sequence_len=64, num_sines=8, num_datapoints=128 + ) + val_ds = DummyTimeseriesDataset( + sequence_len=64, num_sines=8, num_datapoints=128, base_offset=128 + ) + learner.fit(dataset=train_ds, val_dataset=val_ds, steps=2) + learner.save('./saved_models/trained_model') + ``` + +* **Evaluate model**. + + ```python + from opendr.perception.activity_recognition import CoTransEncLearner + from opendr.perception.activity_recognition.datasets import DummyTimeseriesDataset + + learner = CoTransEncLearner( + batch_size=2, + device="cpu", + input_dims=8, + hidden_dims=32, + sequence_len=64, + num_heads=8, + num_classes=4, + ) + test_ds = DummyTimeseriesDataset( + sequence_len=64, num_sines=8, num_datapoints=128, base_offset=256 + ) + results = learner.eval(test_ds) # Dict with accuracy and loss + ``` + + +#### References +[3] Continual Transformers: Redundancy-Free Attention for Online Inference, +[arXiv](https://arxiv.org/abs/2201.06268). diff --git a/docs/reference/customize.md b/docs/reference/customize.md new file mode 100644 index 0000000000..3af80f0656 --- /dev/null +++ b/docs/reference/customize.md @@ -0,0 +1,131 @@ +# Customizing the toolkit + +OpenDR is fully open-source and can be readily customized to meet the needs of several different application areas, since the source code for all the developed tools is provided. +Several ready-to-use examples, which are expected to cover a wide range of different needs, are provided. +For example, users can readily use the existing [ROS nodes](../../projects/opendr_ws), e.g., by including the required triggers or by combining several nodes into one to build custom nodes that will fit their needs. +Furthermore, note that several tools can be combined within a ROS node, as showcased in [face recognition ROS node](../../projects/opendr_ws/src/perception/scripts/face_recognition.py). +You can use these nodes as a template for customizing the toolkit to your own needs. +The rest of this document includes instructions for: +1. [Building docker images using the provided docker files](#building-custom-docker-images) +2. [Customizing existing docker images](#customizing-existing-docker-images) +3. [Changing the behavior of ROS nodes](#changing-the-behavior-of-ros-nodes) +4. [Building docker images that do not contain the whole toolkit](#building-docker-images-that-do-not-contain-the-whole-toolkit) + + +## Building custom docker images +The default docker images can be too large for some applications. +OpenDR provides the dockerfiles for customizing the images to your own needs, e.g., using OpenDR in custom third-party images. +Therefore, you can build the docker images locally using the [Dockerfile](/Dockerfile) ([Dockerfile-cuda](/Dockerfile-cuda) for cuda) provided in the root folder of the toolkit. + +### Building the CPU image +For the CPU image, execute the following commands: +```bash +git clone --depth 1 --recurse-submodules -j8 https://github.com/opendr-eu/opendr +cd opendr +sudo docker build -t opendr/opendr-toolkit:cpu . 
+``` + +### Building the CUDA image +For the cuda-enabled image, first edit `/etc/docker/daemon.json` in order to set the default docker runtime: +``` +{ + "runtimes": { + "nvidia": { + "path": "nvidia-container-runtime", + "runtimeArgs": [] + } + }, + "default-runtime": "nvidia" +} +``` + +Restart docker afterwards: +``` +sudo systemctl restart docker.service +``` +Then you can build the supplied dockerfile: +```bash +git clone --depth 1 --recurse-submodules -j8 https://github.com/opendr-eu/opendr +cd opendr +sudo docker build -t opendr/opendr-toolkit:cuda -f Dockerfile-cuda . +``` + +### Building the Embedded Devices image +The provided Dockerfile-embedded is tested on fresh flashed Nvidia-nx, Nvidia-Tx2 and Nvidia-Agx using jetpack 4.6. + +To build the embedded devices images yourself, first edit `/etc/docker/daemon.json` in order to set the default docker runtime: +``` +{ + "runtimes": { + "nvidia": { + "path": "nvidia-container-runtime", + "runtimeArgs": [] + } + }, + "default-runtime": "nvidia" +} +``` + +Restart docker afterwards: +``` +sudo systemctl restart docker.service +``` + +Then run: +``` +sudo docker build --build-arg device=nx -t opendr/opendr-toolkit:nx -f Dockerfile-embedded . +``` +You can build the image on nx/tx2/agx by changing the build-arg accordingly. + +### Running the custom images +In order to run them, the commands are respectively: +```bash +sudo docker run -p 8888:8888 opendr/opendr-toolkit:cpu +``` +or: +``` +sudo docker run --gpus all -p 8888:8888 opendr/opendr-toolkit:cuda +``` +or: +``` +sudo docker run -p 8888:8888 opendr/opendr-toolkit:nx +``` +## Customizing existing docker images +Building docker images from scratch can take a lot of time, especially for embedded systems without cross-compilation support. +If you need to modify a docker image without rebuilding it (e.g., for changing some source files inside it or adding support for custom pipelines), then you can simply start with the image that you are interesting in, make the changes and use the [docker commit](https://docs.docker.com/engine/reference/commandline/commit/) command. In this way, the changes that have been made will be saved in a new image. + + +## Changing the behavior of ROS nodes +ROS nodes are provided as examples that demonstrate how various tools can be used. +As a result, customization might be needed in order to make them appropriate for your specific needs. +Currently, all nodes support changing the input/output topics (please refer to the [README](../../projects/opendr_ws/src/opendr_perception/README.md) for more information for each node). +However, if you need to change anything else (e.g., load a custom model), then you should appropriately modify the source code of the nodes. +This is very easy, since the Python API of OpenDR is used in all of the provided nodes. +You can refer to [Python API documentation](https://github.com/opendr-eu/opendr/blob/master/docs/reference/index.md) for more details for the tool that you are interested in. + +### Loading a custom model +Loading a custom model in a ROS node is very easy. +First, locate the node that you want to modify (e.g., [pose estimation](../../projects/opendr_ws/src/perception/scripts/pose_estimation.py)). +Then, search for the line where the learner loads the model (i.e., calls the `load()` function). +For the aforementioned node, this happens at [line 76](../../projects/opendr_ws/src/perception/scripts/pose_estimation.py#L76). +Then, replace the path to the `load()` function with the path to your custom model. 
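+For example, a minimal sketch of the change (the variable name and paths below are illustrative and will differ per node):
+```python
+# before: the node loads the model shipped with the toolkit
+# pose_estimator.load("openpose_default")
+
+# after: point load() to the directory of your own trained model
+pose_estimator.load("/path/to/my_custom_model")
+```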
+You can also optionally remove the call to the `download()` function (e.g., [line 75](../../projects/opendr_ws/src/perception/scripts/pose_estimation.py#L75)) to make the node start up faster. + + +## Building docker images that do not contain the whole toolkit +To build custom docker images that do not contain the whole toolkit you should follow these steps: +1. Identify the tools that you are using and note them. +2. Start from a clean clone of the repository and remove all modules under [src/opendr] that you are not using. +To this end, use the `rm` command from the root folder of the toolkit and write down the commands that you are issuing. +Please note that you should NOT remove the `engine` package. +3. Add the `rm` commands that you have issued to the dockerfile (e.g., in the main [dockerfile](https://github.com/opendr-eu/opendr/blob/master/Dockerfile)) after the `WORKDIR` command and before the `RUN ./bin/install.sh` command. +4. Build the dockerfile as usual. + +By removing the tools that you are not using, you are also removing the corresponding `requirements.txt` file. +In this way, the `install.sh` script will not pull and install the corresponding dependencies, resulting in smaller and more lightweight docker images. + +Things to keep in mind: +1. ROS noetic is manually installed by the installation script. +If you want to install another version, you should modify both `install.sh` and `Makefile`. +2. `mxnet`, `torch` and `detectron` are manually installed by the `install.sh` script if you have set `OPENDR_DEVICE=gpu`. +If you do not need these dependencies, then you should manually remove them. diff --git a/docs/reference/detr.md b/docs/reference/detr.md index d54f267ac0..b2007cb601 100644 --- a/docs/reference/detr.md +++ b/docs/reference/detr.md @@ -230,10 +230,10 @@ Documentation on how to use this node can be found [here](../../projects/opendr_ #### Tutorials and Demos A tutorial on performing inference is available -[here](../../projects/perception/object_detection_2d/detr/inference_tutorial.ipynb). -Furthermore, demos on performing [training](../../projects/perception/object_detection_2d/detr/train_demo.py), -[evaluation](../../projects/perception/object_detection_2d/detr/eval_demo.py) and -[inference](../../projects/perception/object_detection_2d/detr/inference_demo.py) are also available. +[here](../../projects/python/perception/object_detection_2d/detr/inference_tutorial.ipynb). +Furthermore, demos on performing [training](../../projects/python/perception/object_detection_2d/detr/train_demo.py), -[evaluation](../../projects/python/perception/object_detection_2d/detr/eval_demo.py) and +[inference](../../projects/python/perception/object_detection_2d/detr/inference_demo.py) are also available. diff --git a/docs/reference/eagerx.md b/docs/reference/eagerx.md index 53f3eae930..537e128c3d 100644 --- a/docs/reference/eagerx.md +++ b/docs/reference/eagerx.md @@ -24,21 +24,21 @@ Documentation is available online: [https://eagerx.readthedocs.io](https://eager **Prerequisites**: EAGERx requires ROS Noetic and Python 3.8 to be installed. -1. **[demo_full_state](../../projects/control/eagerx/demos/demo_full_state.py)**: +1. **[demo_full_state](../../projects/python/control/eagerx/demos/demo_full_state.py)**: Here, we wrap the OpenAI gym within EAGERx. The agent learns to map low-dimensional angular observations to torques. -2. **[demo_pid](../../projects/control/eagerx/demos/demo_pid.py)**: +2.
**[demo_pid](../../projects/python/control/eagerx/demos/demo_pid.py)**: Here, we add a PID controller, tuned to stabilize the pendulum in the upright position, as a pre-processing node. The agent now maps low-dimensional angular observations to reference torques. In turn, the reference torques are converted to torques by the PID controller, and applied to the system. -3. **[demo_classifier](../../projects/control/eagerx/demos/demo_classifier.py)**: +3. **[demo_classifier](../../projects/python/control/eagerx/demos/demo_classifier.py)**: Instead of using low-dimensional angular observations, the environment now produces pixel images of the pendulum. In order to speed-up learning, we use a pre-trained classifier to convert these pixel images to estimated angular observations. Then, the agent uses these estimated angular observations similarly as in 'demo_2_pid' to successfully swing-up the pendulum. Example usage: ```bash -cd $OPENDR_HOME/projects/control/eagerx/demos +cd $OPENDR_HOME/projects/python/control/eagerx/demos python3 [demo_name] ``` diff --git a/docs/reference/end-to-end-planning.md b/docs/reference/end-to-end-planning.md index 79ff34f7a0..748ecdde91 100644 --- a/docs/reference/end-to-end-planning.md +++ b/docs/reference/end-to-end-planning.md @@ -7,22 +7,22 @@ class *LearnerRL*. Bases: `engine.learners.LearnerRL` The *EndToEndPlanningRLLearner* is an agent that can be used to train quadrotor robots equipped with a depth sensor to -follow a provided trajectory while avoiding obstacles. +follow a provided trajectory while avoiding obstacles. Originally published in [[1]](#safe-e2e-planning), -The [EndToEndPlanningRLLearner](/src/opendr/planning/end_to_end_planning/e2e_planning_learner.py) class has the +The [EndToEndPlanningRLLearner](../../src/opendr/planning/end_to_end_planning/e2e_planning_learner.py) class has the following public methods: #### `EndToEndPlanningRLLearner` constructor Constructor parameters: -- **env**: *gym.Env*\ - Reinforcment learning environment to train or evaluate the agent on. +- **env**: *gym.Env, default=None*\ + Reinforcement learning environment to train or evaluate the agent on. - **lr**: *float, default=3e-4*\ Specifies the initial learning rate to be used during training. - **n_steps**: *int, default=1024*\ Specifies the number of steps to run for environment per update. -- **iters**: *int, default=5e4*\ +- **iters**: *int, default=1e5*\ Specifies the number of steps the training should run for. - **batch_size**: *int, default=64*\ Specifies the batch size during training. @@ -35,7 +35,7 @@ Constructor parameters: #### `EndToEndPlanningRLLearner.fit` ```python -EndToEndPlanningRLLearner.fit(self, env, logging_path, silent, verbose) +EndToEndPlanningRLLearner.fit(self, env, logging_path, verbose) ``` Train the agent on the environment. @@ -46,8 +46,6 @@ Parameters: If specified use this env to train. - **logging_path**: *str, default=''*\ Path for logging and checkpointing. -- **silent**: *bool, default=False*\ - Disable verbosity. - **verbose**: *bool, default=True*\ Enable verbosity. @@ -103,17 +101,20 @@ Parameters: ### Simulation environment setup -The environment includes an Ardupilot controlled quadrotor in Webots simulation. +The environment is provided with a [world](../../src/opendr/planning/end_to_end_planning/envs/webots/worlds/train-no-dynamic-random-obstacles.wbt) +that needs to be opened with Webots version 2022b in order to demonstrate the end-to-end planner. 
+ +The environment includes an optional Ardupilot controlled quadrotor for simulating dynamics. For the installation of Ardupilot instructions are available [here](https://github.com/ArduPilot/ardupilot). -The required files to complete Ardupilot setup can be downloaded by running [`download_ardupilot_files.py`](src/opendr/planning/end_to_end_planning/download_ardupilot_files.py) script. +The required files to complete Ardupilot setup can be downloaded by running [download_ardupilot_files.py](../../src/opendr/planning/end_to_end_planning/download_ardupilot_files.py) script. The downloaded files (zipped as `ardupilot.zip`) should be replaced under the installation of Ardupilot. In order to run Ardupilot in Webots 2021a, controller codes should be replaced. (For older versions of Webots, these files can be skipped.) The world file for the environment is provided under `/ardupilot/libraries/SITL/examples/webots/worlds/` for training and testing. Install `mavros` package for ROS communication with Ardupilot. Instructions are available [here](https://github.com/mavlink/mavros/blob/master/mavros/README.md#installation). -Source installation is recomended. +Source installation is recommended. ### Running the environment @@ -128,16 +129,16 @@ The simulation time should stop at first time step and wait for Ardupilot softwa - `take_off` which takes off the quadrotor. - `range_image` which converts the depth image into array format to be input for the learner. -After these steps the [AgiEnv](src/opendr/planning/end_to_end_planning/envs/agi_env.py) gym environment can send action comments to the simulated drone and receive depth image and pose information from simulation. +After these steps the [UAVDepthPlanningEnv](../../src/opendr/planning/end_to_end_planning/envs/UAV_depth_planning_env.py) gym environment can send action comments to the simulated drone and receive depth image and pose information from simulation. ### Examples Training in Webots environment: ```python -from opendr.planning.end_to_end_planning import EndToEndPlanningRLLearner, AgiEnv +from opendr.planning.end_to_end_planning import EndToEndPlanningRLLearner, UAVDepthPlanningEnv -env = AgiEnv() +env = UAVDepthPlanningEnv() learner = EndToEndPlanningRLLearner(env, n_steps=1024) learner.fit(logging_path='./end_to_end_planning_tmp') ``` @@ -146,9 +147,9 @@ learner.fit(logging_path='./end_to_end_planning_tmp') Running a pretrained model: ```python -from opendr.planning.end_to_end_planning import EndToEndPlanningRLLearner, AgiEnv +from opendr.planning.end_to_end_planning import EndToEndPlanningRLLearner, UAVDepthPlanningEnv -env = AgiEnv() +env = UAVDepthPlanningEnv() learner = EndToEndPlanningRLLearner(env) learner.load('{$OPENDR_HOME}/src/opendr/planning/end_to_end_planning/pretrained_model/saved_model.zip') obs = env.reset() @@ -182,4 +183,8 @@ TABLE 2: Platform compatibility evaluation. | x86 - Ubuntu 20.04 (CPU docker) | Pass | | x86 - Ubuntu 20.04 (GPU docker) | Pass | | NVIDIA Jetson TX2 | Pass | -| NVIDIA Jetson Xavier AGX | Pass | \ No newline at end of file +| NVIDIA Jetson Xavier AGX | Pass | + +#### References +[1] Ugurlu, H.I.; Pham, X.H.; Kayacan, E. Sim-to-Real Deep Reinforcement Learning for Safe End-to-End Planning of Aerial Robots. Robotics 2022, 11, 109. +[DOI](https://doi.org/10.3390/robotics11050109). 
[GitHub](https://github.com/open-airlab/gym-depth-planning.git) \ No newline at end of file diff --git a/docs/reference/face-detection-2d-retinaface.md b/docs/reference/face-detection-2d-retinaface.md index 976c60e26d..da160df163 100644 --- a/docs/reference/face-detection-2d-retinaface.md +++ b/docs/reference/face-detection-2d-retinaface.md @@ -167,17 +167,17 @@ Parameters: If True, maximum verbosity if enabled. - **url**: *str, default=OpenDR FTP URL*\ URL of the FTP server. - + #### Examples * **Training example**. - To train properly, the backbone weights are downloaded automatically in the `temp_path`. + To train properly, the backbone weights are downloaded automatically in the `temp_path`. The WIDER Face detection dataset is supported for training, implemented as a `DetectionDataset` subclass. This example assumes the data has been downloaded and placed in the directory referenced by `data_root`. ```python from opendr.perception.object_detection_2d import RetinaFaceLearner, WiderFaceDataset from opendr.engine.datasets import ExternalDataset - + dataset = WiderFaceDataset(root=data_root, splits=['train']) face_learner = RetinaFaceLearner(backbone='resnet', prefix='retinaface_resnet50', @@ -189,7 +189,7 @@ Parameters: face_learner.fit(dataset, val_dataset=dataset, verbose=True) face_learner.save('./trained_models/retinaface_resnet50') ``` - + Custom datasets are supported by inheriting the `DetectionDataset` class. * **Inference and result drawing example on a test .jpg image using OpenCV.** @@ -208,7 +208,7 @@ Parameters: img = draw_bounding_boxes(img.opencv(), bounding_boxes, learner.classes, show=True) ``` - + #### Performance Evaluation In terms of speed, the performance of RetinaFace is summarized in the table below (in FPS). @@ -223,12 +223,12 @@ The measurement was made on a Jetson TX2 module. | Variant | Memory (MB) | Energy (Joules) - Total per inference | |-------------------|---------|-------| -| RetinaFace | 4443 | 21.83 | +| RetinaFace | 4443 | 21.83 | | RetinaFace-MobileNet | 4262 | 8.73 | Finally, we measure the recall on the WIDER face validation subset at 87.83%. Note that RetinaFace can make use of image pyramids and horizontal flipping to achieve even better recall at the cost of additional computations. -For the MobileNet version, recall drops to 77.81%. +For the MobileNet version, recall drops to 77.81%. The platform compatibility evaluation is also reported below: @@ -242,8 +242,8 @@ The platform compatibility evaluation is also reported below: | NVIDIA Jetson TX2 | :heavy_check_mark: | | NVIDIA Jetson Xavier AGX | :heavy_check_mark: | | NVIDIA Jetson Xavier NX | :heavy_check_mark: | - + #### References [1] RetinaFace: Single-stage Dense Face Localisation in the Wild, [arXiv](https://arxiv.org/abs/1905.00641). - + diff --git a/docs/reference/fall-detection.md b/docs/reference/fall-detection.md index 3d535a633c..567ff89993 100644 --- a/docs/reference/fall-detection.md +++ b/docs/reference/fall-detection.md @@ -5,9 +5,18 @@ The *fall_detection* module contains the *FallDetectorLearner* class, which inhe ### Class FallDetectorLearner Bases: `engine.learners.Learner` -The *FallDetectorLearner* class contains the implementation of a naive fall detector algorithm. +The *FallDetectorLearner* class contains the implementation of a rule-based fall detector algorithm. It can be used to perform fall detection on images (inference) using a pose estimator. 
+This rule-based method can provide **cheap and fast** fall detection capabilities when pose estimation +is already being used. Its inference time cost is ~0.1% of pose estimation, adding negligible overhead. + +However, it **has known limitations** due to its nature. Working with 2D poses means that depending on the +orientation of the person, it cannot detect most fallen poses that face the camera. +Another example of known false-positive detection occurs when a person is sitting with their knees +detectable, but ankles obscured or undetectable, this however is critical for detecting a fallen person +whose ankles are not visible. + The [FallDetectorLearner](/src/opendr/perception/fall_detection/fall_detector_learner.py) class has the following public methods: diff --git a/docs/reference/fmp_gmapping.md b/docs/reference/fmp_gmapping.md index 6df53abfa1..913bd88609 100644 --- a/docs/reference/fmp_gmapping.md +++ b/docs/reference/fmp_gmapping.md @@ -3,9 +3,9 @@ Traditional *SLAM* algorithm for estimating a robot's position and a 2D, grid-based map of the environment from planar LiDAR scans. Based on OpenSLAM GMapping, with additional functionality for computing the closed-form Full Map Posterior Distribution. -For more details on the launchers and tools, see the [FMP_Eval Readme](../../projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/README.md). +For more details on the launchers and tools, see the [FMP_Eval Readme](../../projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/README.md). -For more details on the actual SLAM algorithm and its ROS node wrapper, see the [SLAM_GMapping Readme](../../projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/README.md). +For more details on the actual SLAM algorithm and its ROS node wrapper, see the [SLAM_GMapping Readme](../../projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/README.md). ## Demo Usage A demo ROSBag for a square corridor can be found in the Map Simulator submodule in `src/map_simulator/rosbags/`, as well as preconfigured ***roslaunch*** @@ -25,4 +25,4 @@ This will start the following processes and nodes: Other ROSBags can be easily generated with the map simulator script from either new custom scenarios, or from the test configuration files in `src/map_simulator/scenarios/robots/` directory. -For more information on how to define custom test scenarios and converting them to ROSBags, see the [Map_Simulator Readme](../../projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/README.md). \ No newline at end of file +For more information on how to define custom test scenarios and converting them to ROSBags, see the [Map_Simulator Readme](../../projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/README.md). \ No newline at end of file diff --git a/docs/reference/gem.md b/docs/reference/gem.md index 27e19ae9b7..88826b60f1 100644 --- a/docs/reference/gem.md +++ b/docs/reference/gem.md @@ -216,8 +216,8 @@ Parameters: #### Demo and Tutorial -An inference [demo](../../projects/perception/object_detection_2d/gem/inference_demo.py) and -[tutorial](../../projects/perception/object_detection_2d/gem/inference_tutorial.ipynb) are available. +An inference [demo](../../projects/python/perception/object_detection_2d/gem/inference_demo.py) and +[tutorial](../../projects/python/perception/object_detection_2d/gem/inference_tutorial.ipynb) are available. 
#### Examples diff --git a/docs/reference/high-resolution-pose-estimation.md b/docs/reference/high-resolution-pose-estimation.md new file mode 100644 index 0000000000..b0128397ee --- /dev/null +++ b/docs/reference/high-resolution-pose-estimation.md @@ -0,0 +1,358 @@ +## high_resolution_pose_estimation module + +The *high_resolution_pose_estimation* module contains the *HighResolutionPoseEstimationLearner* class, which inherits from the abstract class *Learner*. + +### Class HighResolutionPoseEstimationLearner +Bases: `engine.learners.Learner` + +The *HighResolutionLightweightOpenPose* class is an implementation for pose estimation in high resolution images. +This method creates a heatmap of a resized version of the input image. +Using this heatmap, the input image is cropped keeping the area of interest and then it is used for pose estimation. +Since the high resolution pose estimation method is based on the Lightweight OpenPose algorithm, the models that can be used have to be trained with the Lightweight OpenPose tool. + +In this method there are two important variables which are responsible for the increase in speed and accuracy in high resolution images. +These variables are *first_pass_height* and *second_pass_height* which define how the image is resized in this procedure. + +The [HighResolutionPoseEstimationLearner](/src/opendr/perception/pose_estimation/hr_pose_estimation/high_resolution_learner.py) class has the following public methods: + +#### `HighResolutionPoseEstimationLearner` constructor +```python +HighResolutionPoseEstimationLearner(self, device, backbone, temp_path, mobilenet_use_stride, mobilenetv2_width, shufflenet_groups, num_refinement_stages, batches_per_iter, base_height, first_pass_height, second_pass_height, percentage_arround_crop, heatmap_threshold, experiment_name, num_workers, weights_only, output_name, multiscale, scales, visualize, img_mean, img_scale, pad_value, half_precision) +``` + +Constructor parameters: + +- **device**: *{'cpu', 'cuda'}, default='cuda'*\ + Specifies the device to be used. +- **backbone**: *{'mobilenet, 'mobilenetv2', 'shufflenet'}, default='mobilenet'*\ + Specifies the backbone architecture. +- **temp_path**: *str, default='temp'*\ + Specifies a path where the algorithm looks for pretrained backbone weights, the checkpoints are saved along with the logging files. + Moreover the JSON file that contains the evaluation detections is saved here. +- **mobilenet_use_stride**: *bool, default=True*\ + Whether to add a stride value in the mobilenet model, which reduces accuracy but increases inference speed. +- **mobilenetv2_width**: *[0.0 - 1.0], default=1.0*\ + If the mobilenetv2 backbone is used, this parameter specifies its size. +- **shufflenet_groups**: *int, default=3*\ + If the shufflenet backbone is used, it specifies the number of groups to be used in grouped 1x1 convolutions in each ShuffleUnit. +- **num_refinement_stages**: *int, default=2*\ + Specifies the number of pose estimation refinement stages are added on the model's head, including the initial stage. +- **batches_per_iter**: *int, default=1*\ + Specifies per how many batches a backward optimizer step is performed. +- **base_height**: *int, default=256*\ + Specifies the height, based on which the images will be resized before performing the forward pass when using the Lightweight OpenPose. +- **first_pass_height**: *int, default=360*\ + Specifies the height that the input image will be resized during the heatmap generation procedure. 
+- **second_pass_height**: *int, default=540*\ + Specifies the height of the image on the second inference for pose estimation procedure. +- **percentage_arround_crop**: *float, default=0.3*\ + Specifies the percentage of an extra pad arround the cropped image +- **heatmap_threshold**: *float, default=0.1*\ + Specifies the threshold value that the heatmap elements should have during the first pass in order to trigger the second pass +- **experiment_name**: *str, default='default'*\ + String name to attach to checkpoints. +- **num_workers**: *int, default=8*\ + Specifies the number of workers to be used by the data loader. +- **weights_only**: *bool, default=True*\ + If True, only the model weights will be loaded; it won't load optimizer, scheduler, num_iter, current_epoch information. +- **output_name**: *str, default='detections.json'*\ + The name of the json file where the evaluation detections are stored, inside the temp_path. +- **multiscale**: *bool, default=False*\ + Specifies whether evaluation will run in the predefined multiple scales setup or not. + It overwrites self.scales to [0.5, 1.0, 1.5, 2.0]. +- **scales**: *list, default=None*\ + A list of integer scales that define the multiscale evaluation setup. + Used to manually set the scales instead of going for the predefined multiscale setup. +- **visualize**: *bool, default=False*\ + Specifies whether the images along with the poses will be shown, one by one, during evaluation. +- **img_mean**: *list, default=(128, 128, 128)]*\ + Specifies the mean based on which the images are normalized. +- **img_scale**: *float, default=1/256*\ + Specifies the scale based on which the images are normalized. +- **pad_value**: *list, default=(0, 0, 0)*\ + Specifies the pad value based on which the images' width is padded. +- **half_precision**: *bool, default=False*\ + Enables inference using half (fp16) precision instead of single (fp32) precision. Valid only for GPU-based inference. + + +#### `HighResolutionPoseEstimationLearner.eval` +```python +HighResolutionPoseEstimationLearner.eval(self, dataset, silent, verbose, use_subset, subset_size, images_folder_name, annotations_filename) +``` + +This method is used to evaluate a trained model on an evaluation dataset. +Returns a dictionary containing statistics regarding evaluation. + +Parameters: + +- **dataset**: *object*\ + Object that holds the evaluation dataset. + Can be of type `ExternalDataset` or a custom dataset inheriting from `DatasetIterator`. +- **silent**: *bool, default=False*\ + If set to True, disables all printing of evaluation progress reports and other information to STDOUT. +- **verbose**: *bool, default=True*\ + If set to True, enables the maximum verbosity. +- **use_subset**: *bool, default=True*\ + If set to True, a subset of the validation dataset is created and used in evaluation. +- **subset_size**: *int, default=250*\ + Controls the size of the validation subset. +- **images_folder_name**: *str, default='val2017'*\ + Folder name that contains the dataset images. + This folder should be contained in the dataset path provided. + Note that this is a folder name, not a path. +- **annotations_filename**: *str, default='person_keypoints_val2017.json'*\ + Filename of the annotations JSON file. + This file should be contained in the dataset path provided. 
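+
+For example, a minimal evaluation sketch could look as follows (the dataset path, temp_path and model folder are placeholders; see also the inference example below):
+
+```python
+from opendr.engine.datasets import ExternalDataset
+from opendr.perception.pose_estimation import HighResolutionPoseEstimationLearner
+
+pose_estimator = HighResolutionPoseEstimationLearner(device='cuda', temp_path='./parent_dir',
+                                                     first_pass_height=360, second_pass_height=540)
+pose_estimator.download()  # download the default pretrained mobilenet model into temp_path
+pose_estimator.load("./parent_dir/openpose_default")
+
+# COCO-formatted dataset; folder and annotation names follow the defaults described above
+eval_dataset = ExternalDataset(path="./data", dataset_type="COCO")
+results = pose_estimator.eval(eval_dataset, use_subset=True, subset_size=250,
+                              images_folder_name="val2017",
+                              annotations_filename="person_keypoints_val2017.json")
+print(results)
+```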
+ +#### `HighResolutionPoseEstimation.infer` +```python +HighResolutionPoseEstimation.infer(self, img, upsample_ratio, stride, track, smooth, multiscale, visualize) +``` + +This method is used to perform pose estimation on an image. +Returns a list of `engine.target.Pose` objects, where each holds a pose, or returns an empty list if no detection were made. + +Parameters: + +- **img**: *object***\ + Object of type engine.data.Image. +- **upsample_ratio**: *int, default=4*\ + Defines the amount of upsampling to be performed on the heatmaps and PAFs when resizing. +- **stride**: *int, default=8*\ + Defines the stride value for creating a padded image. +- **track**: *bool, default=True*\ + If True, infer propagates poses ids from previous frame results to track poses. +- **smooth**: *bool, default=True*\ + If True, smoothing is performed on pose keypoints between frames. +- **multiscale**: *bool, default=False*\ + Specifies whether evaluation will run in the predefined multiple scales setup or not. + + + +#### `HighResolutionPoseEstimationLearner.__first_pass` +```python +HighResolutionPoseEstimationLearner.__first_pass(self, img) +``` + +This method is used for extracting a heatmap from the input image about human locations in the picture. + +Parameters: + +- **img**: *object***\ + Object of type engine.data.Image. + + +#### `HighResolutionPoseEstimationLearner.__second_pass` +```python +HighResolutionPoseEstimationLearner.__second_pass(self, img, net_input_height_size, max_width, stride, upsample_ratio, pad_value, img_mean, img_scale) +``` + +On this method the second inference step is carried out, which estimates the human poses on the image that is provided. +Following the steps of the proposed method this image should be the cropped part of the initial high resolution image that came out from taking into account the area of interest of the heatmap generated. + +Parameters: + +- **img**: *object***\ + Object of type engine.data.Image. +- **net_input_height_size**: *int*\ + It is the height that is used for resizing the image on the pose estimation procedure. +- **max_width**: *int*\ + It is the max width that the cropped image should have in order to keep the height-width ratio below a certain value. +- **stride**: *int*\ + Is the stride value of mobilenet which reduces accuracy but increases inference speed. +- **upsample_ratio**: *int, default=4*\ + Defines the amount of upsampling to be performed on the heatmaps and PAFs when resizing. +- **pad_value**: *list, default=(0, 0, 0)*\ + Specifies the pad value based on which the images' width is padded. +- **img_mean**: *list, default=(128, 128, 128)]*\ + Specifies the mean based on which the images are normalized. +- **img_scale**: *float, default=1/256*\ + Specifies the scale based on which the images are normalized. + + +#### `HighResolutionPoseEstimation.download` +```python +HighResolutionPoseEstimation.download(self, path, mode, verbose, url) +``` + +Download utility for various Lightweight Open Pose components. +Downloads files depending on mode and saves them in the path provided. +It supports downloading: +1. the default mobilenet pretrained model +2. mobilenet, mobilenetv2 and shufflenet weights needed for training +3. a test dataset with a single COCO image and its annotation + +Parameters: + +- **path**: *str, default=None*\ + Local path to save the files, defaults to self.temp_path if None. 
+- **mode**: *str, default="pretrained"*\
+  What file to download; can be one of "pretrained", "weights", "test_data".
+- **verbose**: *bool, default=False*\
+  Whether to print messages in the console.
+- **url**: *str, default=OpenDR FTP URL*\
+  URL of the FTP server.
+
+#### `HighResolutionPoseEstimationLearner.load`
+```python
+HighResolutionPoseEstimationLearner.load(self, path, verbose)
+```
+This method is used to load a pretrained model that has been trained with Lightweight OpenPose. The model is loaded from inside the directory of the path provided, using the metadata .json file included.
+
+Parameters:
+- **path**: *str*\
+  Path of the model to be loaded.
+- **verbose**: *bool, default=False*\
+  If set to True, prints a message on success.
+
+
+#### Examples
+
+* **Inference and result drawing example on a test .jpg image using OpenCV.**
+  ```python
+  import cv2
+  from opendr.perception.pose_estimation import HighResolutionPoseEstimationLearner
+  from opendr.perception.pose_estimation import draw
+  from opendr.engine.data import Image
+
+  pose_estimator = HighResolutionPoseEstimationLearner(device='cuda', num_refinement_stages=2,
+                                                       mobilenet_use_stride=False, half_precision=False,
+                                                       first_pass_height=360,
+                                                       second_pass_height=540)
+  pose_estimator.download()  # Download the default pretrained mobilenet model in the temp_path
+
+  pose_estimator.load("./parent_dir/openpose_default")
+  pose_estimator.download(mode="test_data")  # Download test data taken from COCO2017
+
+  img = Image.open('./parent_dir/dataset/image/000000000785_1080.jpg')
+  orig_img = img.opencv()  # Keep original image
+  current_poses = pose_estimator.infer(img)
+  img_opencv = img.opencv()
+  for pose in current_poses:
+      draw(img_opencv, pose)
+  img_opencv = cv2.addWeighted(orig_img, 0.6, img_opencv, 0.4, 0)
+  cv2.imshow('Result', img_opencv)
+  cv2.waitKey(0)
+  ```
+
+
+#### Performance Evaluation
+
+
+To assess the performance of the *HighResolutionPoseEstimationLearner*, it has been tested on various platforms and with the different optimizations that Lightweight OpenPose offers.
+The experiments are conducted on a 1080p image.
+
+
+#### Lightweight OpenPose with resizing to 256 pixels
+| **Method**        | **CPU i7-9700K (FPS)** | **RTX 2070 (FPS)** | **Jetson TX2 (FPS)** | **Xavier NX (FPS)** |
+|:-----------------:|------------------------|--------------------|----------------------|---------------------|
+| OpenDR - Baseline | 0.9                    | 46.3               | 4.6                  | 6.4                 |
+| OpenDR - Full     | 2.9                    | 83.1               | 11.2                 | 13.5                |
+
+
+#### Lightweight OpenPose without resizing
+| Method            | CPU i7-9700K (FPS) | RTX 2070 (FPS) | Jetson TX2 (FPS) | Xavier NX (FPS) |
+|-------------------|--------------------|----------------|------------------|-----------------|
+| OpenDR - Baseline | 0.05               | 2.6            | 0.3              | 0.5             |
+| OpenDR - Full     | 0.2                | 10.8           | 1.4              | 3.1             |
+
+
+#### High-Resolution Pose Estimation
+| Method                 | CPU i7-9700K (FPS) | RTX 2070 (FPS) | Jetson TX2 (FPS) | Xavier NX (FPS) |
+|------------------------|--------------------|----------------|------------------|-----------------|
+| HRPoseEstim - Baseline | 2.3                | 13.6           | 1.4              | 1.8             |
+| HRPoseEstim - Half     | 2.7                | 16.1           | 1.3              | 3.0             |
+| HRPoseEstim - Stride   | 8.1                | 27.0           | 4                | 4.9             |
+| HRPoseEstim - Stages   | 3.7                | 16.5           | 1.9              | 2.7             |
+| HRPoseEstim - H+S      | 8.2                | 25.9           | 3.6              | 5.5             |
+| HRPoseEstim - Full     | 10.9               | 31.7           | 4.8              | 6.9             |
+
+As shown in the previous tables, OpenDR Lightweight OpenPose achieves higher FPS when the input image is resized to 256 pixels.
+Such a low-resolution image is easier to process, but, as shown in the next tables, the method's accuracy collapses and in many cases no detections are made at all.
+
+We have evaluated the effect of using different inference settings, namely:
+- *HRPoseEstim - Baseline*, which refers to directly using the High-Resolution Pose Estimation method, which is based on Lightweight OpenPose,
+- *HRPoseEstim - Half*, which refers to enabling inference in half (FP16) precision,
+- *HRPoseEstim - Stride*, which refers to increasing the stride by two in the input layer of the model,
+- *HRPoseEstim - Stages*, which refers to removing the refinement stages,
+- *HRPoseEstim - H+S*, which uses both half precision and increased stride, and
+- *HRPoseEstim - Full*, which refers to combining all three available optimizations.
+
+The average precision and average recall on the COCO evaluation split are also reported in the tables below:
+
+
+#### Lightweight OpenPose with resizing
+| Method            | Average Precision (IoU=0.50) | Average Recall (IoU=0.50) |
+|-------------------|------------------------------|---------------------------|
+| OpenDR - Baseline | 0.101                        | 0.267                     |
+| OpenDR - Full     | 0.031                        | 0.044                     |
+
+
+
+
+#### Lightweight OpenPose without resizing
+| Method            | Average Precision (IoU=0.50) | Average Recall (IoU=0.50) |
+|-------------------|------------------------------|---------------------------|
+| OpenDR - Baseline | 0.695                        | 0.749                     |
+| OpenDR - Full     | 0.389                        | 0.441                     |
+
+
+
+#### High-Resolution Pose Estimation
+| Method                 | Average Precision (IoU=0.50) | Average Recall (IoU=0.50) |
+|------------------------|------------------------------|---------------------------|
+| HRPoseEstim - Baseline | 0.615                        | 0.637                     |
+| HRPoseEstim - Half     | 0.604                        | 0.621                     |
+| HRPoseEstim - Stride   | 0.262                        | 0.274                     |
+| HRPoseEstim - Stages   | 0.539                        | 0.562                     |
+| HRPoseEstim - H+S      | 0.254                        | 0.267                     |
+| HRPoseEstim - Full     | 0.259                        | 0.272                     |
+
+The average precision and the average recall have also been calculated on a 1080p version of the COCO2017 validation dataset, and the results are reported in the table below:
+
+| Method                 | Average Precision (IoU=0.50) | Average Recall (IoU=0.50) |
+|------------------------|------------------------------|---------------------------|
+| HRPoseEstim - Baseline | 0.518                        | 0.536                     |
+| HRPoseEstim - Half     | 0.509                        | 0.520                     |
+| HRPoseEstim - Stride   | 0.143                        | 0.149                     |
+| HRPoseEstim - Stages   | 0.474                        | 0.496                     |
+| HRPoseEstim - H+S      | 0.134                        | 0.139                     |
+| HRPoseEstim - Full     | 0.141                        | 0.150                     |
+
+For measuring the precision and recall we used the standard approach proposed for COCO, using an Intersection over Union (IoU) metric at 0.5.
+
+
+#### Notes
+
+For the metrics of the algorithm, the COCO dataset evaluation scores are used, as explained [here](https://cocodataset.org/#keypoints-eval).
+
+Keypoints and the way poses are constructed follow the original method described [here](https://github.com/Daniil-Osokin/lightweight-human-pose-estimation.pytorch/blob/master/TRAIN-ON-CUSTOM-DATASET.md).
+
+Pose keypoint ids are matched as follows:
+
+| Keypoint ID | Keypoint name | Keypoint abbrev.
| +|------------- |---------------- |------------------ | +| 0 | nose | nose | +| 1 | neck | neck | +| 2 | right shoulder | r_sho | +| 3 | right elbow | r_elb | +| 4 | right wrist | r_wri | +| 5 | left shoulder | l_sho | +| 6 | left elbow | l_elb | +| 7 | left wrist | l_wri | +| 8 | right hip | r_hip | +| 9 | right knee | r_knee | +| 10 | right ankle | r_ank | +| 11 | left hip | l_hip | +| 12 | left knee | l_knee | +| 13 | left ankle | l_ank | +| 14 | right eye | r_eye | +| 15 | left eye | l_eye | +| 16 | right ear | r_ear | +| 17 | left ear | l_ear | + + +#### References +[1] OpenPose: Realtime Multi-Person 2D Pose Estimation using Part Affinity Fields, [arXiv](https://arxiv.org/abs/1812.08008). +[2] Real-time 2D Multi-Person Pose Estimation on CPU: Lightweight OpenPose, [arXiv](https://arxiv.org/abs/1811.12004). diff --git a/docs/reference/human-model-generation.md b/docs/reference/human-model-generation.md index 8bd3997cb8..71bac046de 100644 --- a/docs/reference/human-model-generation.md +++ b/docs/reference/human-model-generation.md @@ -77,7 +77,7 @@ Documentation on how to use this node can be found [here](../../projects/opendr_ #### Tutorials and Demos A demo in the form of a Jupyter Notebook is available -[here](../../projects/simulation/human_model_generation/demos/model_generation.ipynb). +[here](../../projects/python/simulation/human_model_generation/demos/model_generation.ipynb). #### Example @@ -95,8 +95,8 @@ A demo in the form of a Jupyter Notebook is available OPENDR_HOME = os.environ["OPENDR_HOME"] # We load a full-body image of a human as well as an image depicting its corresponding silhouette. - rgb_img = Image.open(os.path.join(OPENDR_HOME, 'projects/simulation/human_model_generation/demos', 'imgs_input/rgb/result_0004.jpg')) - msk_img = Image.open(os.path.join(OPENDR_HOME, 'projects/simulation/human_model_generation/demos', 'imgs_input/msk/result_0004.jpg')) + rgb_img = Image.open(os.path.join(OPENDR_HOME, 'projects/python/simulation/human_model_generation/demos', 'imgs_input/rgb/result_0004.jpg')) + msk_img = Image.open(os.path.join(OPENDR_HOME, 'projects/python/simulation/human_model_generation/demos', 'imgs_input/msk/result_0004.jpg')) # We initialize learner. Using the infer method, we generate human 3D model. model_generator = PIFuGeneratorLearner(device='cuda', checkpoint_dir='./temp') diff --git a/docs/reference/image_based_facial_emotion_estimation.md b/docs/reference/image_based_facial_emotion_estimation.md new file mode 100644 index 0000000000..11f4b8acbf --- /dev/null +++ b/docs/reference/image_based_facial_emotion_estimation.md @@ -0,0 +1,331 @@ +## image_based_facial_emotion_estimation module + +The *image_based_facial_emotion_estimation* module contains the *FacialEmotionLearner* class, which inherits from the abstract class *Learner*. + +### Class FacialEmotionLearner +Bases: `engine.learners.Learner` + +The *FacialEmotionLearner* class is an implementation of the state-of-the-art method ESR [[1]](#1) for efficient facial feature learning with wide ensemble-based convolutional neural networks. +An ESR consists of two building blocks. +(1) The base of the network is an array of convolutional layers for low- and middle-level feature learning. +(2) These informative features are then shared with independent convolutional branches that constitute the ensemble. +From this point, each branch can learn distinctive features while competing for a common resource - the shared layers. 
+The [FacialEmotionLearner](/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/facial_emotion_learner.py) class has the following public methods:
+
+
+#### `FacialEmotionLearner` constructor
+```python
+FacialEmotionLearner(self, lr, batch_size, temp_path, device, device_ind, validation_interval,
+                     max_training_epoch, momentum, ensemble_size, base_path_experiment, name_experiment, dimensional_finetune, categorical_train,
+                     base_path_to_dataset, max_tuning_epoch, diversify)
+```
+
+Constructor parameters:
+
+- **lr**: *float, default=0.1*\
+  Specifies the initial learning rate to be used during training.
+- **batch_size**: *int, default=32*\
+  Specifies the number of samples to be bundled up in a batch during training.
+  This heavily affects memory usage, adjust according to your system.
+- **temp_path**: *str, default='temp'*\
+  Specifies a path where the algorithm saves the checkpoints and the ONNX optimized model (if needed).
+- **device**: *{'cpu', 'cuda'}, default='cuda'*\
+  Specifies the device to be used.
+- **device_ind**: *list, default=[0]*\
+  List of GPU indices to be used if the device is 'cuda'.
+- **validation_interval**: *int, default=1*\
+  Specifies the validation interval.
+- **max_training_epoch**: *int, default=2*\
+  Specifies the maximum number of epochs the training should run for.
+- **momentum**: *float, default=0.9*\
+  Specifies the momentum value used by the optimizer.
+- **ensemble_size**: *int, default=9*\
+  Specifies the number of ensemble branches in the model.
+- **base_path_experiment**: *str, default='./experiments/'*\
+  Specifies the path in which the experimental results will be saved.
+- **name_experiment**: *str, default='esr_9'*\
+  String name used for saving checkpoints.
+- **dimensional_finetune**: *bool, default=True*\
+  Specifies if the model should be fine-tuned on dimensional data or not.
+- **categorical_train**: *bool, default=False*\
+  Specifies if the model should be trained on categorical data or not.
+- **base_path_to_dataset**: *str, default='./data/AffectNet'*\
+  Specifies the dataset path.
+- **max_tuning_epoch**: *int, default=1*\
+  Specifies the maximum number of epochs the model should be fine-tuned on dimensional data.
+- **diversify**: *bool, default=False*\
+  Specifies if the learner diversifies the features of the different branches or not.
+
+#### `FacialEmotionLearner.fit`
+```python
+FacialEmotionLearner.fit(self)
+```
+
+This method is used for training the algorithm on a train dataset and validating on a val dataset.
+
+
+#### `FacialEmotionLearner.eval`
+```python
+FacialEmotionLearner.eval(self, eval_type, current_branch_on_training)
+```
+
+This method is used to evaluate a trained model on an evaluation dataset.
+Returns a dictionary containing stats regarding evaluation.
+
+Parameters:
+
+- **eval_type**: *str, default='categorical'*\
+  Specifies the type of data that the model is evaluated on.
+  It can be either categorical or dimensional data.
+- **current_branch_on_training**: *int, default=0*\
+  Specifies the index of the trained branch that should be evaluated on the validation data.
+
+
+#### `FacialEmotionLearner.init_model`
+```python
+FacialEmotionLearner.init_model(self, num_branches)
+```
+
+This method is used to initialize the model.
+
+Parameters:
+
+- **num_branches**: *int*\
+  Specifies the number of ensemble branches in the model. The ESR-9 model is built with 9 branches by default.
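+
+As a rough usage sketch of the methods above (assuming the AffectNet data has been prepared as described in the Data preparation section further down; the chosen hyperparameter values are only illustrative), training and evaluation can be combined as follows:
+
+```python
+from opendr.perception.facial_expression_recognition import FacialEmotionLearner
+
+learner = FacialEmotionLearner(device="cuda", batch_size=32, max_training_epoch=2, ensemble_size=9,
+                               categorical_train=True, dimensional_finetune=True,
+                               base_path_to_dataset='./data/AffectNet')
+learner.fit()  # train on the categorical data and fine-tune on the dimensional data
+results = learner.eval(eval_type='categorical', current_branch_on_training=0)
+print(results)  # dictionary with evaluation stats
+```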
+ +#### `FacialEmotionLearner.infer` +```python +FacialEmotionLearner.infer(self, input_batch) +``` + +This method is used to perform inference on an image or a batch of images. +It returns dimensional emotion results and also the categorical emotion results as an object of `engine.target.Category` if a proper input object `engine.data.Image` is given. + +Parameters: + +- **input_batch**: *object*** + Object of type `engine.data.Image`. It also can be a list of Image objects, or a Torch tensor which will be converted to Image object. + +#### `FacialEmotionLearner.save` +```python +FacialEmotionLearner.save(self, state_dicts, base_path_to_save_model) +``` +This method is used to save a trained model. +Provided with the path (absolute or relative), it creates the "path" directory, if it does not already exist. +Inside this folder, the model is saved as "model_name.pt" and the metadata file as "model_name.json". If the directory already exists, the "model_name.pt" and "model_name.json" files are overwritten. + +If [`self.optimize`](#FacialEmotionLearner.optimize) was run previously, it saves the optimized ONNX model in a similar fashion with an ".onnx" extension, by copying it from the self.temp_path it was saved previously during conversion. + +Parameters: + +- **state_dicts**: *object*\ + Object of type Python dictionary containing the trained model weights. +- **base_path_to_save_model**: *str*\ + Specifies the path in which the model will be saved. + +#### `FacialEmotionLearner.load` +```python +FacialEmotionLearner.load(self, ensemble_size, path_to_saved_network, file_name_base_network, + file_name_conv_branch, fix_backbone) +``` + +Loads the model from inside the directory of the path provided, using the metadata .json file included. + +Parameters: + +- **ensemble_size**: *int, default=9*\ + Specifies the number of ensemble branches in the model for which the pretrained weights should be loaded. +- **path_to_saved_network**: *str, default="./trained_models/esr_9"*\ + Path of the model to be loaded. +- **file_name_base_network**: *str, default="Net-Base-Shared_Representations.pt"*\ + The file name of the base network to be loaded. +- **file_name_conv_branch**: *str, default="Net-Branch_{}.pt"*\ + The file name of the ensemble branch network to be loaded. +- **fix_backbone**: *bool*\ + If true, all the model weights except the classifier are fixed so that the last layers' weights are fine-tuned on dimensional data. + Otherwise, all the model weights will be trained from scratch. + + +#### `FacialEmotionLearner.optimize` +```python +FacialEmotionLearner.optimize(self, do_constant_folding) +``` + +This method is used to optimize a trained model to ONNX format which can be then used for inference. + +Parameters: + +- **do_constant_folding**: *bool, default=False*\ + ONNX format optimization. + If True, the constant-folding optimization is applied to the model during export. + + +#### `FacialEmotionLearner.download` +```python +@staticmethod +FacialEmotionLearner.download(self, path, mode, url) +``` + +Downloads data and saves them in the path provided. + +Parameters: + +- **path**: *str, default=None*\ + Local path to save the files, defaults to `self.temp_dir` if None. +- **mode**: *str, default="data"*\ + What file to download, can be "data". +- **url**: *str, default=opendr FTP URL*\ + URL of the FTP server. 
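+
+As a quick illustration of the `infer` input and output types, a single-image sketch could look as follows; it assumes pretrained ESR-9 weights are available locally (see the Pre-trained models section below), and both the weight path and the image path are assumptions to adapt to your setup:
+
+```python
+from opendr.engine.data import Image
+from opendr.perception.facial_expression_recognition import FacialEmotionLearner
+
+learner = FacialEmotionLearner(device="cpu", ensemble_size=9)
+# Assumed local directory holding the pretrained ESR-9 weights
+learner.load(ensemble_size=9, path_to_saved_network="./trained_models/esr_9", fix_backbone=True)
+
+img = Image.open("./face.jpg")  # hypothetical path to a face image
+emotion_results, dimension_results = learner.infer(img)  # categorical and dimensional results
+print(emotion_results, dimension_results)
+```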
+ + +#### Data preparation + Download the [AffectNet](http://mohammadmahoor.com/affectnet/) [[2]](https://www.computer.org/csdl/magazine/mu/2012/03/mmu2012030034/13rRUxjQyrW) dataset, and organize it in the following structure: + ``` + AffectNet/ + Training_Labeled/ + 0/ + 1/ + ... + n/ + Training_Unlabeled/ + 0/ + 1/ + ... + n/ + Validation/ + 0/ + 1/ + ... + n/ + ``` + In order to do that, you need to run the following function: + ```python + from opendr.perception.facial_expression_recognition.image_based_facial_emotion_estimation.algorithm.utils import datasets + datasets.pre_process_affect_net(base_path_to_images, base_path_to_annotations, base_destination_path, set_index) + ``` + This pre-processes the AffectNet dataset by cropping and resizing the images into 96 x 96 pixels, and organizing them in folders with 500 images each. + Each image is renamed to follow the pattern "[id][emotion_idx][valence times 1000]_[arousal times 1000].jpg". + +#### Pre-trained models + +The pretrained models on AffectNet Categorical dataset are provided by [[1]](#1) which can be found [here](https://github.com/siqueira-hc/Efficient-Facial-Feature-Learning-with-Wide-Ensemble-based-Convolutional-Neural-Networks/tree/master/model/ml/trained_models/esr_9). +**Please note that the pretrained weights cannot be used for commercial purposes.** + +#### Examples + +* **Train the ensemble model on AffectNet Categorical dataset and then fine-tune it on the AffectNet dimensional dataset** + The training and evaluation dataset should be present in the path provided. + The `batch_size` argument should be adjusted according to available memory. + + ```python + from opendr.perception.facial_expression_recognition import FacialEmotionLearner + + learner = FacialEmotionLearner(device="cpu", temp_path='./tmp', + batch_size=2, max_training_epoch=1, ensemble_size=1, + name_experiment='esr_9', base_path_experiment='./experiments/', + lr=1e-1, categorical_train=True, dimensional_finetune=True, + base_path_to_dataset='./data', max_tuning_epoch=1) + learner.fit() + learner.save(state_dicts=learner.model.to_state_dict(), + base_path_to_save_model=learner.base_path_experiment, + current_branch_save=8) + ``` + +* **Inference on a batch of images** + ```python + from opendr.perception.facial_expression_recognition import FacialEmotionLearner + from torch.utils.data import DataLoader + + learner = FacialEmotionLearner(device="cpu", temp_path='./tmp', + batch_size=2, max_training_epoch=1, ensemble_size=1, + name_experiment='esr_9', base_path_experiment='./experiments/', + lr=1e-1, categorical_train=True, dimensional_finetune=True, + base_path_to_dataset='./data', max_tuning_epoch=1) + + # Download the validation data + dataset_path = learner.download(mode='data') + val_data = datasets.AffectNetCategorical(idx_set=2, + max_loaded_images_per_label=2, + transforms=None, + is_norm_by_mean_std=False, + base_path_to_affectnet=learner.dataset_path) + + val_loader = DataLoader(val_data, batch_size=32, shuffle=False, num_workers=8) + batch = next(iter(val_loader))[0] + learner.load(learner.ensemble_size, path_to_saved_network=learner.base_path_experiment, fix_backbone=True) + ensemble_emotion_results, ensemble_dimension_results = learner.infer(batch[0]) + ``` + +* **Optimization example for a previously trained model** + Inference can be run with the trained model after running self.optimize. 
+ ```python + from opendr.perception.facial_expression_recognition import FacialEmotionLearner + + learner = FacialEmotionLearner(device="cpu", temp_path='./tmp', + batch_size=2, max_training_epoch=1, ensemble_size=1, + name_experiment='esr_9', base_path_experiment='./experiments/', + lr=1e-1, categorical_train=True, dimensional_finetune=True, + base_path_to_dataset='./data', max_tuning_epoch=1) + + + learner.load(learner.ensemble_size, path_to_saved_network=learner.base_path_experiment, fix_backbone=True) + learner.optimize(do_constant_folding=True) + learner.save(path='./parent_dir/optimized_model', model_name='optimized_pstbln') + ``` + + +#### Performance Evaluation + +The tests were conducted on the following computational devices: +- Intel(R) Xeon(R) Gold 6230R CPU on server +- Nvidia Jetson TX2 +- Nvidia Jetson Xavier AGX +- Nvidia RTX 2080 Ti GPU on server with Intel Xeon Gold processors + + +Inference time is measured as the time taken to transfer the input to the model (e.g., from CPU to GPU), run inference using the algorithm, and return results to CPU. +The ESR and its extension diversified_ESR denoted as ESR*, which learns diversified feature representations to improve the model generalisation, are implemented in *FacialEmotionLearner*. +The ESR-n and ESR*-n denote the ESR and diversified-ESR models with #n ensemble branches, respectively + +The model can receive either single images as input or a video, which can be captured by webcam, and perform the prediction frame-by-frame. + +We report speed (single sample per inference) as the mean of 100 runs, and the energy (Joules) on embedded devices. +The noted memory is the maximum allocated memory on GPU during inference. + +| Method | Acc. (%) | Params (M) | Mem. (MB) | +|--------------|----------|------------|-----------| +| ESR-9 | 87.17 | 20.35 | 402.99 | +| ESR-15 | 88.59 | 33.67 | 455.61 | +| ESR*-9 | 89.15 | 20.83 | 406.83 | +| ESR*-15 | 89.34 | 34.47 | 460.73 | + +The inference speed (evaluations/second) of both learners on various computational devices are as follows: + +| Method | CPU | Jetson TX2 | Jetson Xavier | RTX 2080 Ti | +|--------------|-------|------------|---------------|-------------| +| ESR-9 | 22.23 | 27.08 | 28.79 | 117.91 | +| ESR-15 | 13.86 | 17.76 | 18.17 | 91.78 | +| ESR*-9 | 5.24 | 6.60 | 12.45 | 33.40 | +| ESR*-15 | 3.38 | 4.18 | 8.47 | 20.57 | + +Energy (Joules) of both learners’ inference on embedded devices is shown in the following: + +| Method | Jetson TX2 | Jetson Xavier | +|---------|------------|---------------| +| ESR-9 | 0.96 | 0.67 | +| ESR-15 | 1.16 | 0.93 | +| ESR*-9 | 3.38 | 1.41 | +| ESR*-15 | 6.26 | 2.51 | + + + + +## References + +[1] +[Siqueira, Henrique, Sven Magg, and Stefan Wermter. "Efficient facial feature learning with wide ensemble-based convolutional neural networks." Proceedings of the AAAI conference on artificial intelligence. Vol. 34. No. 04. 2020.]( +https://ojs.aaai.org/index.php/AAAI/article/view/6037) + +[2] +[Mollahosseini, Ali, Behzad Hasani, and Mohammad H. Mahoor. "Affectnet: A database for facial expression, valence, and arousal computing in the wild." 
IEEE Transactions on Affective Computing 10.1 (2017): 18-31.]( +https://ieeexplore.ieee.org/abstract/document/8013713) diff --git a/docs/reference/images/hand_gesture_examples.png b/docs/reference/images/hand_gesture_examples.png new file mode 100644 index 0000000000..b9d0a88d55 Binary files /dev/null and b/docs/reference/images/hand_gesture_examples.png differ diff --git a/docs/reference/index.md b/docs/reference/index.md index b7061793da..cf0ea9c1ff 100644 --- a/docs/reference/index.md +++ b/docs/reference/index.md @@ -1,6 +1,6 @@ # OpenDR Toolkit Reference Manual -*Release 1.1* +*Release 2.0.0*
@@ -16,6 +16,8 @@ Neither the copyright holder nor any applicable licensor will be liable for any ## Table of Contents +- [Installation](/docs/reference/installation.md) +- [Customization](/docs/reference/customize.md) - Inference and Training API - `engine` Module - [engine.data Module](engine-data.md) @@ -26,29 +28,36 @@ Neither the copyright holder nor any applicable licensor will be liable for any - [face_recognition_learner Module](face-recognition.md) - facial expression recognition: - [landmark_based_facial_expression_recognition](landmark-based-facial-expression-recognition.md) + - [image_based_facial_emotion_estimation](image_based_facial_emotion_estimation.md) - pose estimation: - [lightweight_open_pose Module](lightweight-open-pose.md) + - [high_resolution_pose_estimation Module](high-resolution-pose-estimation.md) - activity recognition: - - [activity_recognition Module](activity-recognition.md) - - action recognition: - - [skeleton_based_action_recognition](skeleton-based-action-recognition.md) + - [skeleton-based action recognition](skeleton-based-action-recognition.md) + - [continual skeleton-based action recognition Module](skeleton-based-action-recognition.md#class-costgcnlearner) + - [x3d Module](activity-recognition.md#class-x3dlearner) + - [continual x3d Module](activity-recognition.md#class-cox3dlearner) + - [continual transformer encoder Module](continual-transformer-encoder.md) - speech recognition: - [matchboxnet Module](matchboxnet.md) - [edgespeechnets Module](edgespeechnets.md) - [quadraticselfonn Module](quadratic-selfonn.md) - object detection 2d: + - [nanodet Module](nanodet.md) - [detr Module](detr.md) - [gem Module](gem.md) - [retinaface Module](face-detection-2d-retinaface.md) - [centernet Module](object-detection-2d-centernet.md) - [ssd Module](object-detection-2d-ssd.md) - [yolov3 Module](object-detection-2d-yolov3.md) + - [yolov5 Module](object-detection-2d-yolov5.md) - [seq2seq-nms Module](object-detection-2d-nms-seq2seq_nms.md) - object detection 3d: - [voxel Module](voxel-object-detection-3d.md) - object tracking 2d: - [fair_mot Module](object-tracking-2d-fair-mot.md) - [deep_sort Module](object-tracking-2d-deep-sort.md) + - [siamrpn Module](object-tracking-2d-siamrpn.md) - object tracking 3d: - [ab3dmot Module](object-tracking-3d-ab3dmot.md) - multimodal human centric: @@ -77,9 +86,10 @@ Neither the copyright holder nor any applicable licensor will be liable for any - [human_model_generation Module](human-model-generation.md) - `utils` Module - [Hyperparameter Tuning Module](hyperparameter_tuner.md) + - [Ambiguity Measure Module](ambiguity_measure.md) - `Stand-alone Utility Frameworks` - [Engine Agnostic Gym Environment with Reactive extension (EAGERx)](eagerx.md) -- [ROSBridge Package](rosbridge.md) +- [ROS Bridge Package](opendr-ros-bridge.md) - [C Inference API](c-api.md) - [data.h](c-data-h.md) - [target.h](c-target-h.md) @@ -89,48 +99,54 @@ Neither the copyright holder nor any applicable licensor will be liable for any - `C API` Module - [face recognition Demo](/projects/c_api) - `control` Module - - [mobile_manipulation Demo](/projects/control/mobile_manipulation) - - [single_demo_grasp Demo](/projects/control/single_demo_grasp) + - [mobile_manipulation Demo](/projects/python/control/mobile_manipulation) + - [single_demo_grasp Demo](/projects/python/control/single_demo_grasp) - `opendr workspace` Module - [opendr_ws](/projects/opendr_ws) - `perception` Module - activity recognition: - - [activity_recognition 
Demo](/projects/perception/activity_recognition/demos/online_recognition) + - [activity_recognition Demo](/projects/python/perception/activity_recognition/demos/online_recognition) - face recognition: - - [face_recognition_Demo](/projects/perception/face_recognition) + - [face_recognition_Demo](/projects/python/perception/face_recognition) - facial expression recognition: - - [landmark_based_facial_expression_recognition Demo](/projects/perception/facial_expression_recognition/landmark_based_facial_expression_recognition) + - [landmark_based_facial_expression_recognition Demo](/projects/python/perception/facial_expression_recognition/landmark_based_facial_expression_recognition) + - [image_based_facial_emotion_estimation Demo](/projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation) - heart anomaly detection: - - [heart anomaly detection Demo](/projects/perception/heart_anomaly_detection) + - [heart anomaly detection Demo](/projects/python/perception/heart_anomaly_detection) - pose estimation: - - [lightweight_open_pose Demo](/projects/perception/lightweight_open_pose) + - [lightweight_open_pose Demo](/projects/python/perception/pose_estimation/lightweight_open_pose) + - [high_resolution_pose_estimation Demo](/projects/python/perception/pose_estimation/high_resolution_pose_estimation) - multimodal human centric: - - [rgbd_hand_gesture_learner Demo](/projects/perception/multimodal_human_centric/rgbd_hand_gesture_recognition) - - [audiovisual_emotion_recognition Demo](/projects/perception/multimodal_human_centric/audiovisual_emotion_recognition) + - [rgbd_hand_gesture_learner Demo](/projects/python/perception/multimodal_human_centric/rgbd_hand_gesture_recognition) + - [audiovisual_emotion_recognition Demo](/projects/python/perception/multimodal_human_centric/audiovisual_emotion_recognition) - object detection 2d: - - [detr Demo](/projects/perception/object_detection_2d/detr) - - [gem Demo](/projects/perception/object_detection_2d/gem) - - [retinaface Demo](/projects/perception/object_detection_2d/retinaface) - - [centernet Demo](/projects/perception/object_detection_2d/centernet) - - [ssd Demo](/projects/perception/object_detection_2d/ssd) - - [yolov3 Demo](/projects/perception/object_detection_2d/yolov3) - - [seq2seq-nms Demo](/projects/perception/object_detection_2d/nms/seq2seq-nms) + - [nanodet Demo](/projects/python/perception/object_detection_2d/nanodet) + - [detr Demo](/projects/python/perception/object_detection_2d/detr) + - [gem Demo](/projects/python/perception/object_detection_2d/gem) + - [retinaface Demo](/projects/python/perception/object_detection_2d/retinaface) + - [centernet Demo](/projects/python/perception/object_detection_2d/centernet) + - [ssd Demo](/projects/python/perception/object_detection_2d/ssd) + - [yolov3 Demo](/projects/python/perception/object_detection_2d/yolov3) + [yolov5 Demo](/projects/python/perception/object_detection_2d/yolov5) + - [seq2seq-nms Demo](/projects/python/perception/object_detection_2d/nms/seq2seq-nms) - object detection 3d: - - [voxel Demo](/projects/perception/object_detection_3d/demos/voxel_object_detection_3d) + - [voxel Demo](/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d) - object tracking 2d: - - [fair_mot Demo](/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort) + - [fair_mot Demo](/projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort) + - [siamrpn Demo](/projects/python/perception/object_tracking_2d/demos/siamrpn) - panoptic 
segmentation: - - [efficient_ps Demo](/projects/perception/panoptic_segmentation/efficient_ps) + - [efficient_ps Demo](/projects/python/perception/panoptic_segmentation/efficient_ps) - semantic segmentation: - - [bisnet Demo](/projects/perception/semantic_segmentation/bisenet) + - [bisnet Demo](/projects/python/perception/semantic_segmentation/bisenet) - action recognition: - - [skeleton_based_action_recognition Demo](/projects/perception/skeleton_based_action_recognition) + - [skeleton_based_action_recognition Demo](/projects/python/perception/skeleton_based_action_recognition) - fall detection: - - [fall_detection Demo](/projects/perception/fall_detection.md) - - [full_map_posterior_slam Module](/projects/perception/slam/full_map_posterior_gmapping) + - [fall_detection Demo](/projects/python/perception/fall_detection.md) + - [full_map_posterior_slam Module](/projects/python/perception/slam/full_map_posterior_gmapping) - `simulation` Module - - [SMPL+D Human Models Dataset](/projects/simulation/SMPL%2BD_human_models) - - [Human-Data-Generation-Framework](/projects/simulation/human_dataset_generation) - - [Human Model Generation Demos](/projects/simulation/human_dataset_generation) + - [SMPL+D Human Models Dataset](/projects/python/simulation/SMPL%2BD_human_models) + - [Human-Data-Generation-Framework](/projects/python/simulation/human_dataset_generation) + - [Human Model Generation Demos](/projects/python/simulation/human_dataset_generation) - `utils` Module - - [Hyperparameter Tuning Module](/projects/utils/hyperparameter_tuner) + - [Hyperparameter Tuning Module](/projects/python/utils/hyperparameter_tuner) +- [Known Issues](/docs/reference/issues.md) diff --git a/docs/reference/installation.md b/docs/reference/installation.md index 1eb9042ee3..12383747a6 100644 --- a/docs/reference/installation.md +++ b/docs/reference/installation.md @@ -1,68 +1,29 @@ # Installing OpenDR toolkit OpenDR can be installed in the following ways: -1. By cloning this repository (CPU/GPU support) -2. Using *pip* (CPU/GPU support) -3. Using *docker* (CPU/GPU support) +1. Using *pip* (CPU/GPU support) +2. Using *docker* (CPU/GPU support) +3. By cloning this repository (CPU/GPU support, for advanced users only) The following table summarizes the installation options based on your system architecture and OS: -| Installation Method | CPU/GPU | OS | -|---------------------|----------|-----------------------| -| Clone & Install | Both | Ubuntu 20.04 (x86-64) | -| pip | Both | Ubuntu 20.04 (x86-64) | -| docker | Both | Linux / Windows | +| Installation Method | OS | +|-----------------------|-----------------------| +| Clone & Install | Ubuntu 20.04 (x86-64) | +| pip | Ubuntu 20.04 (x86-64) | +| docker | Linux / Windows | +Note that pip installation includes only the Python API of the toolkit. +If you need to use all the functionalities of the toolkit (e.g., ROS nodes, etc.), then you need either to use the pre-compiled docker images or to follow the installation instructions for cloning and building the toolkit. -# Installing by cloning OpenDR repository (Ubuntu 20.04, x86, architecture) - -This is the recommended way of installing the whole toolkit, since it allows for fully exploiting all the provided functionalities. -To install the toolkit, please first make sure that you have `git` available on your system. 
-```bash -sudo apt install git -``` -Then, clone the toolkit: -```bash -git clone --depth 1 --recurse-submodules -j8 https://github.com/opendr-eu/opendr -``` -You are then ready to install the toolkit: -```bash -cd opendr -./bin/install.sh -``` -The installation script automatically installs all the required dependencies. -Note that this might take a while (~10-20min depending on your machine and network connection), while the script also makes system-wide changes. -Using dockerfiles is strongly advised (please see below), unless you know what you are doing. -Please also make sure that you have enough RAM available for the installation (about 4GB of free RAM is needed for the full installation/compilation). - - -If you want to install GPU-related dependencies, then you can appropriately set the `OPENDR_DEVICE` variable. -The toolkit defaults to using CPU. -Therefore, if you want to use GPU, please set this variable accordingly *before* running the installation script: -```bash -export OPENDR_DEVICE=gpu -``` -The installation script creates a *virtualenv*, where the toolkit is installed. -To activate OpenDR environment you can just source the `activate.sh`: -```bash -source ./bin/activate.sh -``` -Then, you are ready to use the toolkit! - -**NOTE:** `OPENDR_DEVICE` does not alter the inference/training device at *runtime*. -It only affects the dependency installation. -You can use OpenDR API to change the inference device. - -You can also verify the installation by using the supplied Python and C unit tests: +The toolkit is developed and tested on *Ubuntu 20.04 (x86-64)*. +Please make sure that you have the most recent version of all tools by running ```bash -make unittest -make ctests +sudo apt upgrade ``` - -If you plan to use GPU-enabled functionalities, then you are advised to install [CUDA 11.2](https://developer.nvidia.com/cuda-11.2.0-download-archive), along with [CuDNN](https://developer.nvidia.com/cudnn). - -**HINT:** All tests probe for the `TEST_DEVICE` enviromental variable when running. -If this enviromental variable is set during testing, it allows for easily running all tests on a different device (e.g., setting `TEST_DEVICE=cuda:0` runs all tests on the first GPU of the system). +before installing the toolkit and then follow the installation instructions in the relevant section. +All the required dependencies will be automatically installed (or explicit instructions are provided). +Other platforms apart from Ubuntu 20.04, e.g., Windows, other Linux distributions, etc., are currently supported through docker images. # Installing using *pip* @@ -71,7 +32,7 @@ If this enviromental variable is set during testing, it allows for easily runnin You can directly install the Python API of the OpenDR toolkit using pip. First, install the required dependencies: ```bash -sudo apt install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev libeigen3-dev +sudo apt install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev libeigen3-dev python3 -m venv venv source venv/bin/activate pip install wheel @@ -88,12 +49,12 @@ If you have a CPU that does not support AVX2, the please also `export DISABLE_BC This is not needed for newer CPUs. ## Enabling GPU-acceleration -The same OpenDR package is used for both CPU and GPU systems. +The same OpenDR package is used for both CPU and GPU systems. 
However, you need to have the appropriate GPU-enabled dependencies installed to use a GPU with OpenDR. If you plan to use GPU, then you should first install [mxnet-cuda](https://mxnet.apache.org/versions/1.4.1/install/index.html?platform=Linux&language=Python&processor=CPU) and [detectron2](https://detectron2.readthedocs.io/en/latest/tutorials/install.html). For example, if you stick with the default PyTorch version (1.8) and use CUDA11.2, then you can simply follow: ```bash -sudo apt install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev libeigen3-dev +sudo apt install python3.8-venv libfreetype6-dev git build-essential cmake python3-dev wget libopenblas-dev libsndfile1 libboost-dev libeigen3-dev python3 -m venv venv source venv/bin/activate pip install wheel @@ -116,30 +77,29 @@ For example, if you just want to perform pose estimation you can just run: pip install opendr-toolkit-engine pip install opendr-toolkit-pose-estimation ``` -Note that `opendr-toolkit-engine` must be always installed in your system, while multiple tools can be installed in this way. +Note that `opendr-toolkit-engine` must be always installed in your system, while multiple tools can be installed in this way. OpenDR distributes the following packages that can be installed: -- *opendr-toolkit-activity_recognition* -- *opendr-toolkit-speech_recognition* -- *opendr-toolkit-semantic_segmentation* -- *opendr-toolkit-skeleton_based_action_recognition* -- *opendr-toolkit-face_recognition* -- *opendr-toolkit-facial_expression_recognition* -- *opendr-toolkit-panoptic_segmentation* -- *opendr-toolkit-pose_estimation* -- *opendr-toolkit-compressive_learning* -- *opendr-toolkit-hyperparameter_tuner* -- *opendr-toolkit-heart_anomaly_detection* -- *opendr-toolkit-human_model_generation* -- *opendr-toolkit-multimodal_human_centric* -- *opendr-toolkit-object_detection_2d* -- *opendr-toolkit-object_tracking_2d* -- *opendr-toolkit-object_detection_3d* -- *opendr-toolkit-object_tracking_3d* -- *opendr-toolkit-mobile_manipulation* (requires a functional ROS installation) -- *opendr-toolkit-single_demo_grasp* (requires a functional ROS installation) - - -Note that `opendr-toolkit` is actually just a metapackage that includes all the afformentioned packages. +- *opendr-toolkit-activity-recognition* +- *opendr-toolkit-speech-recognition* +- *opendr-toolkit-semantic-segmentation* +- *opendr-toolkit-skeleton-based-action-recognition* +- *opendr-toolkit-face-recognition* +- *opendr-toolkit-facial-expression-recognition* +- *opendr-toolkit-panoptic-segmentation* +- *opendr-toolkit-pose-estimation* +- *opendr-toolkit-compressive-learning* +- *opendr-toolkit-hyperparameter-tuner* +- *opendr-toolkit-heart-anomaly-detection* +- *opendr-toolkit-human-model-generation* +- *opendr-toolkit-multimodal-human-centric* +- *opendr-toolkit-object-detection-2d* +- *opendr-toolkit-object-tracking-2d* +- *opendr-toolkit-object-detection-3d* +- *opendr-toolkit-object-tracking-3d* +- *opendr-toolkit-ambiguity-measure* +- *opendr-toolkit-fall-detection* + +Note that `opendr-toolkit` is actually just a metapackage that includes all the aformentioned packages. 
# Installing using *docker* @@ -162,34 +122,86 @@ source bin/activate.sh If you want to display GTK-based applications from the Docker container (e.g., visualize results using OpenCV `imshow()`), then you should mount the X server socket inside the container, e.g., ```bash xhost +local:root -sudo docker run -it -v /tmp/.X11-unix:/tmp/.X11-unix -e DISPLAY=unix$DISPLAY opendr/opendr-toolkit:cpu_v1.1.1 /bin/bash +sudo docker run -it -v /tmp/.X11-unix:/tmp/.X11-unix -e DISPLAY=unix$DISPLAY opendr/opendr-toolkit:cpu_v2.0.0 /bin/bash ``` ## GPU docker If you want to use a CUDA-enabled container please install [nvidia-docker](https://github.com/NVIDIA/nvidia-docker). Then, you can directly run the latest image with the command: ```bash -sudo docker run --gpus all -p 8888:8888 opendr/opendr-toolkit:cuda_v1.1.1 +sudo docker run --gpus all -p 8888:8888 opendr/opendr-toolkit:cuda_v2.0.0 ``` or, for an interactive session: ```bash -sudo docker run --gpus all -it opendr/opendr-toolkit:cuda_v1.1.1 /bin/bash +sudo docker run --gpus all -it opendr/opendr-toolkit:cuda_v2.0.0 /bin/bash ``` In this case, do not forget to enable the virtual environment with: ```bash source bin/activate.sh ``` -## Build the docker images yourself _(optional)_ -Alternatively you can also build the docker images locally using the [Dockerfile](/Dockerfile) ([Dockerfile-cuda](/Dockerfile-cuda) for cuda) provided in the root folder of the toolkit. -For the CPU image, execute the following commands: +# Installing by cloning OpenDR repository (Ubuntu 20.04, x86, architecture) + +This is the recommended way of installing the whole toolkit, since it allows for fully exploiting all the provided functionalities. +To install the toolkit, please first make sure that you have `git` available on your system. +```bash +sudo apt install git +``` +Then, clone the toolkit: ```bash git clone --depth 1 --recurse-submodules -j8 https://github.com/opendr-eu/opendr +``` + +If you want to install GPU-related dependencies, then you can appropriately set the `OPENDR_DEVICE` variable. +The toolkit defaults to using CPU. +Therefore, if you want to use GPU, please set this variable accordingly *before* running the installation script: +```bash +export OPENDR_DEVICE=gpu +``` + +If you want to use ROS or ROS2, then you need to set the `ROS_DISTRO` variable *before* running the installation script so that additional required dependencies are correctly installed. +This variable should be set to either `noetic` or `melodic` for ROS, and `foxy` or `humble` for ROS2. + +You are then ready to install the toolkit: +```bash cd opendr -sudo docker build -t opendr/opendr-toolkit:cpu . +./bin/install.sh ``` +The installation script automatically installs all the required dependencies. +Note that this might take a while (~10-20min depending on your machine and network connection), while the script also makes system-wide changes. +Using dockerfiles is strongly advised (please see below), unless you know what you are doing. +Please also make sure that you have enough RAM available for the installation (about 4GB of free RAM is needed for the full installation/compilation). + -For the cuda-enabled image, first edit `/etc/docker/daemon.json` in order to set the default docker runtime: +The installation script creates a *virtualenv*, where the toolkit is installed. +To activate OpenDR environment you can just source the `activate.sh`: +```bash +source ./bin/activate.sh +``` +Then, you are ready to use the toolkit! 
+ +**NOTE:** `OPENDR_DEVICE` does not alter the inference/training device at *runtime*. +It only affects the dependency installation. +You can use OpenDR API to change the inference device. + +You can also verify the installation by using the supplied Python and C unit tests: +```bash +make unittest +make ctests +``` + +If you plan to use GPU-enabled functionalities, then you are advised to install [CUDA 11.2](https://developer.nvidia.com/cuda-11.2.0-download-archive), along with [CuDNN](https://developer.nvidia.com/cudnn). + +**HINT:** All tests probe for the `TEST_DEVICE` enviromental variable when running. +If this enviromental variable is set during testing, it allows for easily running all tests on a different device (e.g., setting `TEST_DEVICE=cuda:0` runs all tests on the first GPU of the system). + + +## Nvidia embedded devices docker +You can also run the corresponding docker image on an Nvidia embedded device (supported: TX-2, Xavier-NX and AGX): + +Note that the embedded device should be flashed with Jetpack 4.6. + +To enable GPU usage on the embedded device within docker, first edit `/etc/docker/daemon.json` in order to set the default docker runtime: ``` { "runtimes": { @@ -206,18 +218,35 @@ Restart docker afterwards: ``` sudo systemctl restart docker.service ``` -Then you can build the supplied dockerfile: + + +You can directly run the corresponding docker image by running one of the below: ```bash -git clone --depth 1 --recurse-submodules -j8 https://github.com/opendr-eu/opendr -cd opendr -sudo docker build -t opendr/opendr-toolkit:cuda -f Dockerfile-cuda . +sudo docker run -it opendr/opendr-toolkit:tx2_v2 /bin/bash +sudo docker run -it opendr/opendr-toolkit:nx_v2 /bin/bash +sudo docker run -it opendr/opendr-toolkit:agx_v2 /bin/bash ``` +This will give you access to a bash terminal within the docker. -In order to run them, the commands are respectively: +After that you should enable the environment variables inside the docker with: ```bash -sudo docker run --gpus all -p 8888:8888 opendr/opendr-toolkit:cpu +cd opendr +source bin/activate_nvidia.sh +source /opt/ros/noetic/setup.bash +source projects/opendr_ws/devel/setup.bash ``` -and + +The embedded devices docker comes preinstalled with the OpenDR toolkit. +It supports all tools under perception package, as well as all corresponding ROS nodes. + +You can enable a USB camera, given it is mounted as `/dev/video0`, by running the container with the following arguments: +``` +xhost +local:root +sudo docker run -it --privileged -v /dev/video0:/dev/video0 opendr/opendr-toolkit:nx_v2 /bin/bash ``` -sudo docker run --gpus all -p 8888:8888 opendr/opendr-toolkit:cuda + +To use the docker on an embedded device with a monitor and a usb camera attached, as well as network access through the hosts network settings you can run: ``` +xhost +local:root +sudo docker run -it --privileged --network host -v /tmp/.X11-unix:/tmp/.X11-unix -e DISPLAY=unix$DSIPLAY -v /dev/video0:/dev/video0 opendr/opendr-toolkit:nx_v2 /bin/bash +``` \ No newline at end of file diff --git a/docs/reference/issues.md b/docs/reference/issues.md new file mode 100644 index 0000000000..fa4fd1fc89 --- /dev/null +++ b/docs/reference/issues.md @@ -0,0 +1,39 @@ +# Known Issues + +This page includes known issues, compatibility issues as well as possible workarounds. + + +## Issue: Some ROS nodes have a noticable lag + +You should make sure that queue size is set to 1 and the buffer size is large enough to hold the input message. 
+Even though we have set the appropriate default values for topics in order to avoid this issue, this also depends on your system configuration (e.g., size messages published in input topics). +Be sure to check the discussion and explanation of this behavior in [#275](https://github.com/opendr-eu/opendr/issues/275). +Essentially, due to the way ROS handles message a latency of at least 2 frames is expected. + + +## Issue: Docker image do not fit my embedded device + +This can affect several embedded devices, such as NX and TX2, which have limited storage on board. +The easiest solution to this issue is to use external storage (e.g., an SD card or an external SSD). +You can also check the [customization](develop/docs/reference/customize.md) instructions on how you can manually build a docker image that can fit your device. + +## Issue: I am trying to install the toolkit on Ubuntu 18.04/20.10/XX.XX, WLS, or any other linux distribution and it doesn't work. + +OpenDR toolkit targets native installation on Ubuntu 20.04. +For any other system you are advised to use the docker images that are expected to work out-of-the-box on any configuration and operating system. + + +## Issue: I cannot install the tookit using `pip` \ I cannot install the toolkit on colab + +OpenDR toolkit is officially targeting Ubuntu 20.04. +For other systems, slight modifications might be needed in order to ensure that all dependencies are in place. +Most parts of the toolkit will be probably installed without any issue on colab or any other Ubuntu-like system. +However, the behavior of `pip`'s dependency solver might cause issues (e.g., endless loops when trying to solve dependencies). +In this case, it is suggested to remove any package that could cause any conflict, e.g.: +``` +pip uninstall -y torch torchaudio fastai torchvision torchtext torchsummary kapre google-cloud-bigquery-storage yellowbrick tensorflow-metadata tensorflow-datasets numba imbalanced-learn googleapis-common-protos google-api-core imageio tensorboard +``` +and then install the toolkit using the `--use-deprecated=legacy-resolver` flag, e.g.: +``` +DISABLE_BCOLZ_AVX2=true pip install opendr-toolkit --use-deprecated=legacy-resolver +``` diff --git a/docs/reference/mobile-manipulation.md b/docs/reference/mobile-manipulation.md index b40fe513e1..294cccdcd1 100644 --- a/docs/reference/mobile-manipulation.md +++ b/docs/reference/mobile-manipulation.md @@ -130,7 +130,7 @@ The dependencies for this module automatically set up and compile a catkin works To start required ROS nodes, please run the following before using the `MobileRLLearner` class: ```sh -source ${OPENDR_HOME}/projects/control/mobile_manipulation/mobile_manipulation_ws/devel/setup.bash +source ${OPENDR_HOME}/projects/python/control/mobile_manipulation/mobile_manipulation_ws/devel/setup.bash roslaunch mobile_manipulation_rl [pr2,tiago]_analytical.launch ```` @@ -265,7 +265,7 @@ As this achieves very high control frequencies, we do not expect any benefits th TABLE-1: Control frequency in Hertz. | Model | AMD Ryzen 9 5900X (Hz) | -| -------- | ---------------------- | +| -------- | ---------------------- | | MobileRL | 2200 | @@ -294,7 +294,7 @@ TABLE-3: Platform compatibility evaluation. #### Notes ##### HSR -The HSR environment relies on packages that are part of the proprietory HSR simulator. +The HSR environment relies on packages that are part of the proprietary HSR simulator. If you have an HSR account with Toyota, please follow these steps to use the environment. 
Otherwise ignore this section to use the other environments we provide. @@ -307,7 +307,7 @@ Otherwise ignore this section to use the other environments we provide. and add them to `pybind_add_module()` and `target_link_libraries()` two lines below that. - Comment in the hsr parts in `src/pybindings` and the import of HSREnv in `mobileRL/envs/robotenv.py` to create the python bindings -- Some HSR launchfiles are not opensource either and might need some small adjustments +- Some HSR launchfiles are not open source either and might need some small adjustments #### References [1] Learning Kinematic Feasibility for Mobile Manipulation through Deep Reinforcement Learning, diff --git a/docs/reference/nanodet.md b/docs/reference/nanodet.md new file mode 100644 index 0000000000..765f210673 --- /dev/null +++ b/docs/reference/nanodet.md @@ -0,0 +1,289 @@ +## nanodet module + +The *nanodet* module contains the *NanodetLearner* class, which inherits from the abstract class *Learner*. + +### Class NanodetLearner +Bases: `engine.learners.Learner` + +The *NanodetLearner* class is a wrapper of the Nanodet object detection algorithms based on the original +[Nanodet implementation](https://github.com/RangiLyu/nanodet). +It can be used to perform object detection on images (inference) and train All predefined Nanodet object detection models and new modular models from the user. + +The [NanodetLearner](../../src/opendr/perception/object_detection_2d/nanodet/nanodet_learner.py) class has the +following public methods: + +#### `NanodetLearner` constructor +```python +NanodetLearner(self, model_to_use, iters, lr, batch_size, checkpoint_after_iter, checkpoint_load_iter, temp_path, device, + weight_decay, warmup_steps, warmup_ratio, lr_schedule_T_max, lr_schedule_eta_min, grad_clip) +``` + +Constructor parameters: + +- **model_to_use**: *{"EfficientNet_Lite0_320", "EfficientNet_Lite1_416", "EfficientNet_Lite2_512", "RepVGG_A0_416", + "t", "g", "m", "m_416", "m_0.5x", "m_1.5x", "m_1.5x_416", "plus_m_320", "plus_m_1.5x_320", "plus_m_416", + "plus_m_1.5x_416", "custom"}, default=plus_m_1.5x_416*\ + Specifies the model to use and the config file that contains all hyperparameters for training, evaluation and inference as the original + [Nanodet implementation](https://github.com/RangiLyu/nanodet). If you want to overwrite some of the parameters you can + put them as parameters in the learner. +- **iters**: *int, default=None*\ + Specifies the number of epochs the training should run for. +- **lr**: *float, default=None*\ + Specifies the initial learning rate to be used during training. +- **batch_size**: *int, default=None*\ + Specifies number of images to be bundled up in a batch during training. + This heavily affects memory usage, adjust according to your system. +- **checkpoint_after_iter**: *int, default=None*\ + Specifies per how many training iterations a checkpoint should be saved. + If it is set to 0 no checkpoints will be saved. +- **checkpoint_load_iter**: *int, default=None*\ + Specifies which checkpoint should be loaded. + If it is set to 0, no checkpoints will be loaded. +- **temp_path**: *str, default=''*\ + Specifies a path where the algorithm looks for saving the checkpoints along with the logging files. If *''* the `cfg.save_dir` will be used instead. +- **device**: *{'cpu', 'cuda'}, default='cuda'*\ + Specifies the device to be used. 
+- **weight_decay**: *float, default=None*\
+  If set, overrides the optimizer weight decay defined in the selected config file.
+- **warmup_steps**: *int, default=None*\
+  If set, overrides the number of learning rate warmup steps defined in the selected config file.
+- **warmup_ratio**: *float, default=None*\
+  If set, overrides the learning rate warmup ratio defined in the selected config file.
+- **lr_schedule_T_max**: *int, default=None*\
+  If set, overrides the *T_max* value of the learning rate schedule defined in the selected config file.
+- **lr_schedule_eta_min**: *float, default=None*\
+  If set, overrides the *eta_min* value of the learning rate schedule defined in the selected config file.
+- **grad_clip**: *int, default=None*\
+  If set, overrides the gradient clipping value defined in the selected config file.
+
+#### `NanodetLearner.fit`
+```python
+NanodetLearner.fit(self, dataset, val_dataset, logging_path, verbose, seed)
+```
+
+This method is used for training the algorithm on a train dataset and validating on a val dataset.
+
+Parameters:
+
+- **dataset**: *ExternalDataset*\
+  Object that holds the training dataset.
+  Can be of type `ExternalDataset`.
+- **val_dataset**: *ExternalDataset, default=None*\
+  Object that holds the validation dataset.
+  Can be of type `ExternalDataset`.
+- **logging_path**: *str, default=''*\
+  Subdirectory in temp_path to save log files and TensorBoard.
+- **verbose**: *bool, default=True*\
+  Enables the maximum verbosity and the logger.
+- **seed**: *int, default=123*\
+  Seed for repeatability.
+
+#### `NanodetLearner.eval`
+```python
+NanodetLearner.eval(self, dataset, verbose)
+```
+
+This method is used to evaluate a trained model on an evaluation dataset.
+Saves a txt logger file containing stats regarding evaluation.
+
+Parameters:
+
+- **dataset**: *ExternalDataset*\
+  Object that holds the evaluation dataset.
+- **verbose**: *bool, default=True*\
+  Enables the maximum verbosity and the logger.
+
+#### `NanodetLearner.infer`
+```python
+NanodetLearner.infer(self, input, threshold, verbose)
+```
+
+This method is used to perform object detection on an image.
+Returns an `engine.target.BoundingBoxList` object, which contains bounding boxes that are described by the top-left corner and
+their width and height, or returns an empty list if no detections were made on the input image.
+
+Parameters:
+- **input**: *Image*\
+  Image type object to perform inference on.
+- **threshold**: *float, default=0.35*\
+  Specifies the threshold for object detection inference.
+  An object is detected if the confidence of the output is higher than the specified threshold.
+- **verbose**: *bool, default=True*\
+  Enables the maximum verbosity and the logger.
+
+#### `NanodetLearner.save`
+```python
+NanodetLearner.save(self, path, verbose)
+```
+
+This method is used to save a trained model with its metadata.
+Provided with the path, it creates the "path" directory, if it does not already exist.
+Inside this folder, the model is saved as *"nanodet_{model_name}.pth"* and a metadata file as *"nanodet_{model_name}.json"*.
+If the directory already exists, the *"nanodet_{model_name}.pth"* and *"nanodet_{model_name}.json"* files are overwritten.
+
+Parameters:
+
+- **path**: *str, default=None*\
+  Path to save the model, if None it will be the `"temp_folder"` or the `"cfg.save_dir"` from the learner.
+- **verbose**: *bool, default=True*\
+  Enables the maximum verbosity and the logger.
+
+#### `NanodetLearner.load`
+```python
+NanodetLearner.load(self, path, verbose)
+```
+
+This method is used to load a previously saved model from its saved folder.
+Loads the model from inside the directory of the path provided, using the metadata .json file included.
+
+Parameters:
+
+- **path**: *str, default=None*\
+  Path of the model to be loaded.
+- **verbose**: *bool, default=True*\
+  Enables the maximum verbosity and the logger.
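+
+As a minimal sketch of how load and infer fit together with the download utility documented below, a pretrained model could be used roughly as follows; the checkpoint path follows the pattern used in the training example further down, while the image file name is a hypothetical placeholder:
+
+```python
+from opendr.engine.data import Image
+from opendr.perception.object_detection_2d import NanodetLearner
+
+nanodet = NanodetLearner(model_to_use="plus_m_1.5x_416", device="cpu")
+nanodet.download("./predefined_examples", mode="pretrained")
+nanodet.load("./predefined_examples/nanodet-plus_m_1.5x_416/nanodet-plus_m_1.5x_416.ckpt", verbose=True)
+
+img = Image.open("./example_image.jpg")  # hypothetical path to an input image
+boxes = nanodet.infer(input=img, threshold=0.35)  # BoundingBoxList with the detections
+print(boxes)
+```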
+
+#### `NanodetLearner.download`
+```python
+NanodetLearner.download(self, path, mode, model, verbose, url)
+```
+
+Downloads data needed for the various functions of the learner, e.g., pretrained models as well as test data.
+
+Parameters:
+
+- **path**: *str, default=None*\
+  Specifies the folder where data will be downloaded. If *None*, the *self.temp_path* directory is used instead.
+- **mode**: *{'pretrained', 'images', 'test_data'}, default='pretrained'*\
+  If *'pretrained'*, downloads a pretrained detector model for the *model_to_use* architecture which was chosen at learner initialization.
+  If *'images'*, downloads an image to perform inference on. If *'test_data'*, downloads a dummy dataset for testing purposes.
+- **verbose**: *bool, default=False*\
+  Enables the maximum verbosity and logger.
+- **url**: *str, default=OpenDR FTP URL*\
+  URL of the FTP server.
+
+
+#### Tutorials and Demos
+
+A tutorial on performing inference is available.
+Furthermore, demos on performing [training](../../projects/perception/object_detection_2d/nanodet/train_demo.py),
+[evaluation](../../projects/perception/object_detection_2d/nanodet/eval_demo.py) and
+[inference](../../projects/perception/object_detection_2d/nanodet/inference_demo.py) are also available.
+
+
+
+#### Examples
+
+* **Training example using an `ExternalDataset`.**
+
+  To train properly, the architecture weights must be downloaded into a predefined directory before fit is called; in this case the directory name is "predefined_examples".
+  Default architecture is *'plus-m-1.5x_416'*.
+  The training and evaluation dataset root should be present in the path provided, along with the annotation files.
+  The default COCO 2017 training data can be found [here](https://cocodataset.org/#download) (train, val, annotations).
+  All training parameters (optimizer, lr schedule, losses, model parameters, etc.) can be changed in the model config file
+  in the [config directory](../../src/opendr/perception/object_detection_2d/nanodet/algorithm/config).
+  You can find more information in [config file detail](../../src/opendr/perception/object_detection_2d/nanodet/algorithm/config/config_file_detail.md).
+  For easier use, the following parameters can be overridden directly through the NanodetLearner constructor:
+  (iters, lr, batch_size, checkpoint_after_iter, checkpoint_load_iter, temp_path, device, weight_decay, warmup_steps,
+  warmup_ratio, lr_schedule_T_max, lr_schedule_eta_min, grad_clip)
+
+  **Note**
+
+  The Nanodet tool can be used with any PASCAL VOC or COCO-like dataset. The only thing needed is to provide the correct root and dataset type.
+
+  If *'voc'* is chosen for *dataset*, the directory must look like this:
+
+  - root folder
+    - train
+      - Annotations
+        - image1.xml
+        - image2.xml
+        - ...
+      - JPEGImages
+        - image1.jpg
+        - image2.jpg
+        - ...
+    - val
+      - Annotations
+        - image1.xml
+        - image2.xml
+        - ...
+      - JPEGImages
+        - image1.jpg
+        - image2.jpg
+        - ...
+
+  On the other hand, if *'coco'* is chosen for *dataset*, the directory must look like this:
+
+  - root folder
+    - train2017
+      - image1.jpg
+      - image2.jpg
+      - ...
+    - val2017
+      - image1.jpg
+      - image2.jpg
+      - ...
+    - annotations
+      - instances_train2017.json
+      - instances_val2017.json
+
+  You can change the default annotation and image directories in [dataset](../../src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/__init__.py).
+
+  ```python
+  import argparse
+
+  from opendr.engine.datasets import ExternalDataset
+  from opendr.perception.object_detection_2d import NanodetLearner
+
+
+  if __name__ == '__main__':
+      parser = argparse.ArgumentParser()
+      parser.add_argument("--dataset", help="Dataset to train on", type=str, default="coco", choices=["voc", "coco"])
+      parser.add_argument("--data-root", help="Dataset root folder", type=str)
+      parser.add_argument("--model", help="Model that config file will be used", type=str)
+      parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"])
+      parser.add_argument("--batch-size", help="Batch size to use for training", type=int, default=6)
+      parser.add_argument("--lr", help="Learning rate to use for training", type=float, default=5e-4)
+      parser.add_argument("--checkpoint-freq", help="Frequency in-between checkpoint saving and evaluations", type=int, default=50)
+      parser.add_argument("--n-epochs", help="Number of total epochs", type=int, default=300)
+      parser.add_argument("--resume-from", help="Epoch to load checkpoint file and resume training from", type=int, default=0)
+
+      args = parser.parse_args()
+
+      if args.dataset == 'voc':
+          dataset = ExternalDataset(args.data_root, 'voc')
+          val_dataset = ExternalDataset(args.data_root, 'voc')
+      elif args.dataset == 'coco':
+          dataset = ExternalDataset(args.data_root, 'coco')
+          val_dataset = ExternalDataset(args.data_root, 'coco')
+
+      nanodet = NanodetLearner(model_to_use=args.model, iters=args.n_epochs, lr=args.lr, batch_size=args.batch_size,
+                               checkpoint_after_iter=args.checkpoint_freq, checkpoint_load_iter=args.resume_from,
+                               device=args.device)
+
+      nanodet.download("./predefined_examples", mode="pretrained")
+      nanodet.load("./predefined_examples/nanodet-{}/nanodet-{}.ckpt".format(args.model, args.model), verbose=True)
+      nanodet.fit(dataset, val_dataset)
+      nanodet.save()
+  ```
+
+* **Inference and result drawing example on a test image.**
+
+  This example shows how to perform inference on an image and draw the resulting bounding boxes using a nanodet model that is pretrained on the COCO dataset.
+  Inference can also be performed on all images in a folder, on the frames of a video, or on a webcam feed by setting *mode* accordingly.
+  In this example, a pretrained model is first downloaded, as in the training example, followed by an image to run inference on.
+  With the same *path* parameter you can instead select a folder or a video file as the inference input. Last but not least, if *'webcam'*
+  is used as *mode*, the *camid* parameter of inference must be set to select the webcam device of your machine.
+ + ```python + import argparse + from opendr.perception.object_detection_2d import NanodetLearner + + if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--model", help="Model that config file will be used", type=str) + args = parser.parse_args() + + nanodet = NanodetLearner(model_to_use=args.model, device=args.device) + + nanodet.download("./predefined_examples", mode="pretrained") + nanodet.load("./predefined_examples/nanodet-{}/nanodet-{}.ckpt".format(args.model, args.model), verbose=True) + nanodet.download("./predefined_examples", mode="images") + boxes = nanodet.infer(path="./predefined_examples/000000000036.jpg") + ``` \ No newline at end of file diff --git a/docs/reference/object-detection-2d-nms-seq2seq_nms.md b/docs/reference/object-detection-2d-nms-seq2seq_nms.md index 513233c833..c1269c108f 100644 --- a/docs/reference/object-detection-2d-nms-seq2seq_nms.md +++ b/docs/reference/object-detection-2d-nms-seq2seq_nms.md @@ -262,7 +262,7 @@ Parameters: ssd = SingleShotDetectorLearner(device='cuda') ssd.download(".", mode="pretrained") ssd.load("./ssd_default_person", verbose=True) - img = Image.open(OPENDR_HOME + '/projects/perception/object_detection_2d/nms/img_temp/frame_0000.jpg') + img = Image.open(OPENDR_HOME + '/projects/python/perception/object_detection_2d/nms/img_temp/frame_0000.jpg') if not isinstance(img, Image): img = Image(img) boxes = ssd.infer(img, threshold=0.25, custom_nms=seq2SeqNMSLearner) diff --git a/docs/reference/object-detection-2d-yolov5.md b/docs/reference/object-detection-2d-yolov5.md new file mode 100644 index 0000000000..58420328eb --- /dev/null +++ b/docs/reference/object-detection-2d-yolov5.md @@ -0,0 +1,81 @@ +## YOLOv5DetectorLearner module + +The *yolov5* module contains the *YOLOv5DetectorLearner* class, which inherits from the abstract class *Learner*. + +### Class YOLOv5DetectorLearner +Bases: `engine.learners.Learner` + +The *YOLOv5DetectorLearner* class is a wrapper of the YOLO detector[[1]](#yolo-1) +[Ultralytics implementation](https://github.com/ultralytics/yolov5) based on its availability in the [Pytorch Hub](https://pytorch.org/hub/ultralytics_yolov5/). +It can be used to perform object detection on images (inference only). + +The [YOLOv5DetectorLearner](/src/opendr/perception/object_detection_2d/yolov5/yolov5_learner.py) class has the following +public methods: + +#### `YOLOv5DetectorLearner` constructor +```python +YOLOv5DetectorLearner(self, model_name, path, device) +``` + +Constructor parameters: + +- **model_name**: *str*\ + Specifies the name of the model to be used. Available models: + - 'yolov5n' (46.0% mAP, 1.9M parameters) + - 'yolov5s' (56.0% mAP, 7.2M parameters) + - 'yolov5m' (63.9% mAP, 21.2M parameters) + - 'yolov5l' (67.2% mAP, 46.5M parameters) + - 'yolov5x' (68.9% mAP, 86.7M parameters) + - 'yolov5n6' (50.7% mAP, 3.2M parameters) + - 'yolov5s6' (63.0% mAP, 16.8M parameters) + - 'yolov5m6' (69.0% mAP, 35.7 parameters) + - 'yolov5l6' (71.6% mAP, 76.8M parameters) + - 'custom' (for custom models, the ```path``` parameter must be set to point to the location of the weights file.) +Note that mAP (0.5) is reported on the [COCO val2017 dataset](https://github.com/ultralytics/yolov5/releases). +- **path**: *str, default=None*\ + For custom-trained models, specifies the path to the weights to be loaded. 
+- **device**: *{'cuda', 'cpu'}, default='cuda'* + Specifies the device used for inference. +- **temp_path**: *str, default='.'*\ + Specifies the path to where the weights will be downloaded when using pretrained models. +- **force_reload**: *bool, default=False*\ + Sets the `force_reload` parameter of the pytorch hub `load` method. + This fixes issues with caching when set to `True`. + + +#### `YOLOv5DetectorLearner.infer` +The `infer` method: +```python +YOLOv5DetectorLearner.infer(self, img) +``` + +Performs inference on a single image. + +Parameters: + +- **img**: *object*\ + Object of type engine.data.Image or OpenCV. +- **size**: *int, default=640*\ + Size of image for inference. + The image is resized to this in both sides before being fed to the model. + +#### Examples + +* Inference and result drawing example on a test .jpg image using OpenCV: + ```python + import torch + from opendr.engine.data import Image + from opendr.perception.object_detection_2d import YOLOv5DetectorLearner + from opendr.perception.object_detection_2d import draw_bounding_boxes + + yolo = YOLOv5DetectorLearner(model_name='yolov5s', device='cpu') + + torch.hub.download_url_to_file('https://ultralytics.com/images/zidane.jpg', 'zidane.jpg') # download image + im1 = Image.open('zidane.jpg') # OpenDR image + + results = yolo.infer(im1) + draw_bounding_boxes(im1.opencv(), results, yolo.classes, show=True, line_thickness=3) + ``` + +#### References +[1] YOLOv5: The friendliest AI architecture you'll ever use. diff --git a/docs/reference/object-tracking-2d-siamrpn.md b/docs/reference/object-tracking-2d-siamrpn.md new file mode 100644 index 0000000000..6953be0a40 --- /dev/null +++ b/docs/reference/object-tracking-2d-siamrpn.md @@ -0,0 +1,221 @@ +## SiamRPNLearner module + +The *SiamRPN* module contains the *SiamRPNLearner* class, which inherits from the abstract class *Learner*. + +### Class SiamRPNLearner +Bases: `engine.learners.Learner` + +The *SiamRPNLearner* class is a wrapper of the SiamRPN detector[[1]](#siamrpn-1) +[GluonCV implementation](https://github.com/dmlc/gluon-cv/tree/master/gluoncv/model_zoo/siamrpn). +It can be used to perform object tracking on videos (inference) as well as train new object tracking models. + +The [SiamRPNLearner](/src/opendr/perception/object_tracking_2d/siamrpn/siamrpn_learner.py) class has the following public methods: + +#### `SiamRPNLearner` constructor +```python +SiamRPNLearner(self, device, n_epochs, num_workers, warmup_epochs, lr, weight_decay, momentum, cls_weight, loc_weight, batch_size, temp_path) +``` + +Parameters: + +- **device**: *{'cuda', 'cpu'}, default='cuda'*\ + Specifies the device to be used. +- **n_epochs**: *int, default=50*\ + Specifies the number of epochs to be used during training. +- **num_workers**: *int, default=1*\ + Specifies the number of workers to be used when loading datasets or performing evaluation. +- **warmup_epochs**: *int, default=2*\ + Specifies the number of epochs during which the learning rate is annealed to **lr**. +- **lr**: *float, default=0.001*\ + Specifies the initial learning rate to be used during training. +- **weight_decay**: *float, default=0*\ + Specifies the weight decay to be used during training. +- **momentum**: *float, default=0.9*\ + Specifies the momentum to be used for optimizer during training. +- **cls_weight**: *float, default=1.*\ + Specifies the classification loss multiplier to be used for optimizer during training. 
+- **loc_weight**: *float, default=1.2*\ + Specifies the localization loss multiplier to be used for optimizer during training. +- **batch_size**: *int, default=32*\ + Specifies the batch size to be used during training. +- **temp_path**: *str, default=''*\ + Specifies a path to be used for data downloading. + + +#### `SiamRPNLearner.fit` +```python +SiamRPNLearner.fit(self, dataset, log_interval, n_gpus, verbose) +``` + +This method is used to train the algorithm on a `DetectionDataset` or `ExternalDataset` dataset and also performs evaluation on a validation set using the trained model. +Returns a dictionary containing stats regarding the training process. + +Parameters: + +- **dataset**: *object*\ + Object that holds the training dataset. +- **log_interval**: *int, default=20*\ + Training loss is printed in stdout after this amount of iterations. +- **n_gpus**: *int, default=1*\ + If CUDA is enabled, training can be performed on multiple GPUs as set by this parameter. +- **verbose**: *bool, default=True*\ + If True, enables maximum verbosity. + +#### `SiamRPNLearner.eval` +```python +SiamRPNLearner.eval(self, dataset) +``` + +Performs evaluation on a dataset. The OTB dataset is currently supported. + +Parameters: + +- **dataset**: *object*\ + Object that holds dataset to perform evaluation on. + Expected type is `ExternalDataset` with `otb2015` dataset type. + +#### `SiamRPNLearner.infer` +```python +SiamRPNLearner.infer(self, img, init_box) +``` + +Performs inference on a single image. +If the `init_box` is provided, the tracker is initialized. +If not, the current position of the target is updated by running inference on the image. + +Parameters: + +- **img**: *object*\ + Object of type engine.data.Image. +- **init_box**: *object, default=None*\ + Object of type engine.target.TrackingAnnotation. + If provided, it is used to initialize the tracker. + +#### `SiamRPNLearner.save` +```python +SiamRPNLearner.save(self, path, verbose) +``` + +Saves a model in OpenDR format at the specified path. +The model name is extracted from the base folder in the specified path. + +Parameters: + +- **path**: *str*\ + Specifies the folder where the model will be saved. + The model name is extracted from the base folder of this path. +- **verbose**: *bool default=False*\ + If True, enables maximum verbosity. + +#### `SiamRPNLearner.load` +```python +SiamRPNLearner.load(self, path, verbose) +``` + +Loads a model which was previously saved in OpenDR format at the specified path. + +Parameters: + +- **path**: *str*\ + Specifies the folder where the model will be loaded from. +- **verbose**: *bool default=False*\ + If True, enables maximum verbosity. + +#### `SiamRPNLearner.download` +```python +SiamRPNLearner.download(self, path, mode, verbose, url, overwrite) +``` + +Downloads data needed for the various functions of the learner, e.g., pre-trained models as well as test data. + +Parameters: + +- **path**: *str, default=None*\ + Specifies the folder where data will be downloaded. + If *None*, the *self.temp_path* directory is used instead. +- **mode**: *{'pretrained', 'video', 'test_data', 'otb2015'}, default='pretrained'*\ + If *'pretrained'*, downloads a pre-trained detector model. + If *'video'*, downloads a single video to perform inference on. + If *'test_data'* downloads a dummy version of the OTB dataset for testing purposes. + If *'otb2015'*, attempts to download the OTB dataset (100 videos). + This process lasts a long time. 
+- **verbose**: *bool default=False*\ + If True, enables maximum verbosity. +- **url**: *str, default=OpenDR FTP URL*\ + URL of the FTP server. +- **overwrite**: *bool, default=False*\ + If True, files will be re-downloaded if they already exists. + This can solve some issues with large downloads. + +#### Examples + +* **Training example using `ExternalDataset` objects**. + Training is supported solely via the `ExternalDataset` class. + See [class README](/src/opendr/perception/object_tracking_2d/siamrpn/README.md) for a list of supported datasets and presumed data directory structure. + Example training on COCO Detection dataset: + ```python + from opendr.engine.datasets import ExternalDataset + from opendr.perception.object_tracking_2d import SiamRPNLearner + + dataset = ExternalDataset("/path/to/data/root", "coco") + learner = SiamRPNLearner(device="cuda", n_epochs=50, batch_size=32, + lr=1e-3) + learner.fit(dataset) + learner.save("siamrpn_custom") + ``` + +* **Inference and result drawing example on a test mp4 video using OpenCV.** + ```python + import cv2 + from opendr.engine.target import TrackingAnnotation + from opendr.perception.object_tracking_2d import SiamRPNLearner + + learner = SiamRPNLearner(device="cuda") + learner.download(".", mode="pretrained") + learner.load("siamrpn_opendr") + + learner.download(".", mode="video") + cap = cv2.VideoCapture("tc_Skiing_ce.mp4") + + init_bbox = TrackingAnnotation(left=598, top=312, width=75, height=200, name=0, id=0) + + frame_no = 0 + while cap.isOpened(): + ok, frame = cap.read() + if not ok: + break + + if frame_no == 0: + # first frame, pass init_bbox to infer function to initialize the tracker + pred_bbox = learner.infer(frame, init_bbox) + else: + # after the first frame only pass the image to infer + pred_bbox = learner.infer(frame) + + frame_no += 1 + + cv2.rectangle(frame, (pred_bbox.left, pred_bbox.top), + (pred_bbox.left + pred_bbox.width, pred_bbox.top + pred_bbox.height), + (0, 255, 255), 3) + cv2.imshow('Tracking Result', frame) + cv2.waitKey(1) + + cv2.destroyAllWindows() + ``` + + +#### Performance evaluation + +We have measured the performance on the OTB2015 dataset in terms of success and FPS on an RTX 2070. +``` +------------------------------------------------ +| Tracker name | Success | FPS | +------------------------------------------------ +| siamrpn_alexnet_v2_otb15 | 0.668 | 132.1 | +------------------------------------------------ +``` + +#### References +[1] +High Performance Visual Tracking with Siamese Region Proposal Network, +[PDF](https://openaccess.thecvf.com/content_cvpr_2018/papers/Li_High_Performance_Visual_CVPR_2018_paper.pdf). diff --git a/docs/reference/opendr-ros-bridge.md b/docs/reference/opendr-ros-bridge.md new file mode 100755 index 0000000000..a98666a3ab --- /dev/null +++ b/docs/reference/opendr-ros-bridge.md @@ -0,0 +1,431 @@ +## opendr_bridge package + + +This *opendr_bridge* package provides an interface to convert OpenDR data types and targets into ROS-compatible ones similar to CvBridge. +The *ROSBridge* class provides two methods for each data type X: +1. *from_ros_X()* : converts the ROS equivalent of X into OpenDR data type +2. *to_ros_X()* : converts the OpenDR data type into the ROS equivalent of X + +### Class ROSBridge + +The *ROSBridge* class provides an interface to convert OpenDR data types and targets into ROS-compatible ones. 
+ +The ROSBridge class has the following public methods: + +#### `ROSBridge` constructor +The constructor only initializes the state of the class and does not require any input arguments. +```python +ROSBridge(self) +``` + +#### `ROSBridge.from_ros_image` + +```python +ROSBridge.from_ros_image(self, + message, + encoding) +``` + +This method converts a ROS Image into an OpenDR image. + +Parameters: + +- **message**: *sensor_msgs.msg.Img*\ + ROS image to be converted into an OpenDR image. +- **encoding**: *str, default='bgr8'*\ + Encoding to be used for the conversion (inherited from CvBridge). + +#### `ROSBridge.to_ros_image` + +```python +ROSBridge.to_ros_image(self, + image, + encoding) +``` + +This method converts an OpenDR image into a ROS image. + +Parameters: + +- **message**: *engine.data.Image*\ + OpenDR image to be converted into a ROS message. +- **encoding**: *str, default='bgr8'*\ + Encoding to be used for the conversion (inherited from CvBridge). + +#### `ROSBridge.from_ros_pose` + +```python +ROSBridge.from_ros_pose(self, + ros_pose) +``` + +Converts an OpenDRPose2D message into an OpenDR Pose. + +Parameters: + +- **ros_pose**: *opendr_bridge.msg.OpenDRPose2D*\ + ROS pose to be converted into an OpenDR Pose. + +#### `ROSBridge.to_ros_pose` + +```python +ROSBridge.to_ros_pose(self, + pose) +``` +Converts an OpenDR Pose into a OpenDRPose2D msg that can carry the same information, i.e. a list of keypoints, +the pose detection confidence and the pose id. +Each keypoint is represented as an OpenDRPose2DKeypoint with x, y pixel position on input image with (0, 0) +being the top-left corner. + +Parameters: + +- **pose**: *engine.target.Pose*\ + OpenDR Pose to be converted to ROS OpenDRPose2D. + + +#### `ROSBridge.to_ros_category` + +```python +ROSBridge.to_ros_category(self, + category) +``` +Converts an OpenDR Category used for category recognition into a ROS ObjectHypothesis. + +Parameters: + +- **message**: *engine.target.Category*\ + OpenDR Category used for category recognition to be converted to ROS ObjectHypothesis. + +#### `ROSBridge.to_ros_category_description` + +```python +ROSBridge.to_ros_category_description(self, + category) +``` +Converts an OpenDR Category into a ROS String. + +Parameters: + +- **message**: *engine.target.Category*\ + OpenDR Category to be converted to ROS String. + + +#### `ROSBridge.from_ros_category` + +```python +ROSBridge.from_ros_category(self, + ros_hypothesis) +``` + +Converts a ROS ObjectHypothesis message into an OpenDR Category. + +Parameters: + +- **message**: *vision_msgs.msg.ObjectHypothesis*\ + ROS ObjectHypothesis to be converted into an OpenDR Category. + + +#### `ROSBridge.from_ros_face` + +```python +ROSBridge.from_ros_face(self, + ros_hypothesis) +``` + +Converts a ROS ObjectHypothesis message into an OpenDR Category. + +Parameters: + +- **message**: *vision_msgs.msg.ObjectHypothesis*\ + ROS ObjectHypothesis to be converted into an OpenDR Category. + +#### `ROSBridge.to_ros_face` + +```python +ROSBridge.to_ros_face(self, + category) +``` +Converts an OpenDR Category used for face recognition into a ROS ObjectHypothesis. + +Parameters: + +- **message**: *engine.target.Category*\ + OpenDR Category used for face recognition to be converted to ROS ObjectHypothesis. + +#### `ROSBridge.to_ros_face_id` + +```python +ROSBridge.to_ros_face_id(self, + category) +``` +Converts an OpenDR Category into a ROS String. + +Parameters: + +- **message**: *engine.target.Category*\ + OpenDR Category to be converted to ROS String. 
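+
+As a usage illustration of the conversion pattern above, the following minimal sketch converts incoming ROS images to OpenDR images and republishes them.
+The node and topic names are assumptions made for this example; only the bridge calls follow the API described in this document:
+
+```python
+import rospy
+from sensor_msgs.msg import Image as ROSImage
+from opendr_bridge import ROSBridge
+
+bridge = ROSBridge()
+
+def callback(msg):
+    # ROS image -> OpenDR image, then back to a ROS message before republishing
+    opendr_image = bridge.from_ros_image(msg, encoding='bgr8')
+    pub.publish(bridge.to_ros_image(opendr_image, encoding='bgr8'))
+
+if __name__ == '__main__':
+    rospy.init_node('opendr_bridge_example')
+    pub = rospy.Publisher('/opendr/image_out', ROSImage, queue_size=1)
+    rospy.Subscriber('/usb_cam/image_raw', ROSImage, callback, queue_size=1)
+    rospy.spin()
+```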
+ +#### `ROSBridge.to_ros_boxes` + +```python +ROSBridge.to_ros_boxes(self, + box_list) +``` +Converts an OpenDR BoundingBoxList into a Detection2DArray msg that can carry the same information. Each bounding box is +represented by its center coordinates as well as its width/height dimensions. + +#### `ROSBridge.from_ros_boxes` + +```python +ROSBridge.from_ros_boxes(self, + ros_detections) +``` +Converts a ROS Detection2DArray message with bounding boxes into an OpenDR BoundingBoxList + +#### `ROSBridge.from_ros_3Dpose` + +```python +ROSBridge.from_ros_3Dpose(self, + ros_pose) +``` + +Converts a ROS pose into an OpenDR pose (used for a 3D pose). + +Parameters: + +- **ros_pose**: *geometry_msgs.msg.Pose*\ + ROS pose to be converted into an OpenDR pose. + +#### `ROSBridge.to_ros_3Dpose` + +```python +ROSBridge.to_ros_3Dpose(self, + opendr_pose) +``` +Converts an OpenDR pose into a ROS ```geometry_msgs.msg.Pose``` message. + +Parameters: + +- **opendr_pose**: *engine.target.Pose*\ + OpenDR pose to be converted to ```geometry_msgs.msg.Pose``` message. + +#### `ROSBridge.to_ros_mesh` + +```python +ROSBridge.to_ros_mesh(self, + vertices, faces) +``` +Converts a triangle mesh consisting of vertices, faces into a ROS ```shape_msgs.msg.Mesh``` message. + +Parameters: + +- **vertices**: *numpy.ndarray*\ + Vertices (Nx3) of a triangle mesh. +- **faces**: *numpy.ndarray*\ + Faces (Nx3) of a triangle mesh. + + #### `ROSBridge.to_ros_colors` + +```python +ROSBridge.to_ros_colors(self, + colors) +``` +Converts a list of colors into a list of ROS ```std_msgs.msg.colorRGBA``` messages. + +Parameters: + +- **colors**: *list of list of size 3*\ + List of colors to be converted to a list of ROS colors. + + #### `ROSBridge.from_ros_mesh` + +```python +ROSBridge.from_ros_mesh(self, + ros_mesh) +``` +Converts a ROS mesh into arrays of vertices and faces of a triangle mesh. + +Parameters: + +- **ros_mesh**: *shape_msgs.msg.Mesh*\ + + #### `ROSBridge.from_ros_colors` + +```python +ROSBridge.from_ros_colors(self, + ros_colors) +``` +Converts a list of ROS colors into an array (Nx3). + +Parameters: + +- **ros_colors**: list of *std_msgs.msg.colorRGBA* + + +#### `ROSBridge.from_ros_image_to_depth` + +```python +ROSBridge.from_ros_image_to_depth(self, + message, + encoding) +``` + +This method converts a ROS image message into an OpenDR grayscale depth image. + +Parameters: + +- **message**: *sensor_msgs.msg.Img*\ + ROS image to be converted into an OpenDR image. +- **encoding**: *str, default='mono16'*\ + Encoding to be used for the conversion. + +#### `ROSBridge.from_category_to_rosclass` + +```python +ROSBridge.from_category_to_rosclass(self, + prediction, + source_data) +``` +This method converts an OpenDR Category object into Classification2D message with class label, confidence, timestamp and optionally corresponding input. + +Parameters: + +- **prediction**: *engine.target.Category*\ + OpenDR Category object +- **source_data**: *default=None*\ + Corresponding input, default=None + +#### `ROSBridge.from_rosarray_to_timeseries` + +```python +ROSBridge.from_rosarray_to_timeseries(self, + ros_array, + dim1, + dim2) +``` +This method converts a ROS array into OpenDR Timeseries object. 
+ +Parameters: + +- **ros_array**: *std_msgs.msg.Float32MultiArray*\ + ROS array of data +- **dim1**: *int*\ + First dimension +- **dim2**: *int*\ + Second dimension + +#### `ROSBridge.from_ros_point_cloud` + +```python +ROSBridge.from_ros_point_cloud(self, point_cloud) +``` + +Converts a ROS PointCloud message into an OpenDR PointCloud. + +Parameters: + +- **point_cloud**: *sensor_msgs.msg.PointCloud*\ + ROS PointCloud to be converted. + +#### `ROSBridge.to_ros_point_cloud` + +```python +ROSBridge.to_ros_point_cloud(self, point_cloud) +``` +Converts an OpenDR PointCloud message into a ROS PointCloud. + +Parameters: + +- **point_cloud**: *engine.data.PointCloud*\ + OpenDR PointCloud to be converted. + +#### `ROSBridge.from_ros_boxes_3d` + +```python +ROSBridge.from_ros_boxes_3d(self, ros_boxes_3d, classes) +``` + +Converts a ROS Detection3DArray message into an OpenDR BoundingBox3D object. + +Parameters: + +- **ros_boxes_3d**: *vision_msgs.msg.Detection3DArray*\ + The ROS boxes to be converted. + +- **classes**: *[str]*\ + The array of classes to transform an index into a string name. + +#### `ROSBridge.to_ros_boxes_3d` + +```python +ROSBridge.to_ros_boxes_3d(self, boxes_3d, classes) +``` +Converts an OpenDR BoundingBox3DList object into a ROS Detection3DArray message. + +Parameters: + +- **boxes_3d**: *engine.target.BoundingBox3DList*\ + The ROS boxes to be converted. + +- **classes**: *[str]* + The array of classes to transform from string name into an index. + +#### `ROSBridge.from_ros_tracking_annotation` + +```python +ROSBridge.from_ros_tracking_annotation(self, ros_detections, ros_tracking_ids, frame) +``` + +Converts a pair of ROS messages with bounding boxes and tracking ids into an OpenDR TrackingAnnotationList. + +Parameters: + +- **ros_detections**: *sensor_msgs.msg.Detection2DArray*\ + The boxes to be converted. +- **ros_tracking_ids**: *std_msgs.msg.Int32MultiArray*\ + The tracking ids corresponding to the boxes. +- **frame**: *int, default=-1*\ + The frame index to assign to the tracking boxes. + +#### `ROSBridge.to_ros_single_tracking_annotation` + +```python +ROSBridge.to_ros_single_tracking_annotation(self, tracking_annotation) +``` + +Converts a `TrackingAnnotation` object to a `Detection2D` ROS message. +This method is intended for single object tracking methods. + +Parameters: + +- **tracking_annotation**: *opendr.engine.target.TrackingAnnotation*\ + The box to be converted. + +#### `ROSBridge.from_ros_single_tracking_annotation` + +```python +ROSBridge.from_ros_single_tracking_annotation(self, ros_detection_box) +``` + +Converts a `Detection2D` ROS message object to a `TrackingAnnotation` object. +This method is intended for single object tracking methods. + +Parameters: + +- **ros_detection_box**: *vision_msgs.Detection2D*\ + The box to be converted. + +## ROS message equivalence with OpenDR +1. `sensor_msgs.msg.Img` is used as an equivalent to `engine.data.Image` +2. `opendr_bridge.msg.Pose` is used as an equivalent to `engine.target.Pose` +3. `vision_msgs.msg.Detection2DArray` is used as an equivalent to `engine.target.BoundingBoxList` +4. `vision_msgs.msg.Detection2D` is used as an equivalent to `engine.target.BoundingBox` and + to `engine.target.TrackingAnnotation` in single object tracking +5. `geometry_msgs.msg.Pose` is used as an equivalent to `engine.target.Pose` for 3D poses conversion only. +6. `vision_msgs.msg.Detection3DArray` is used as an equivalent to `engine.target.BoundingBox3DList`. +7. 
`sensor_msgs.msg.PointCloud` is used as an equivalent to `engine.data.PointCloud`. + +## ROS services +The following ROS services are implemented (`srv` folder): +1. `opendr_bridge.OpenDRSingleObjectTracking`: can be used to initialize the tracking process of single + object trackers, by providing a `Detection2D` bounding box \ No newline at end of file diff --git a/docs/reference/rgbd-hand-gesture-learner.md b/docs/reference/rgbd-hand-gesture-learner.md index d967b47391..93bdc40c0b 100644 --- a/docs/reference/rgbd-hand-gesture-learner.md +++ b/docs/reference/rgbd-hand-gesture-learner.md @@ -2,6 +2,20 @@ The *rgbd_hand_gesture_learner* module contains the *RgbdHandGestureLearner* class, which inherits from the abstract class *Learner*. +On the table below you can find the gesture classes and their corresponding IDs: + +| **ID** | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | +|:------:|:------:|:-----:|:----:|:----:|:--------------:|:--------------:|:----:|:---:|:-----:|:-----:|:---:|:----:|:-----:|:-------:|:---:|:-----:| +| Class | COLLAB | Eight | Five | Four | Horiz HBL, HFR | Horiz HFL, HBR | Nine | One | Punch | Seven | Six | Span | Three | TimeOut | Two | XSign | + +The naming convention of the gestures classes is as follow: +- V is used for vertical gestures, while H is used for horizontal gestures. +- F identifies the version of the gesture where the front of the hand is facing the camera, while B identifies the version where the back of the hand is facing the camera. +- R is used for right-hand gestures, while L is used for left-hand gestures. + +Below is an illustration image of hand gestures, the image is copied from [[1]](#dataset). +![Hand gesture examples](images/hand_gesture_examples.png) + ### Class RgbdHandGestureLearner Bases: `opendr.engine.learners.Learner` diff --git a/docs/reference/rosbridge.md b/docs/reference/ros2bridge.md similarity index 94% rename from docs/reference/rosbridge.md rename to docs/reference/ros2bridge.md index 6e19acbc51..d0c155e4d7 100755 --- a/docs/reference/rosbridge.md +++ b/docs/reference/ros2bridge.md @@ -59,25 +59,28 @@ ROSBridge.from_ros_pose(self, ros_pose) ``` -Converts a ROS pose into an OpenDR pose. +Converts an OpenDRPose2D message into an OpenDR Pose. Parameters: -- **message**: *ros_bridge.msg.Pose*\ - ROS pose to be converted into an OpenDR pose. +- **ros_pose**: *ros_bridge.msg.OpenDRPose2D*\ + ROS pose to be converted into an OpenDR Pose. #### `ROSBridge.to_ros_pose` ```python ROSBridge.to_ros_pose(self, - ros_pose) + pose) ``` -Converts an OpenDR pose into a ROS pose. +Converts an OpenDR Pose into a OpenDRPose2D msg that can carry the same information, i.e. a list of keypoints, +the pose detection confidence and the pose id. +Each keypoint is represented as an OpenDRPose2DKeypoint with x, y pixel position on input image with (0, 0) +being the top-left corner. Parameters: -- **message**: *engine.target.Pose*\ - OpenDR pose to be converted to ROS pose. +- **pose**: *engine.target.Pose*\ + OpenDR Pose to be converted to ROS OpenDRPose2D. #### `ROSBridge.to_ros_category` diff --git a/docs/reference/semantic-segmentation.md b/docs/reference/semantic-segmentation.md index 783b801810..9a0b0f2969 100644 --- a/docs/reference/semantic-segmentation.md +++ b/docs/reference/semantic-segmentation.md @@ -2,6 +2,11 @@ The *semantic segmentation* module contains the *BisenetLearner* class, which inherit from the abstract class *Learner*. 
+On the table below you can find the detectable classes and their corresponding IDs: + +| Class | Bicyclist | Building | Car | Column Pole | Fence | Pedestrian | Road | Sidewalk | Sign Symbol | Sky | Tree | Unknown | +|--------|-----------|----------|-----|-------------|-------|------------|------|----------|-------------|-----|------|---------| +| **ID** | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | ### Class BisenetLearner Bases: `engine.learners.Learner` diff --git a/docs/reference/single-demonstration-grasping.md b/docs/reference/single-demonstration-grasping.md index 7332a0adb0..a4d8f67dad 100644 --- a/docs/reference/single-demonstration-grasping.md +++ b/docs/reference/single-demonstration-grasping.md @@ -113,7 +113,7 @@ $ make install_runtime_dependencies after installing dependencies, the user must source the workspace in the shell in order to detect the packages: ``` -$ source projects/control/single_demo_grasp/simulation_ws/devel/setup.bash +$ source projects/python/control/single_demo_grasp/simulation_ws/devel/setup.bash ``` ## Demos @@ -125,7 +125,7 @@ Three different nodes must be launched consecutively in order to properly run th ``` 1. $ cd path/to/opendr/home # change accordingly 2. $ source bin/setup.bash -3. $ source projects/control/single_demo_grasp/simulation_ws/devel/setup.bash +3. $ source projects/python/control/single_demo_grasp/simulation_ws/devel/setup.bash 4. $ export WEBOTS_HOME=/usr/local/webots 5. $ roslaunch single_demo_grasping_demo panda_sim.launch ``` @@ -134,7 +134,7 @@ Three different nodes must be launched consecutively in order to properly run th ``` 1. $ cd path/to/opendr/home # change accordingly 2. $ source bin/setup.bash -3. $ source projects/control/single_demo_grasp/simulation_ws/devel/setup.bash +3. $ source projects/python/control/single_demo_grasp/simulation_ws/devel/setup.bash 4. $ roslaunch single_demo_grasping_demo camera_stream_inference.launch ``` @@ -142,7 +142,7 @@ Three different nodes must be launched consecutively in order to properly run th ``` 1. $ cd path/to/opendr/home # change accordingly 2. $ source bin/setup.bash -3. $ source projects/control/single_demo_grasp/simulation_ws/devel/setup.bash +3. $ source projects/python/control/single_demo_grasp/simulation_ws/devel/setup.bash 4. $ roslaunch single_demo_grasping_demo panda_sim_control.launch ``` @@ -150,14 +150,14 @@ Three different nodes must be launched consecutively in order to properly run th You can find an example on how to use the learner class to run inference and see the result in the following directory: ``` -$ cd projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/ +$ cd projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/ ``` simply run: ``` 1. $ cd path/to/opendr/home # change accordingly 2. $ source bin/setup.bash -3. $ source projects/control/single_demo_grasp/simulation_ws/devel/setup.bash -4. $ cd projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/ +3. $ source projects/python/control/single_demo_grasp/simulation_ws/devel/setup.bash +4. $ cd projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/ 5. 
$ ./single_demo_inference.py ``` diff --git a/docs/reference/skeleton-based-action-recognition.md b/docs/reference/skeleton-based-action-recognition.md index 241ff902d1..21eb21e5e5 100644 --- a/docs/reference/skeleton-based-action-recognition.md +++ b/docs/reference/skeleton-based-action-recognition.md @@ -2,29 +2,30 @@ The *skeleton_based_action_recognition* module contains the *SpatioTemporalGCNLearner* and *ProgressiveSpatioTemporalGCNLearner* classes, which inherits from the abstract class *Learner*. -#### Data preparation - Download the NTU-RGB+D skeleton data from [here](https://github.com/shahroudy/NTURGB-D) and the kinetics-skeleton dataset from [here](https://drive.google.com/drive/folders/1SPQ6FmFsjGg3f59uCWfdUWI-5HJM_YhZ). - Then run the following function to preprocess the NTU-RGB+D and Kinetics skeleton data for ST-GCN methods: - - ```python - from opendr.perception.skeleton_based_action_recognition.algorithm.datasets import ntu_gendata - from opendr.perception.skeleton_based_action_recognition.algorithm.datasets import kinetics_gendata - python3 ntu_gendata.py --data_path ./data/nturgbd_raw_skeletons --ignored_sample_path ./algorithm/datasets/ntu_samples_with_missing_skeletons.txt --out_folder ./data/preprocessed_nturgbd - python3 kinetics_gendata.py --data_path ./data/kinetics_raw_skeletons --out_folder ./data/preprocessed_kinetics_skeletons - ``` - You need to specify the path of the downloaded data as `--data_path` and the path of the processed data as `--out_folder`. - ntu_samples_with_missing_skeletons.txt provides the NTU-RGB+D sample indices which don't contain any skeleton. - You need to specify the path of this file with --ignored_sample_path. +#### Data preparation +Download the NTU-RGB+D skeleton data from [here](https://github.com/shahroudy/NTURGB-D) and the kinetics-skeleton dataset from [here](https://drive.google.com/drive/folders/1SPQ6FmFsjGg3f59uCWfdUWI-5HJM_YhZ). +Then run the following function to preprocess the NTU-RGB+D and Kinetics skeleton data for ST-GCN methods: + +```bash +cd src/opendr/perception/skeleton_based_action_recognition/algorithm/datasets + +python3 ntu_gendata.py --data_path ./data/nturgbd_raw_skeletons --ignored_sample_path ./algorithm/datasets/ntu_samples_with_missing_skeletons.txt --out_folder ./data/preprocessed_nturgbd + +python3 kinetics_gendata.py --data_path ./data/kinetics_raw_skeletons --out_folder ./data/preprocessed_kinetics_skeletons +``` +You need to specify the path of the downloaded data as `--data_path` and the path of the processed data as `--out_folder`. +ntu_samples_with_missing_skeletons.txt provides the NTU-RGB+D sample indices which don't contain any skeleton. +You need to specify the path of this file with --ignored_sample_path. ### Class SpatioTemporalGCNLearner Bases: `engine.learners.Learner` -The *SpatioTemporalGCNLearner* class is a wrapper of the ST-GCN [[1]](#1) and the proposed methods TA-GCN [[2]](#2) and ST-BLN [[3]](#3) for Skeleton-based Human +The *SpatioTemporalGCNLearner* class is a wrapper of the ST-GCN [[1]](#1) and the proposed methods TA-GCN [[2]](#2) and ST-BLN [[3]](#3) for Skeleton-based Human Action Recognition. This implementation of ST-GCN can be found in [OpenMMLAB toolbox]( https://github.com/open-mmlab/mmskeleton/tree/b4c076baa9e02e69b5876c49fa7c509866d902c7). -It can be used to perform the baseline method ST-GCN and the proposed methods TA-GCN [[2]](#2) and ST-BLN [[3]](#3) for skeleton-based action recognition. 
-The TA-GCN and ST-BLN methods are proposed on top of ST-GCN and make it more efficient in terms of number of model parameters and floating point operations. +It can be used to perform the baseline method ST-GCN and the proposed methods TA-GCN [[2]](#2) and ST-BLN [[3]](#3) for skeleton-based action recognition. +The TA-GCN and ST-BLN methods are proposed on top of ST-GCN and make it more efficient in terms of number of model parameters and floating point operations. The [SpatioTemporalGCNLearner](/src/opendr/perception/skeleton_based_action_recognition/spatio_temporal_gcn_learner.py) class has the following public methods: @@ -35,62 +36,64 @@ SpatioTemporalGCNLearner(self, lr, batch_size, optimizer_name, lr_schedule, checkpoint_after_iter, checkpoint_load_iter, temp_path, device, num_workers, epochs, experiment_name, device_ind, val_batch_size, drop_after_epoch, - start_epoch, dataset_name, num_class, num_point, - num_person, in_channels, method_name, + start_epoch, dataset_name, num_class, num_point, + num_person, in_channels, method_name, stbln_symmetric, num_frames, num_subframes) ``` Constructor parameters: -- **lr**: *float, default=0.1* +- **lr**: *float, default=0.1*\ Specifies the initial learning rate to be used during training. -- **batch_size**: *int, default=128* +- **batch_size**: *int, default=128*\ Specifies number of skeleton sequences to be bundled up in a batch during training. This heavily affects memory usage, adjust according to your system. -- **optimizer_name**: *str {'sgd', 'adam'}, default='sgd'* +- **optimizer_name**: *str {'sgd', 'adam'}, default='sgd'*\ Specifies the optimizer type that should be used. -- **lr_schedule**: *str, default=' '* +- **lr_schedule**: *str, default=' '*\ Specifies the learning rate scheduler. -- **checkpoint_after_iter**: *int, default=0* +- **checkpoint_after_iter**: *int, default=0*\ Specifies per how many training iterations a checkpoint should be saved. If it is set to 0 no checkpoints will be saved. -- **checkpoint_load_iter**: *int, default=0* +- **checkpoint_load_iter**: *int, default=0*\ Specifies which checkpoint should be loaded. If it is set to 0, no checkpoints will be loaded. -- **temp_path**: *str, default=''* +- **temp_path**: *str, default=''* Specifies a path where the algorithm saves the checkpoints and onnx optimized model (if needed). -- **device**: *{'cpu', 'cuda'}, default='cuda'* +- **device**: *{'cpu', 'cuda'}, default='cuda'*\ Specifies the device to be used. -- **num_workers**: *int, default=32* +- **num_workers**: *int, default=32*\ Specifies the number of workers to be used by the data loader. -- **epochs**: *int, default=50* +- **epochs**: *int, default=50*\ Specifies the number of epochs the training should run for. -- **experiment_name**: *str, default='stgcn_nturgbd'* +- **experiment_name**: *str, default='stgcn_nturgbd'*\ String name to attach to checkpoints. -- **device_ind**: *list, default=[0]* - List of GPU indices to be used if the device is 'cuda'. -- **val_batch_size**: *int, default=256* +- **device_ind**: *list, default=[0]*\ + List of GPU indices to be used if the device is 'cuda'. +- **val_batch_size**: *int, default=256*\ Specifies number of skeleton sequences to be bundled up in a batch during evaluation. This heavily affects memory usage, adjust according to your system. -- **drop_after_epoch**: *list, default=[30,40]* - List of epoch numbers in which the optimizer drops the learning rate. -- **start_epoch**: *int, default=0* - Specifies the starting epoch number for training. 
-- **dataset_name**: *str {'kinetics', 'nturgbd_cv', 'nturgbd_cs'}, default='nturgbd_cv'* - Specifies the name of dataset that is used for training and evaluation. -- **num_class**: *int, default=60* - Specifies the number of classes for the action dataset. -- **num_point**: *int, default=25* - Specifies the number of body joints in each skeleton. -- **num_person**: *int, default=2* +- **drop_after_epoch**: *list, default=[30,40]*\ + List of epoch numbers in which the optimizer drops the learning rate. +- **start_epoch**: *int, default=0*\ + Specifies the starting epoch number for training. +- **dataset_name**: *str {'kinetics', 'nturgbd_cv', 'nturgbd_cs'}, default='nturgbd_cv'* + Specifies the name of dataset that is used for training and evaluation. +- **num_class**: *int, default=60*\ + Specifies the number of classes for the action dataset. +- **num_point**: *int, default=25*\ + Specifies the number of body joints in each skeleton. +- **num_person**: *int, default=2*\ Specifies the number of body skeletons in each frame. -- **in_channels**: *int, default=3* - Specifies the number of input channels for each body joint. -- **graph_type**: *str {'kinetics', 'ntu'}, default='ntu'* - Specifies the type of graph structure associated with the dataset. -- **method_name**: *str {'stgcn', 'stbln', 'tagcn'}, default='stgcn'* - Specifies the name of method to be trained and evaluated. For each method, a different model is trained. -- **stbln_symmetric**: *bool, default=False* - Specifies if the random graph in stbln method is symmetric or not. This parameter is used if method_name is 'stbln'. -- **num_frames**: *int, default=300* - Specifies the number of frames in each skeleton sequence. This parameter is used if the method_name is 'tagcn'. -- **num_subframes**: *int, default=100* - Specifies the number of sub-frames that are going to be selected by the tagcn model. This parameter is used if the method_name is 'tagcn'. +- **in_channels**: *int, default=3*\ + Specifies the number of input channels for each body joint. +- **graph_type**: *str {'kinetics', 'ntu'}, default='ntu'*\ + Specifies the type of graph structure associated with the dataset. +- **method_name**: *str {'stgcn', 'stbln', 'tagcn'}, default='stgcn'*\ + Specifies the name of method to be trained and evaluated. + For each method, a different model is trained. +- **stbln_symmetric**: *bool, default=False*\ + Specifies if the random graph in stbln method is symmetric or not. + This parameter is used if method_name is 'stbln'. +- **num_frames**: *int, default=300*\ + Specifies the number of frames in each skeleton sequence. This parameter is used if the method_name is 'tagcn'. +- **num_subframes**: *int, default=100*\ + Specifies the number of sub-frames that are going to be selected by the tagcn model. This parameter is used if the method_name is 'tagcn'. #### `SpatioTemporalGCNLearner.fit` @@ -101,41 +104,43 @@ SpatioTemporalGCNLearner.fit(self, dataset, val_dataset, logging_path, silent, v val_labels_filename, skeleton_data_type) ``` This method is used for training the algorithm on a train dataset and validating on a val dataset. + Parameters: -- **dataset**: *object* + +- **dataset**: *object*\ Object that holds the training dataset. Can be of type `ExternalDataset` or a custom dataset inheriting from `DatasetIterator`. -- **val_dataset**: *object* - Object that holds the validation dataset. -- **logging_path**: *str, default=''* +- **val_dataset**: *object*\ + Object that holds the validation dataset. 
+- **logging_path**: *str, default=''*\ Path to save TensorBoard log files and the training log files. - If set to None or '', TensorBoard logging is disabled and no log file is created. -- **silent**: *bool, default=False* + If set to None or '', TensorBoard logging is disabled and no log file is created. +- **silent**: *bool, default=False* If set to True, disables all printing of training progress reports and other information to STDOUT. -- **verbose**: *bool, default=True*** +- **verbose**: *bool, default=True*\ If set to True, enables the maximum verbosity. -- **momentum**: *float, default=0.9* - Specifies the momentum value for optimizer. -- **nesterov**: *bool, default=True*** - If set to true, the optimizer uses Nesterov's momentum. -- **weight_decay**: *float, default=0.0001*** - Specifies the weight_decay value of the optimizer. -- **train_data_filename**: *str, default='train_joints.npy'* - Filename that contains the training data. +- **momentum**: *float, default=0.9*\ + Specifies the momentum value for optimizer. +- **nesterov**: *bool, default=True*\ + If set to true, the optimizer uses Nesterov's momentum. +- **weight_decay**: *float, default=0.0001*\ + Specifies the weight_decay value of the optimizer. +- **train_data_filename**: *str, default='train_joints.npy'*\ + Filename that contains the training data. This file should be contained in the dataset path provided. Note that this is a file name, not a path. -- **train_labels_filename**: *str, default='train_labels.pkl'* - Filename of the labels .pkl file. +- **train_labels_filename**: *str, default='train_labels.pkl'*\ + Filename of the labels .pkl file. This file should be contained in the dataset path provided. -- **val_data_filename**: *str, default='val_joints.npy'* +- **val_data_filename**: *str, default='val_joints.npy'*\ Filename that contains the validation data. This file should be contained in the dataset path provided. Note that this is a filename, not a path. -- **val_labels_filename**: *str, default='val_labels.pkl'* +- **val_labels_filename**: *str, default='val_labels.pkl'*\ Filename of the validation labels .pkl file. This file should be contained in the dataset path provided. -- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'* - The data stream that should be used for training and evaluation. +- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'*\ + The data stream that should be used for training and evaluation. #### `SpatioTemporalGCNLearner.eval` ```python @@ -145,55 +150,58 @@ SpatioTemporalGCNLearner.eval(self, val_dataset, val_loader, epoch, silent, verb ``` This method is used to evaluate a trained model on an evaluation dataset. -Returns a dictionary containing stats regarding evaluation. +Returns a dictionary containing stats regarding evaluation. + Parameters: -- **val_dataset**: *object* + +- **val_dataset**: *object*\ Object that holds the evaluation dataset. Can be of type `ExternalDataset` or a custom dataset inheriting from `DatasetIterator`. -- **val_loader**: *object, default=None* +- **val_loader**: *object, default=None*\ Object that holds a Python iterable over the evaluation dataset. Object of `torch.utils.data.DataLoader` class. -- **epoch**: *int, default=0* - The training epoch in which the model is evaluated. -- **silent**: *bool, default=False* +- **epoch**: *int, default=0*\ + The training epoch in which the model is evaluated. 
+- **silent**: *bool, default=False*\ If set to True, disables all printing of evaluation progress reports and other information to STDOUT. -- **verbose**: *bool, default=True* +- **verbose**: *bool, default=True*\ If set to True, enables the maximum verbosity. -- **val_data_filename**: *str, default='val_joints.npy'* +- **val_data_filename**: *str, default='val_joints.npy'*\ Filename that contains the validation data. This file should be contained in the dataset path provided. Note that this is a filename, not a path. -- **val_labels_filename**: *str, default='val_labels.pkl'* +- **val_labels_filename**: *str, default='val_labels.pkl'*\ Filename of the validation labels .pkl file. This file should be contained in the dataset path provided. -- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'* - The data stream that should be used for training and evaluation. -- **save_score**: *bool, default=False* - If set to True, it saves the classification score of all samples in differenc classes - in a log file. Default to False. -- **wrong_file**: *str, default=None* - If set to True, it saves the results of wrongly classified samples. Default to False. -- **result_file**: *str, default=None* - If set to True, it saves the classification results of all samples. Default to False. -- **show_topk**: *list, default=[1, 5]* - Is set to a list of integer numbers defining the k in top-k accuracy. Default is set to [1,5]. - +- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'*\ + The data stream that should be used for training and evaluation. +- **save_score**: *bool, default=False*\ + If set to True, it saves the classification score of all samples in different classes + in a log file. +- **wrong_file**: *str, default=None*\ + If set to True, it saves the results of wrongly classified samples. +- **result_file**: *str, default=None*\ + If set to True, it saves the classification results of all samples. +- **show_topk**: *list, default=[1, 5]*\ + Is set to a list of integer numbers defining the k in top-k accuracy. + #### `SpatioTemporalGCNLearner.init_model` ```python SpatioTemporalGCNLearner.init_model(self) ``` -This method is used to initialize the imported model and its loss function. - +This method is used to initialize the imported model and its loss function. + #### `SpatioTemporalGCNLearner.infer` ```python SpatioTemporalGCNLearner.infer(self, SkeletonSeq_batch) ``` -This method is used to perform action recognition on a sequence of skeletons. -It returns the action category as an object of `engine.target.Category` if a proper input object `engine.data.SkeletonSequence` is given. +This method is used to perform action recognition on a sequence of skeletons. +It returns the action category as an object of `engine.target.Category` if a proper input object `engine.data.SkeletonSequence` is given. Parameters: -- **SkeletonSeq_batch**: *object*** + +- **SkeletonSeq_batch**: *object*\ Object of type engine.data.SkeletonSequence. #### `SpatioTemporalGCNLearner.save` @@ -201,20 +209,18 @@ Parameters: SpatioTemporalGCNLearner.save(self, path, model_name, verbose) ``` This method is used to save a trained model. -Provided with the path "/my/path" (absolute or relative), it creates the "path" directory, if it does not already -exist. Inside this folder, the model is saved as "model_name.pt" and the metadata file as "model_name.json". If the directory -already exists, the "model_name.pt" and "model_name.json" files are overwritten. 
+Provided with the path "/my/path" (absolute or relative), it creates the "path" directory, if it does not already exist. +Inside this folder, the model is saved as "model_name.pt" and the metadata file as "model_name.json". If the directory already exists, the "model_name.pt" and "model_name.json" files are overwritten. -If [`self.optimize`](/src/opendr/perception/skeleton_based_action_recognition/spatio_temporal_gcn_learner.py#L539) was run previously, it saves the optimized ONNX model in -a similar fashion with an ".onnx" extension, by copying it from the self.temp_path it was saved previously -during conversion. +If [`self.optimize`](/src/opendr/perception/skeleton_based_action_recognition/spatio_temporal_gcn_learner.py#L539) was run previously, it saves the optimized ONNX model in a similar fashion with an ".onnx" extension, by copying it from the self.temp_path it was saved previously during conversion. Parameters: -- **path**: *str* + +- **path**: *str*\ Path to save the model. -- **model_name**: *str* - The file name to be saved. -- **verbose**: *bool, default=False* +- **model_name**: *str*\ + The file name to be saved. +- **verbose**: *bool, default=False*\ If set to True, prints a message on success. #### `SpatioTemporalGCNLearner.load` @@ -226,11 +232,12 @@ This method is used to load a previously saved model from its saved folder. Loads the model from inside the directory of the path provided, using the metadata .json file included. Parameters: -- **path**: *str* + +- **path**: *str*\ Path of the model to be loaded. -- **model_name**: *str* - The file name to be loaded. -- **verbose**: *bool, default=False* +- **model_name**: *str*\ + The file name to be loaded. +- **verbose**: *bool, default=False*\ If set to True, prints a message on success. @@ -242,7 +249,8 @@ SpatioTemporalGCNLearner.optimize(self, do_constant_folding) This method is used to optimize a trained model to ONNX format which can be then used for inference. Parameters: -- **do_constant_folding**: *bool, default=False* + +- **do_constant_folding**: *bool, default=False*\ ONNX format optimization. If True, the constant-folding optimization is applied to the model during export. Constant-folding optimization will replace some of the operations that have all constant inputs, with pre-computed constant nodes. @@ -255,27 +263,29 @@ SpatioTemporalGCNLearner.multi_stream_eval(self, dataset, scores, data_filename, labels_filename, skeleton_data_type, verbose, silent) ``` -This method is used to ensemble the classification results of the model on two or more data streams like joints, bones and motions. -It returns the top-k classification performance of ensembled model. +This method is used to ensemble the classification results of the model on two or more data streams like joints, bones and motions. +It returns the top-k classification performance of ensembled model. Parameters: -- **dataset**: *object* + +- **dataset**: *object*\ Object that holds the dataset. Can be of type `ExternalDataset` or a custom dataset inheriting from `DatasetIterator`. -- **score**: *list* - A list of score arrays. Each array in the list contains the evaluation results for a data stream. -- **data_filename**: *str, default='val_joints.npy'* +- **score**: *list*\ + A list of score arrays. + Each array in the list contains the evaluation results for a data stream. +- **data_filename**: *str, default='val_joints.npy'*\ Filename that contains the validation data. This file should be contained in the dataset path provided. 
Note that this is a filename, not a path. -- **labels_filename**: *str, default='val_labels.pkl'* +- **labels_filename**: *str, default='val_labels.pkl'*\ Filename of the validation labels .pkl file. This file should be contained in the dataset path provided. -- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'* - The data stream that should be used for training and evaluation. -- **silent**: *bool, default=False* +- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'*\ + The data stream that should be used for training and evaluation. +- **silent**: *bool, default=False*\ If set to True, disables all printing of evaluation progress reports and other information to STDOUT. -- **verbose**: *bool, default=True* +- **verbose**: *bool, default=True*\ If set to True, enables the maximum verbosity. @@ -285,37 +295,36 @@ Parameters: SpatioTemporalGCNLearner.download(self, path, mode, verbose, url, file_name) ``` -Download utility for various skeleton-based action recognition components. Downloads files depending on mode and -saves them in the path provided. It supports downloading: -1. the pretrained weights for stgcn, tagcn and stbln models. -2. a dataset containing one or more skeleton sequences and its labels. +Download utility for various skeleton-based action recognition components. Downloads files depending on mode and saves them in the path provided. It supports downloading: +1. the pretrained weights for stgcn, tagcn and stbln models. +2. a dataset containing one or more skeleton sequences and its labels. Parameters: -- **path**: *str, default=None* + +- **path**: *str, default=None*\ Local path to save the files, defaults to self.parent_dir if None. -- **mode**: *str, default="pretrained"* +- **mode**: *str, default="pretrained"*\ What file to download, can be one of "pretrained", "train_data", "val_data", "test_data" -- **verbose**: *bool, default=False* +- **verbose**: *bool, default=False*\ Whether to print messages in the console. -- **url**: *str, default=OpenDR FTP URL* +- **url**: *str, default=OpenDR FTP URL*\ URL of the FTP server. -- **file_name**: *str* - The name of the file containing the pretrained model. - +- **file_name**: *str*\ + The name of the file containing the pretrained model. #### Examples -* **Training example using an `ExternalDataset`**. +* **Training example using an `ExternalDataset`**. The training and evaluation dataset should be present in the path provided, along with the labels file. The `batch_size` argument should be adjusted according to available memory. ```python from opendr.perception.skeleton_based_action_recognition.spatio_temporal_gcn_learner import SpatioTemporalGCNLearner from opendr.engine.datasets import ExternalDataset - + training_dataset = ExternalDataset(path='./data/preprocessed_nturgbd/xview', dataset_type='NTURGBD') validation_dataset = ExternalDataset(path='./data/preprocessed_nturgbd/xview', dataset_type='NTURGBD') - + stgcn_learner = SpatioTemporalGCNLearner(temp_path='./parent_dir', batch_size=64, epochs=50, checkpoint_after_iter=10, val_batch_size=128, @@ -330,9 +339,9 @@ Parameters: skeleton_data_type='joint') stgcn_learner.save(path='./saved_models/stgcn_nturgbd_cv_checkpoints', model_name='test_stgcn') ``` - In a similar manner train the TA-GCN model by specifying the number of important frames that the model selects as num_subframes. - The number of frames in both NTU-RGB+D and Kinetics-skeleton is 300. 
- + In a similar manner train the TA-GCN model by specifying the number of important frames that the model selects as num_subframes. + The number of frames in both NTU-RGB+D and Kinetics-skeleton is 300. + ```python tagcn_learner = SpatioTemporalGCNLearner(temp_path='./parent_dir', batch_size=64, epochs=50, @@ -348,9 +357,9 @@ Parameters: skeleton_data_type='joint') tagcn_learner.save(path='./saved_models/tagcn_nturgbd_cv_checkpoints', model_name='test_tagcn') ``` - - For training the ST-BLN model, set the method_name to 'stbln' and specify if the model uses a symmetric attention matrix or not by setting stbln_symmetric to True or False. - + + For training the ST-BLN model, set the method_name to 'stbln' and specify if the model uses a symmetric attention matrix or not by setting stbln_symmetric to True or False. + ```python stbln_learner = SpatioTemporalGCNLearner(temp_path='./parent_dir', @@ -367,7 +376,7 @@ Parameters: skeleton_data_type='joint') stbln_learner.save(path='./saved_models/stbln_nturgbd_cv_checkpoints', model_name='test_stbln') ``` - + * **Inference on a test skeleton sequence** ```python @@ -381,15 +390,15 @@ Parameters: method_name='stgcn') # Download the default pretrained stgcn model in the parent_dir stgcn_learner.download( - mode="pretrained", path='./parent_dir/pretrained_models', file_name='pretrained_stgcn') - + mode="pretrained", path='./parent_dir/pretrained_models', file_name='pretrained_stgcn') + stgcn_learner.load('./parent_dir/pretrained_models', model_name='pretrained_stgcn') - test_data_path = stgcn_learner.download(mode="test_data") # Download a test data + test_data_path = stgcn_learner.download(mode="test_data") # Download a test data test_data = numpy.load(test_data_path) action_category = stgcn_learner.infer(test_data) - + ``` - + * **Optimization example for a previously trained model.** Inference can be run with the trained model after running self.optimize. ```python @@ -403,23 +412,21 @@ Parameters: experiment_name='stgcn_nturgbd', method_name='stgcn') stgcn_learner.download( - mode="pretrained", path='./parent_dir/pretrained_models', file_name='pretrained_stgcn') - + mode="pretrained", path='./parent_dir/pretrained_models', file_name='pretrained_stgcn') + stgcn_learner.load(path='./parent_dir/pretrained_models', file_name='pretrained_stgcn') stgcn_learner.optimize(do_constant_folding=True) stgcn_learner.save(path='./parent_dir/optimized_model', model_name='optimized_stgcn') ``` - The inference and optimization can be performed for TA-GCN and ST-BLN methods in a similar manner only by specifying the method_name to 'tagcn' or 'stbln', respectively in the learner class constructor. + The inference and optimization can be performed for TA-GCN and ST-BLN methods in a similar manner only by specifying the method_name to 'tagcn' or 'stbln', respectively in the learner class constructor. ### Class ProgressiveSpatioTemporalGCNLearner Bases: `engine.learners.Learner` -The *ProgressiveSpatioTemporalGCNLearner* class is an implementation of the proposed method PST-GCN [[4]](#4) for Skeleton-based Human -Action Recognition. -It finds an optimized and data dependant spatio-temporal graph convolutional network topology for skeleton-based action recognition. 
-The [ProgressiveSpatioTemporalGCNLearner](/src/opendr/perception/skeleton_based_action_recognition/progressive_spatio_temporal_gcn_learner.py) class has the -following public methods: +The *ProgressiveSpatioTemporalGCNLearner* class is an implementation of the proposed method PST-GCN [[4]](#4) for Skeleton-based Human Action Recognition. +It finds an optimized and data dependant spatio-temporal graph convolutional network topology for skeleton-based action recognition. +The [ProgressiveSpatioTemporalGCNLearner](/src/opendr/perception/skeleton_based_action_recognition/progressive_spatio_temporal_gcn_learner.py) class has the following public methods: #### `ProgressiveSpatioTemporalGCNLearner` constructor @@ -428,67 +435,73 @@ ProgressiveSpatioTemporalGCNLearner(self, lr, batch_size, optimizer_name, lr_sch checkpoint_after_iter, checkpoint_load_iter, temp_path, device, num_workers, epochs, experiment_name, device_ind, val_batch_size, drop_after_epoch, - start_epoch, dataset_name, + start_epoch, dataset_name, blocksize, numblocks, numlayers, topology, layer_threshold, block_threshold) ``` Constructor parameters: -- **lr**: *float, default=0.1* + +- **lr**: *float, default=0.1*\ Specifies the initial learning rate to be used during training. -- **batch_size**: *int, default=128* - Specifies number of skeleton sequences to be bundled up in a batch during training. This heavily affects memory usage, adjust according to your system. -- **optimizer_name**: *str {'sgd', 'adam'}, default='sgd'* +- **batch_size**: *int, default=128*\ + Specifies number of skeleton sequences to be bundled up in a batch during training. + This heavily affects memory usage, adjust according to your system. +- **optimizer_name**: *str {'sgd', 'adam'}, default='sgd'*\ Specifies the optimizer type that should be used. -- **lr_schedule**: *str, default=' '* +- **lr_schedule**: *str, default=' '*\ Specifies the learning rate scheduler. -- **checkpoint_after_iter**: *int, default=0* - Specifies per how many training iterations a checkpoint should be saved. If it is set to 0 no checkpoints will be saved. -- **checkpoint_load_iter**: *int, default=0* - Specifies which checkpoint should be loaded. If it is set to 0, no checkpoints will be loaded. -- **temp_path**: *str, default=''* +- **checkpoint_after_iter**: *int, default=0*\ + Specifies per how many training iterations a checkpoint should be saved. + If it is set to 0 no checkpoints will be saved. +- **checkpoint_load_iter**: *int, default=0*\ + Specifies which checkpoint should be loaded. + If it is set to 0, no checkpoints will be loaded. +- **temp_path**: *str, default=''*\ Specifies a path where the algorithm saves the checkpoints and onnx optimized model (if needed). -- **device**: *{'cpu', 'cuda'}, default='cuda'* +- **device**: *{'cpu', 'cuda'}, default='cuda'*\ Specifies the device to be used. -- **num_workers**: *int, default=32* +- **num_workers**: *int, default=32*\ Specifies the number of workers to be used by the data loader. -- **epochs**: *int, default=50* +- **epochs**: *int, default=50*\ Specifies the number of epochs the training should run for. -- **experiment_name**: *str, default='stgcn_nturgbd'* +- **experiment_name**: *str, default='stgcn_nturgbd'* String name to attach to checkpoints. -- **device_ind**: *list, default=[0]* - List of GPU indices to be used if the device is 'cuda'. -- **val_batch_size**: *int, default=256* - Specifies number of skeleton sequences to be bundled up in a batch during evaluation. 
This heavily affects memory usage, adjust according to your system. -- **drop_after_epoch**: *list, default=[30,40]* - List of epoch numbers in which the optimizer drops the learning rate. -- **start_epoch**: *int, default=0* - Specifies the starting epoch number for training. -- **dataset_name**: *str {'kinetics', 'nturgbd_cv', 'nturgbd_cs'}, default='nturgbd_cv'* - Specifies the name of dataset that is used for training and evaluation. -- **num_class**: *int, default=60* - Specifies the number of classes for the action dataset. -- **num_point**: *int, default=25* - Specifies the number of body joints in each skeleton. -- **num_person**: *int, default=2* +- **device_ind**: *list, default=[0]*\ + List of GPU indices to be used if the device is 'cuda'. +- **val_batch_size**: *int, default=256*\ + Specifies number of skeleton sequences to be bundled up in a batch during evaluation. + This heavily affects memory usage, adjust according to your system. +- **drop_after_epoch**: *list, default=[30,40]*\ + List of epoch numbers in which the optimizer drops the learning rate. +- **start_epoch**: *int, default=0*\ + Specifies the starting epoch number for training. +- **dataset_name**: *str {'kinetics', 'nturgbd_cv', 'nturgbd_cs'}, default='nturgbd_cv'*\ + Specifies the name of dataset that is used for training and evaluation. +- **num_class**: *int, default=60*\ + Specifies the number of classes for the action dataset. +- **num_point**: *int, default=25*\ + Specifies the number of body joints in each skeleton. +- **num_person**: *int, default=2*\ Specifies the number of body skeletons in each frame. -- **in_channels**: *int, default=3* - Specifies the number of input channels for each body joint. -- **graph_type**: *str {'kinetics', 'ntu'}, default='ntu'* - Specifies the type of graph structure associated with the dataset. -- **block_size**: *int, default=20* - Specifies the number of output channels (or neurons) that are added to each layer of the network at each progression iteration. -- **numblocks**: *int, default=10* - Specifies the maximum number of blocks that are added to each layer of the network at each progression iteration. -- **numlayers**: *int, default=10* +- **in_channels**: *int, default=3*\ + Specifies the number of input channels for each body joint. +- **graph_type**: *str {'kinetics', 'ntu'}, default='ntu'*\ + Specifies the type of graph structure associated with the dataset. +- **block_size**: *int, default=20*\ + Specifies the number of output channels (or neurons) that are added to each layer of the network at each progression iteration. +- **numblocks**: *int, default=10*\ + Specifies the maximum number of blocks that are added to each layer of the network at each progression iteration. +- **numlayers**: *int, default=10*\ Specifies the maximum number of layers that are built for the network. -- **topology**: *list, default=[]* - Specifies the initial topology of the network. The default is set to [], since the method gets an empty network as input and builds it progressively. -- **layer_threshold**: *float, default=1e-4* - Specifies the threshold which is used by the method to identify when it should stop adding new layers. -- **block_threshold**: *float, default=1e-4* - Specifies the threshold which is used by the model to identify when it should stop adding new blocks in each layer. - +- **topology**: *list, default=[]*\ + Specifies the initial topology of the network. 
+ The default is set to [], since the method gets an empty network as input and builds it progressively. +- **layer_threshold**: *float, default=1e-4*\ + Specifies the threshold which is used by the method to identify when it should stop adding new layers. +- **block_threshold**: *float, default=1e-4*\ + Specifies the threshold which is used by the model to identify when it should stop adding new blocks in each layer. + #### `ProgressiveSpatioTemporalGCNLearner.fit` ```python @@ -499,41 +512,43 @@ ProgressiveSpatioTemporalGCNLearner.fit(self, dataset, val_dataset, logging_path ``` This method is used for training the algorithm on a train dataset and validating on a val dataset. + Parameters: -- **dataset**: *object* + +- **dataset**: *object*\ Object that holds the training dataset. Can be of type `ExternalDataset` or a custom dataset inheriting from `DatasetIterator`. -- **val_dataset**: *object* - Object that holds the validation dataset. -- **logging_path**: *str, default=''* +- **val_dataset**: *object*\ + Object that holds the validation dataset. +- **logging_path**: *str, default=''*\ Path to save TensorBoard log files and the training log files. - If set to None or '', TensorBoard logging is disabled and no log file is created. -- **silent**: *bool, default=False* + If set to None or '', TensorBoard logging is disabled and no log file is created. +- **silent**: *bool, default=False*\ If set to True, disables all printing of training progress reports and other information to STDOUT. -- **verbose**: *bool, default=True*** +- **verbose**: *bool, default=True*\ If set to True, enables the maximum verbosity. -- **momentum**: *float, default=0.9* - Specifies the momentum value for optimizer. -- **nesterov**: *bool, default=True*** - If set to true, the optimizer uses Nesterov's momentum. -- **weight_decay**: *float, default=0.0001*** - Specifies the weight_decay value of the optimizer. -- **train_data_filename**: *str, default='train_joints.npy'* - Filename that contains the training data. +- **momentum**: *float, default=0.9*\ + Specifies the momentum value for optimizer. +- **nesterov**: *bool, default=True*\ + If set to true, the optimizer uses Nesterov's momentum. +- **weight_decay**: *float, default=0.0001*\ + Specifies the weight_decay value of the optimizer. +- **train_data_filename**: *str, default='train_joints.npy'*\ + Filename that contains the training data. This file should be contained in the dataset path provided. Note that this is a file name, not a path. -- **train_labels_filename**: *str, default='train_labels.pkl'* - Filename of the labels .pkl file. +- **train_labels_filename**: *str, default='train_labels.pkl'*\ + Filename of the labels .pkl file. This file should be contained in the dataset path provided. -- **val_data_filename**: *str, default='val_joints.npy'* +- **val_data_filename**: *str, default='val_joints.npy'*\ Filename that contains the validation data. This file should be contained in the dataset path provided. Note that this is a filename, not a path. -- **val_labels_filename**: *str, default='val_labels.pkl'* +- **val_labels_filename**: *str, default='val_labels.pkl'*\ Filename of the validation labels .pkl file. This file should be contained in the dataset path provided. -- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'* - The data stream that should be used for training and evaluation. 
+- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'*\ + The data stream that should be used for training and evaluation. #### `ProgressiveSpatioTemporalGCNLearner.eval` @@ -544,112 +559,113 @@ ProgressiveSpatioTemporalGCNLearner.eval(self, val_dataset, val_loader, epoch, s ``` This method is used to evaluate a trained model on an evaluation dataset. -Returns a dictionary containing stats regarding evaluation. +Returns a dictionary containing stats regarding evaluation. + Parameters: -- **val_dataset**: *object* +- **val_dataset**: *object*\ Object that holds the evaluation dataset. Can be of type `ExternalDataset` or a custom dataset inheriting from `DatasetIterator`. -- **val_loader**: *object, default=None* +- **val_loader**: *object, default=None*\ Object that holds a Python iterable over the evaluation dataset. Object of `torch.utils.data.DataLoader` class. -- **epoch**: *int, default=0* - The training epoch in which the model is evaluated. -- **silent**: *bool, default=False* +- **epoch**: *int, default=0*\ + The training epoch in which the model is evaluated. +- **silent**: *bool, default=False*\ If set to True, disables all printing of evaluation progress reports and other information to STDOUT. -- **verbose**: *bool, default=True* +- **verbose**: *bool, default=True*\ If set to True, enables the maximum verbosity. -- **val_data_filename**: *str, default='val_joints.npy'* +- **val_data_filename**: *str, default='val_joints.npy'*\ Filename that contains the validation data. This file should be contained in the dataset path provided. Note that this is a filename, not a path. -- **val_labels_filename**: *str, default='val_labels.pkl'* +- **val_labels_filename**: *str, default='val_labels.pkl'*\ Filename of the validation labels .pkl file. This file should be contained in the dataset path provided. -- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'* - The data stream that should be used for training and evaluation. -- **save_score**: *bool, default=False* - If set to True, it saves the classification score of all samples in differenc classes - in a log file. Default to False. -- **wrong_file**: *str, default=None* - If set to True, it saves the results of wrongly classified samples. Default to False. -- **result_file**: *str, default=None* - If set to True, it saves the classification results of all samples. Default to False. -- **show_topk**: *list, default=[1, 5]* - Is set to a list of integer numbers defining the k in top-k accuracy. Default is set to [1,5]. +- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'*\ + The data stream that should be used for training and evaluation. +- **save_score**: *bool, default=False*\ + If set to True, it saves the classification score of all samples in different classes in a log file. +- **wrong_file**: *str, default=None*\ + If set to True, it saves the results of wrongly classified samples. +- **result_file**: *str, default=None*\ + If set to True, it saves the classification results of all samples. +- **show_topk**: *list, default=[1, 5]*\ + Is set to a list of integer numbers defining the k in top-k accuracy. #### `ProgressiveSpatioTemporalGCNLearner.init_model` ```python ProgressiveSpatioTemporalGCNLearner.init_model(self) ``` -This method is used to initialize the imported model and its loss function. - - +This method is used to initialize the imported model and its loss function. 
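+
+As a quick illustration of how the constructor and the `eval` arguments documented above fit together, the following minimal sketch evaluates a pretrained PST-GCN model on the NTU-RGB+D cross-view validation split.
+It is not one of the official toolkit examples; the dataset location, the validation file names and the `pretrained_pstgcn` weights are assumptions borrowed from the Examples section further down, so adjust them to your own setup.
+
+```python
+from opendr.perception.skeleton_based_action_recognition.progressive_spatio_temporal_gcn_learner import ProgressiveSpatioTemporalGCNLearner
+from opendr.engine.datasets import ExternalDataset
+
+# Validation split of the preprocessed NTU-RGB+D cross-view data (assumed location, as in the Examples below)
+validation_dataset = ExternalDataset(path='./data/preprocessed_nturgbd/xview', dataset_type='NTURGBD')
+
+pstgcn_learner = ProgressiveSpatioTemporalGCNLearner(temp_path='./parent_dir', batch_size=64, val_batch_size=128,
+                                                     dataset_name='nturgbd_cv', experiment_name='pstgcn_nturgbd',
+                                                     blocksize=20, numblocks=1, numlayers=1, topology=[],
+                                                     layer_threshold=1e-4, block_threshold=1e-4)
+
+# Fetch and load the default pretrained PST-GCN weights (file name assumed to match the optimization example below)
+pstgcn_learner.download(mode="pretrained", path='./parent_dir/pretrained_models', file_name='pretrained_pstgcn')
+pstgcn_learner.load(path='./parent_dir/pretrained_models', model_name='pretrained_pstgcn')
+
+# Evaluate on the joint data stream; a dictionary with evaluation statistics is returned
+results = pstgcn_learner.eval(validation_dataset,
+                              val_data_filename='val_joints.npy',
+                              val_labels_filename='val_labels.pkl',
+                              skeleton_data_type='joint')
+```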
+ + #### `ProgressiveSpatioTemporalGCNLearner.network_builder` ```python ProgressiveSpatioTemporalGCNLearner.network_builder(self, dataset, val_dataset, train_data_filename, train_labels_filename, val_data_filename, val_labels_filename, skeleton_data_type, verbose) ``` -This method implement the ST-GCN Augmentation Module (ST-GCN-AM) which builds the network topology progressively. +This method implement the ST-GCN Augmentation Module (ST-GCN-AM) which builds the network topology progressively. + Parameters: -- **dataset**: *object* +- **dataset**: *object*\ Object that holds the training dataset. -- **val_dataset**: *object* +- **val_dataset**: *object*\ Object that holds the evaluation dataset. Can be of type `ExternalDataset` or a custom dataset inheriting from `DatasetIterator`. -- **train_data_filename**: *str, default='train_joints.npy'* - Filename that contains the training data. +- **train_data_filename**: *str, default='train_joints.npy'*\ + Filename that contains the training data. This file should be contained in the dataset path provided. Note that this is a file name, not a path. -- **train_labels_filename**: *str, default='train_labels.pkl'* - Filename of the labels .pkl file. +- **train_labels_filename**: *str, default='train_labels.pkl'*\ + Filename of the labels .pkl file. This file should be contained in the dataset path provided. -- **val_data_filename**: *str, default='val_joints.npy'* +- **val_data_filename**: *str, default='val_joints.npy'*\ Filename that contains the validation data. This file should be contained in the dataset path provided. Note that this is a filename, not a path. -- **val_labels_filename**: *str, default='val_labels.pkl'* +- **val_labels_filename**: *str, default='val_labels.pkl'*\ Filename of the validation labels .pkl file. This file should be contained in the dataset path provided. -- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'* - The data stream that should be used for training and evaluation. -- **verbose**: *bool, default=True*** +- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'*\ + The data stream that should be used for training and evaluation. +- **verbose**: *bool, default=True*\ Whether to print messages in the console. - - + + #### `ProgressiveSpatioTemporalGCNLearner.infer` ```python ProgressiveSpatioTemporalGCNLearner.infer(self, SkeletonSeq_batch) ``` -This method is used to perform action recognition on a sequence of skeletons. -It returns the action category as an object of `engine.target.Category` if a proper input object `engine.data.SkeletonSequence` is given. +This method is used to perform action recognition on a sequence of skeletons. +It returns the action category as an object of `engine.target.Category` if a proper input object `engine.data.SkeletonSequence` is given. Parameters: -- **SkeletonSeq_batch**: *object*** + +- **SkeletonSeq_batch**: *object*\ Object of type engine.data.SkeletonSequence. #### `ProgressiveSpatioTemporalGCNLearner.save` ```python ProgressiveSpatioTemporalGCNLearner.save(self, path, model_name, verbose) ``` + This method is used to save a trained model. -Provided with the path "/my/path" (absolute or relative), it creates the "path" directory, if it does not already -exist. Inside this folder, the model is saved as "model_name.pt" and the metadata file as "model_name.json". If the directory -already exists, the "model_name.pt" and "model_name.json" files are overwritten. 
+Provided with the path "/my/path" (absolute or relative), it creates the "path" directory, if it does not already exist. +Inside this folder, the model is saved as "model_name.pt" and the metadata file as "model_name.json". If the directory already exists, the "model_name.pt" and "model_name.json" files are overwritten. -If [`self.optimize`](/src/opendr/perception/skeleton_based_action_recognition/progressive_spatio_temporal_gcn_learner.py#L576) was run previously, it saves the optimized ONNX model in -a similar fashion with an ".onnx" extension, by copying it from the self.temp_path it was saved previously -during conversion. +If [`self.optimize`](/src/opendr/perception/skeleton_based_action_recognition/progressive_spatio_temporal_gcn_learner.py#L576) was run previously, it saves the optimized ONNX model in a similar fashion with an ".onnx" extension, by copying it from the self.temp_path it was saved previously during conversion. Parameters: -- **path**: *str* + +- **path**: *str*\ Path to save the model. -- **model_name**: *str* - The file name to be saved. -- **verbose**: *bool, default=False* +- **model_name**: *str*\ + The file name to be saved. +- **verbose**: *bool, default=False*\ If set to True, prints a message on success. #### `ProgressiveSpatioTemporalGCNLearner.load` @@ -661,11 +677,12 @@ This method is used to load a previously saved model from its saved folder. Loads the model from inside the directory of the path provided, using the metadata .json file included. Parameters: -- **path**: *str* + +- **path**: *str*\ Path of the model to be loaded. -- **model_name**: *str* - The file name to be loaded. -- **verbose**: *bool, default=False* +- **model_name**: *str*\ + The file name to be loaded. +- **verbose**: *bool, default=False*\ If set to True, prints a message on success. @@ -677,7 +694,8 @@ ProgressiveSpatioTemporalGCNLearner.optimize(self, do_constant_folding) This method is used to optimize a trained model to ONNX format which can be then used for inference. Parameters: -- **do_constant_folding**: *bool, default=False* + +- **do_constant_folding**: *bool, default=False*\ ONNX format optimization. If True, the constant-folding optimization is applied to the model during export. Constant-folding optimization will replace some of the operations that have all constant inputs, with pre-computed constant nodes. @@ -689,27 +707,28 @@ ProgressiveSpatioTemporalGCNLearner.multi_stream_eval(self, dataset, scores, dat labels_filename, skeleton_data_type, verbose, silent) ``` -This method is used to ensemble the classification results of the model on two or more data streams like joints, bones and motions. -It returns the top-k classification performance of ensembled model. +This method is used to ensemble the classification results of the model on two or more data streams like joints, bones and motions. +It returns the top-k classification performance of ensembled model. Parameters: -- **dataset**: *object* + +- **dataset**: *object*\ Object that holds the dataset. Can be of type `ExternalDataset` or a custom dataset inheriting from `DatasetIterator`. -- **score**: *list* +- **score**: *list*\ A list of score arrays. Each array in the list contains the evaluation results for a data stream. -- **data_filename**: *str, default='val_joints.npy'* +- **data_filename**: *str, default='val_joints.npy'*\ Filename that contains the validation data. This file should be contained in the dataset path provided. Note that this is a filename, not a path. 
-- **labels_filename**: *str, default='val_labels.pkl'* +- **labels_filename**: *str, default='val_labels.pkl'*\ Filename of the validation labels .pkl file. This file should be contained in the dataset path provided. -- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'* - The data stream that should be used for training and evaluation. -- **silent**: *bool, default=False* +- **skeleton_data_type**: *str {'joint', 'bone', 'motion'}, default='joint'*\ + The data stream that should be used for training and evaluation. +- **silent**: *bool, default=False*\ If set to True, disables all printing of evaluation progress reports and other information to STDOUT. -- **verbose**: *bool, default=True* +- **verbose**: *bool, default=True*\ If set to True, enables the maximum verbosity. @@ -719,27 +738,238 @@ Parameters: ProgressiveSpatioTemporalGCNLearner.download(self, path, mode, verbose, url, file_name) ``` -Download utility for various skeleton-based action recognition components. Downloads files depending on mode and -saves them in the path provided. It supports downloading: -1. the pretrained weights for stgcn, tagcn and stbln models. -2. a dataset containing one or more skeleton sequences and its labels. +Download utility for various skeleton-based action recognition components. +Downloads files depending on mode and saves them in the path provided. +It supports downloading: +1. the pretrained weights for stgcn, tagcn and stbln models. +2. a dataset containing one or more skeleton sequences and its labels. Parameters: -- **path**: *str, default=None* + +- **path**: *str, default=None*\ Local path to save the files, defaults to self.parent_dir if None. -- **mode**: *str, default="pretrained"* +- **mode**: *str, default="pretrained"*\ What file to download, can be one of "pretrained", "train_data", "val_data", "test_data" -- **verbose**: *bool, default=False* +- **verbose**: *bool, default=False*\ Whether to print messages in the console. -- **url**: *str, default=OpenDR FTP URL* +- **url**: *str, default=OpenDR FTP URL*\ URL of the FTP server. -- **file_name**: *str* - The name of the file containing the pretrained model. +- **file_name**: *str*\ + The name of the file containing the pretrained model. + + +### Class CoSTGCNLearner +Bases: `engine.learners.Learner` + +The *CoSTGCNLearner* class is an implementation of the proposed method CoSTGCN [[8]](#8) for Continual-Skeleton-based Human Action Recognition. +It performs skeleton-based action recognition continuously in a frame-wise manner. +The [CoSTGCNLearner](/src/opendr/perception/skeleton_based_action_recognition/continual_stgcn_learner.py) class has the following public methods: + + +#### `CoSTGCNLearner` constructor +```python +CoSTGCNLearner(self, lr, iters, batch_size, optimizer, lr_schedule, backbone, network_head, + checkpoint_after_iter, checkpoint_load_iter, temp_path, + device, loss, weight_decay, momentum, drop_last, pin_memory, num_workers, seed, + num_classes, num_point, num_person, in_channels, graph_type, sequence_len + ) +``` + +Constructor parameters: + +- **lr**: *float, default=0.001*\ + Specifies the learning rate to be used during training. +- **iters**: *int, default=10*\ + Number of epochs to train for. +- **batch_size**: *int, default=64*\ + Specifies number of skeleton sequences to be bundled up in a batch during training. + This heavily affects memory usage, adjust according to your system. +- **optimizer**: *str {'sgd', 'adam'}, default='adam'*\ + Name of optimizer to use ("sgd" or "adam"). 
+- **lr_schedule**: *str, default=''* + Specifies the learning rate scheduler. +- **network_head**: *str, default='classification'*\ + Head of network (only "classification" is currently available). +- **checkpoint_after_iter**: *int, default=0*\ + Unused parameter. +- **checkpoint_load_iter**: *int, default=0*\ + Unused parameter. +- **temp_path**: *str, default=''*\ + Path in which to store temporary files. +- **device**: *{'cpu', 'cuda'}, default='cuda'*\ + Specifies the device to be used. +- **loss**: *str, default="cross_entropy"*\ + Name of loss in torch.nn.functional to use. Defaults to "cross_entropy". +- **weight_decay**: *float, default=1e-5*\ + Weight decay used for optimization. Defaults to 1e-5. +- **momentum**: *float, default=0.9*\ + Momentum used for optimization. Defaults to 0.9. +- **drop_last**: *bool, default=True*\ + Drop last data point if a batch cannot be filled. Defaults to True. +- **pin_memory**: *bool, default=False*\ + Pin memory in dataloader. Defaults to False. +- **num_workers**: *int, default=0*\ + Specifies the number of workers to be used by the data loader. +- **seed**: *int, default=123*\ + Random seed. Defaults to 123. +- **num_classes**: *int, default=60*\ + Specifies the number of classes for the action dataset. +- **num_point**: *int, default=25*\ + Specifies the number of body joints in each skeleton. +- **num_person**: *int, default=2*\ + Specifies the number of body skeletons in each frame. +- **in_channels**: *int, default=3*\ + Specifies the number of input channels for each body joint. +- **graph_type**: *str {'ntu', 'openpose'}, default='ntu'*\ + Specifies the type of graph structure associated with the dataset. +- **sequence_len** *int, default=300*\ + Size of the final global average pooling. Defaults to 300. + +#### `CoSTGCNLearner.fit` +```python +CoSTGCNLearner.fit(self, dataset, val_dataset, epochs, steps) +``` + +This method is used for training the algorithm on a train dataset and validating on a val dataset. + +Parameters: + +- **dataset**: *object*\ + Object that holds the training dataset. + Can be of type `ExternalDataset` or a custom dataset inheriting from `DatasetIterator`. +- **val_dataset**: *object*\ + Object that holds the validation dataset. +- **epochs**: *int, default=None*\ + Number of epochs. + If none is supplied, self.iters will be used. +- **steps**: *int, default=None*\ + Number of training steps to conduct. + If none, this is determined by epochs. + + +#### `CoSTGCNLearner.eval` +```python +CoSTGCNLearner.eval(self, dataset, steps) +``` + +This method is used to evaluate a trained model on an evaluation dataset. +Returns a dictionary containing stats regarding evaluation. + +Parameters: + +- **dataset**: *object*\ + Dataset on which to evaluate model +- **steps**: *int, default=None*\ + Number of validation batches to evaluate. + If None, all batches are evaluated. + + +#### `CoSTGCNLearner.init_model` +```python +CoSTGCNLearner.init_model(self) +``` +This method is used to initialize model with random parameters + +#### `ProgressiveSpatioTemporalGCNLearner.infer` +```python +ProgressiveSpatioTemporalGCNLearner.infer(self, batch) +``` + +This method is used to perform action recognition on a sequence of skeletons. +It returns the action category as an object of `engine.target.Category` if a proper input object `engine.data.SkeletonSequence` is given. + +Parameters: + +- **batch**: *object*\ + Object of type engine.data.SkeletonSequence. 
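+
+Because the continual learner is not covered by the examples at the end of this page, the following is a minimal, illustrative sketch of frame-wise inference.
+The import path is inferred from the source file linked above, and a raw NumPy array is fed to `infer` in the same way as in the ST-GCN inference example; both are assumptions rather than a verified snippet, and wrapping the input in `engine.data.SkeletonSequence` may be required depending on the toolkit version.
+In practice a pretrained checkpoint would be loaded with `download()`/`load()` (see below) instead of calling `init_model()`.
+
+```python
+import numpy as np
+from opendr.perception.skeleton_based_action_recognition.continual_stgcn_learner import CoSTGCNLearner
+
+# Minimal CPU setup with the NTU graph layout (3 input channels, 25 joints, up to 2 skeletons per frame)
+learner = CoSTGCNLearner(device='cpu', graph_type='ntu', num_classes=60, num_point=25, num_person=2, in_channels=3)
+learner.init_model()  # random weights, so the predictions below are placeholders
+
+# Feed one time-step at a time; a single frame has shape (C, V, S) = (channels, joints, skeletons)
+for _ in range(10):
+    frame = np.random.randn(3, 25, 2).astype(np.float32)
+    predictions = learner.infer(frame)  # list of predicted categories (see `CoSTGCNLearner.infer` below)
+```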
+ +#### `CoSTGCNLearner.save` +```python +CoSTGCNLearner.save(self, path) +``` + +This method is used to save model weights and metadata to path. + +Parameters: + +- **path**: *str*\ + Directory in which to save model weights and meta data. + + +#### `CoSTGCNLearner.load` +```python +CoSTGCNLearner.load(self, path) +``` + +This method is used to load a previously saved model from its saved folder. +Loads the model from inside the directory of the path provided, using the metadata .json file included. + +Parameters: + +- **path**: *str*\ + Path to metadata file in json format or path to model weights. + + +#### `CoSTGCNLearner.optimize` +```python +CoSTGCNLearner.optimize(self, do_constant_folding) +``` + +This method is used to optimize a trained model to ONNX format which can be then used for inference. + +Parameters: + +- **do_constant_folding**: *bool, default=False*\ + ONNX format optimization. + If True, the constant-folding optimization is applied to the model during export. + + +#### `CoSTGCNLearner.download` +```python +@staticmethod +CoSTGCNLearner.download(self, dataset_name, experiment_name, path, method_name, mode, verbose, url, file_name) +``` + +Downloads files depending on mode and saves them in the path provided. +It supports downloading: +1. the pretrained weights for stgcn model. +2. a small sample dataset and its labels. + +Parameters: + +- **dataset_name**: *str, default='nturgbd_cv'*\ + The name of dataset that should be downloaded. +- **experiment_name**: *str, default='stgcn_nturgbd'*\ + The name of experiment for which the pretrained model is saved. +- **path**: *str, default=None*\ + Local path to save the files, defaults to self.parent_dir if None. +- **mode**: *str, default="pretrained"*\ + What file to download, can be one of "pretrained", "train_data", "val_data", "test_data" +- **verbose**: *bool, default=False*\ + Whether to print messages in the console. +- **url**: *str, default=OpenDR FTP URL*\ + URL of the FTP server. +- **file_name**: *str, default="costgcn_ntu60_xview_joint.ckpt"*\ + The name of the file containing the pretrained model. + +#### `CoSTGCNLearner.infer` +```python +CoSTGCNLearner.infer(self, batch) +``` + +This method is used to perform inference on a batch of data. +It returns a list of output categories + +Parameters: + +- **batch**: *object*\ + Batch of skeletons for a single time-step. + The batch should have shape (C, V, S), (C, T, V, S), or (B, C, T, V, S). Here, B is the batch size, C is the number of input channels, V is the number of vertices, and S is the number of skeletons #### Examples -* **Finding an optimized spatio-temporal GCN architecture based on training dataset defined as an `ExternalDataset`**. +* **Finding an optimized spatio-temporal GCN architecture based on training dataset defined as an `ExternalDataset`**. The training and evaluation dataset should be present in the path provided, along with the labels file. The `batch_size` argument should be adjusted according to available memory. 
@@ -748,21 +978,21 @@ Parameters: from opendr.engine.datasets import ExternalDataset training_dataset = ExternalDataset(path='./data/preprocessed_nturgbd/xview', dataset_type='NTURGBD') validation_dataset = ExternalDataset(path='./data/preprocessed_nturgbd/xview', dataset_type='NTURGBD') - + pstgcn_learner = ProgressiveSpatioTemporalGCNLearner(temp_path='./parent_dir', batch_size=64, epochs=65, checkpoint_after_iter=10, val_batch_size=128, dataset_name='nturgbd_cv', experiment_name='pstgcn_nturgbd', blocksize=20, numblocks=1, numlayers=1, topology=[], layer_threshold=1e-4, block_threshold=1e-4) - + pstgcn_learner.network_builder(dataset=training_dataset, val_dataset=validation_dataset, train_data_filename='train_joints.npy', train_labels_filename='train_labels.pkl', val_data_filename="val_joints.npy", val_labels_filename="val_labels.pkl", skeleton_data_type='joint') - + pstgcn_learner.save(path='./saved_models/pstgcn_nturgbd_cv_checkpoints', model_name='test_pstgcn') ``` @@ -776,19 +1006,19 @@ Parameters: dataset_name='nturgbd_cv', experiment_name='pstgcn_nturgbd', blocksize=20, numblocks=1, numlayers=1, topology=[], layer_threshold=1e-4, block_threshold=1e-4) - + # Download the default pretrained pstgcn model in the parent_dir pstgcn_learner.download( - mode="pretrained", path='./parent_dir/pretrained_models', file_name='pretrained_pstgcn') - + mode="pretrained", path='./parent_dir/pretrained_models', file_name='pretrained_pstgcn') + pstgcn_learner.load('./parent_dir/pretrained_models', model_name='pretrained_stgcn') - test_data_path = pstgcn_learner.download(mode="test_data") # Download a test data + test_data_path = pstgcn_learner.download(mode="test_data") # Download a test data test_data = numpy.load(test_data_path) action_category = pstgcn_learner.infer(test_data) - + ``` - -* **Optimization example for a previously trained model.** + +* **Optimization example for a previously trained model** Inference can be run with the trained model after running self.optimize. ```python from opendr.perception.skeleton_based_action_recognition.progressive_spatio_temporal_gcn_learner import ProgressiveSpatioTemporalGCNLearner @@ -800,8 +1030,8 @@ Parameters: blocksize=20, numblocks=1, numlayers=1, topology=[], layer_threshold=1e-4, block_threshold=1e-4) pstgcn_learner.download( - mode="pretrained", path='./parent_dir/pretrained_models', file_name='pretrained_pstgcn') - + mode="pretrained", path='./parent_dir/pretrained_models', file_name='pretrained_pstgcn') + pstgcn_learner.load(path='./parent_dir/pretrained_models', file_name='pretrained_pstgcn') pstgcn_learner.optimize(do_constant_folding=True) pstgcn_learner.save(path='./parent_dir/optimized_model', model_name='optimized_pstgcn') @@ -817,44 +1047,55 @@ The tests were conducted on the following computational devices: - Nvidia Jetson Xavier AGX - Nvidia RTX 2080 Ti GPU on server with Intel Xeon Gold processors - Inference time is measured as the time taken to transfer the input to the model (e.g., from CPU to GPU), run inference using the algorithm, and return results to CPU. -The ST-GCN, TAGCN and ST-BLN models are implemented in *SpatioTemporalGCNLearner* and the PST-GCN model is implemented in *ProgressiveSpatioTemporalGCNLearner*. +The ST-GCN, TAGCN and ST-BLN models are implemented in *SpatioTemporalGCNLearner* and the PST-GCN model is implemented in *ProgressiveSpatioTemporalGCNLearner*. Note that the models receive each input sample as a sequence of 300 skeletons, and the pose estimation process is not involved in this benchmarking. 
The skeletal data is from NTU-RGBD dataset. We report speed (single sample per inference) as the mean of 100 runs. The noted memory is the maximum allocated memory on GPU during inference. The performance evaluation results of the *SpatioTemporalGCNLearner* and *ProgressiveSpatioTemporalGCNLearner* in terms of prediction accuracy on NTU-RGBD-60, parameter count and maximum allocated memory are reported in the following Tables. -The performance of TA-GCN is reported when it selects 100 frames out of 300 (T=100). PST-GCN finds different architectures for two different dataset settings (CV and CS) which leads to different classification accuracy, number of parameters and memory allocation. - -| Method | Acc. (%) | Params (M) | Mem. (MB) | -|-------------------|----------|------------|-----------| -| ST-GCN | 88.3 | 3.12 | 47.37 | -| TA-GCN (T=100) | 94.2 | 2.24 | 42.65 | -| ST-BLN | 93.8 | 5.3 | 55.77 | -| PST-GCN (CV) | 94.33 | 0.63 | 31.65 | -| PST-GCN (CS) | 87.9 | 0.92 | 32.2 | +The performance of TA-GCN is reported when it selects 100 frames out of 300 (T=100). PST-GCN finds different architectures for two different dataset settings (CV and CS) which leads to different classification accuracy, number of parameters and memory allocation. + +| Method | Acc. (%) | Params (M) | Mem. (MB) | +|----------------|----------|------------|-----------| +| ST-GCN | 88.3 | 3.12 | 47.37 | +| TA-GCN (T=100) | 94.2 | 2.24 | 42.65 | +| ST-BLN | 93.8 | 5.3 | 55.77 | +| PST-GCN (CV) | 94.33 | 0.63 | 31.65 | +| PST-GCN (CS) | 87.9 | 0.92 | 32.2 | +| CoST-GCN (CV) | 93.8 | 3.1 | 36.1 | +| CoST-GCN (CS) | 86.3 | 3.1 | 36.1 | +| CoA-GCN (CV) | 92.6 | 3.5 | 37.4 | +| CoA-GCN (CS) | 84.1 | 3.5 | 37.4 | +| CoS-TR (CV) | 92.4 | 3.1 | 36.1 | +| CoS-TR (CS) | 86.3 | 3.1 | 36.1 | The inference speed (evaluations/second) of both learners on various computational devices are as follows: -| Method | CPU | Jetson TX2 | Jetson Xavier | RTX 2080 Ti | +| Method | CPU | Jetson TX2 | Jetson Xavier | RTX 2080 Ti | |----------------|-------|------------|---------------|-------------| -| ST-GCN | 13.26 | 4.89 | 15.27 | 63.32 | -| TA-GCN (T=100) | 20.47 | 10.6 | 25.43 | 93.33 | +| ST-GCN | 13.26 | 4.89 | 15.27 | 63.32 | +| TA-GCN (T=100) | 20.47 | 10.6 | 25.43 | 93.33 | | ST-BLN | 7.69 | 3.57 | 12.56 | 55.98 | -| PST-GCN (CV) | 15.38 | 6.57 | 20.25 | 83.10 | -| PST-GCN (CS) | 13.07 | 5.53 | 19.41 | 77.57 | - -Energy (Joules) of both learners’ inference on embedded devices is shown in the following: - -| Method | Jetson TX2 | Jetson Xavier | -|-------------------|-------------|----------------| -| ST-GCN | 6.07 | 1.38 | -| TA-GCN (T=100) | 2.23 | 0.59 | -| ST-BLN | 9.26 | 2.01 | -| PST-GCN (CV) | 4.13 | 1.00 | -| PST-GCN (CS) | 5.54 | 1.12 | +| PST-GCN (CV) | 15.38 | 6.57 | 20.25 | 83.10 | +| PST-GCN (CS) | 13.07 | 5.53 | 19.41 | 77.57 | +| CoST-GCN | 34.26 | 11.22 | 20.91 | - | +| CoA-GCN | 23.09 | 7.24 | 15.28 | - | +| CoS-TR | 30.12 | 10.49 | 20.87 | - | + +Energy (Joules) of both learners’ inference on embedded devices is shown in the following: + +| Method | Jetson TX2 | Jetson Xavier | +|----------------|------------|---------------| +| ST-GCN | 6.07 | 1.38 | +| TA-GCN (T=100) | 2.23 | 0.59 | +| ST-BLN | 9.26 | 2.01 | +| PST-GCN (CV) | 4.13 | 1.00 | +| PST-GCN (CS) | 5.54 | 1.12 | +| CoST-GCN | 1.95 | 0.57 | +| CoA-GCN | 3.33 | 0.91 | +| CoS-TR | 2.28 | 0.55 | The platform compatibility evaluation is also reported below: @@ -871,31 +1112,35 @@ The platform compatibility evaluation is also reported below: ## References -[1] 
-[Yan, S., Xiong, Y., & Lin, D. (2018, April). Spatial temporal graph convolutional networks for skeleton-based action +[1] +[Yan, S., Xiong, Y., & Lin, D. (2018, April). Spatial temporal graph convolutional networks for skeleton-based action recognition. In Proceedings of the AAAI conference on artificial intelligence (Vol. 32, No. 1).]( https://arxiv.org/abs/1609.02907) -[2] +[2] [Heidari, Negar, and Alexandros Iosifidis. "Temporal attention-augmented graph convolutional network for efficient skeleton-based human action recognition." 2020 25th International Conference on Pattern Recognition (ICPR). IEEE, 2021.](https://ieeexplore.ieee.org/abstract/document/9412091) -[3] -[Heidari, N., & Iosifidis, A. (2020). On the spatial attention in Spatio-Temporal Graph Convolutional Networks for +[3] +[Heidari, N., & Iosifidis, A. (2020). On the spatial attention in Spatio-Temporal Graph Convolutional Networks for skeleton-based human action recognition. arXiv preprint arXiv: 2011.03833.](https://arxiv.org/abs/2011.03833) -[4] +[4] [Heidari, Negar, and Alexandras Iosifidis. "Progressive Spatio-Temporal Graph Convolutional Network for Skeleton-Based Human Action Recognition." ICASSP 2021-2021 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP). IEEE, 2021.](https://ieeexplore.ieee.org/abstract/document/9413860) -[5] +[5] [Shahroudy, A., Liu, J., Ng, T. T., & Wang, G. (2016). Ntu rgb+ d: A large scale dataset for 3d human activity analysis. In Proceedings of the IEEE conference on computer vision and pattern recognition (pp. 1010-1019).]( https://openaccess.thecvf.com/content_cvpr_2016/html/Shahroudy_NTU_RGBD_A_CVPR_2016_paper.html) [6] -[Kay, W., Carreira, J., Simonyan, K., Zhang, B., Hillier, C., Vijayanarasimhan, S., ... & Zisserman, A. (2017). -The kinetics human action video dataset. arXiv preprint arXiv:1705.06950.](https://arxiv.org/pdf/1705.06950.pdf) +[Kay, W., Carreira, J., Simonyan, K., Zhang, B., Hillier, C., Vijayanarasimhan, S., ... & Zisserman, A. (2017). +The kinetics human action video dataset. arXiv preprint arXiv:1705.06950.](https://arxiv.org/pdf/1705.06950.pdf) [7] -[Cao, Z., Simon, T., Wei, S. E., & Sheikh, Y. (2017). Realtime multi-person 2d pose estimation using part affinity +[Cao, Z., Simon, T., Wei, S. E., & Sheikh, Y. (2017). Realtime multi-person 2d pose estimation using part affinity fields. In Proceedings of the IEEE conference on computer vision and pattern recognition (pp. 7291-7299).]( https://openaccess.thecvf.com/content_cvpr_2017/html/Cao_Realtime_Multi-Person_2D_CVPR_2017_paper.html) + +[8] +[Hedegaard, Lukas, Negar Heidari, and Alexandros Iosifidis. "Online Skeleton-based Action Recognition with Continual Spatio-Temporal Graph Convolutional Networks." arXiv preprint arXiv:2203.11009 (2022).]( +https://arxiv.org/abs/2203.11009) \ No newline at end of file diff --git a/docs/reference/smpld_models.md b/docs/reference/smpld_models.md index b5c7418472..e0bba4dff7 100644 --- a/docs/reference/smpld_models.md +++ b/docs/reference/smpld_models.md @@ -6,10 +6,10 @@ This folder contains code for:


### Download the raw SMPL+D models only (≈12.5Gb) diff --git a/packages.txt b/packages.txt index 5c24f8a26e..a00820971f 100644 --- a/packages.txt +++ b/packages.txt @@ -6,7 +6,6 @@ perception/pose_estimation perception/fall_detection perception/compressive_learning perception/heart_anomaly_detection -simulation/human_model_generation perception/multimodal_human_centric perception/facial_expression_recognition perception/activity_recognition @@ -16,6 +15,7 @@ perception/object_tracking_2d perception/object_detection_3d perception/object_tracking_3d perception/panoptic_segmentation +simulation/human_model_generation utils/hyperparameter_tuner -control/single_demo_grasp -opendr +utils/ambiguity_measure +opendr \ No newline at end of file diff --git a/projects/README.md b/projects/README.md index 6cf05ca17a..d755cc6794 100644 --- a/projects/README.md +++ b/projects/README.md @@ -1,3 +1,8 @@ # Projects - This folder contains sample applications demonstrating the OpenDR toolkit functionalities. + +This includes: +- [Python usage examples and tutorials](python) +- [C_API usage examples](c_api) +- [ROS 1 nodes](opendr_ws) +- [ROS 2 nodes](opendr_ws_2) diff --git a/projects/opendr_ws/README.md b/projects/opendr_ws/README.md old mode 100755 new mode 100644 index 2985a9f062..31a6aba763 --- a/projects/opendr_ws/README.md +++ b/projects/opendr_ws/README.md @@ -1,59 +1,94 @@ # opendr_ws ## Description -This ROS workspace contains ROS nodes and tools developed by OpenDR project. Currently, ROS nodes are compatible with ROS Noetic. -This workspace contains the `ros_bridge` package, which provides message definitions for ROS-compatible OpenDR data types, +This ROS workspace contains ROS nodes and tools developed by OpenDR project. +Currently, ROS nodes are compatible with **ROS Melodic for Ubuntu 18.04** and **ROS Noetic for Ubuntu 20.04**. +The instructions that follow target ROS Noetic, but can easily be modified for ROS Melodic by swapping out the version name. +This workspace contains the `opendr_bridge` package, which provides message definitions for ROS-compatible OpenDR data types, as well the `ROSBridge` class which provides an interface to convert OpenDR data types and targets into ROS-compatible -ones similar to CvBridge. You can find more information in the corresponding [documentation](../../docs/reference/rosbridge.md). - - -## Setup -For running a minimal working example you can follow the instructions below: - -0. Source the necessary distribution tools: - - ```source /opt/ros/noetic/setup.bash``` - -1. Make sure you are inside opendr_ws -2. If you are planning to use a usb camera for the demos, install the corresponding package and its dependencies: - -```shell -cd src -git clone https://github.com/ros-drivers/usb_cam -cd .. -rosdep install --from-paths src/ --ignore-src -``` -3. Install the following dependencies, required in order to use the OpenDR ROS tools: -```shell -sudo apt-get install ros-noetic-vision-msgs ros-noetic-geometry-msgs ros-noetic-sensor-msgs ros-noetic-audio-common-msgs -``` -4. Build the packages inside workspace -```shell -catkin_make -``` -5. Source the workspace and you are ready to go! -```shell -source devel/setup.bash -``` +ones similar to CvBridge. You can find more information in the corresponding [documentation](../../docs/reference/opendr-ros-bridge.md). + + +## First time setup +For the initial setup you can follow the instructions below: + +0. Make sure ROS noetic is installed: http://wiki.ros.org/noetic/Installation/Ubuntu (desktop full install) + +1. 
Open a new terminal window and source the necessary distribution tools: + ```shell + source /opt/ros/noetic/setup.bash + ``` + _For convenience, you can add this line to your `.bashrc` so you don't have to source the tools each time you open a terminal window._ + +2. Navigate to your OpenDR home directory (`~/opendr`) and activate the OpenDR environment using: + ```shell + source bin/activate.sh + ``` + You need to do this step every time before running an OpenDR node. + +3. Navigate into the OpenDR ROS workspace:: + ```shell + cd projects/opendr_ws + ``` + +4. Build the packages inside the workspace: + ```shell + catkin_make + ``` + +5. Source the workspace: + ```shell + source devel/setup.bash + ``` + You are now ready to run an OpenDR ROS node, in this terminal but first the ROS master node needs to be running + +6. Before continuing, you need to start the ROS master node by running: + ```shell + roscore & + ``` + You can now run an OpenDR ROS node. More information below. + +#### After first time setup +For running OpenDR nodes after you have completed the initial setup, you can skip step 0 from the list above. +You can also skip building the workspace (step 4) granted it's been already built and no changes were made to the code inside the workspace, e.g. you modified the source code of a node. + +#### More information +After completing the setup you can read more information on the [opendr perception package README](src/opendr_perception/README.md), where you can find a concise list of prerequisites and helpful notes to view the output of the nodes or optimize their performance. + +#### Node documentation +You can also take a look at the list of tools [below](#structure) and click on the links to navigate directly to documentation for specific nodes with instructions on how to run and modify them. + +**For first time users we suggest reading the introductory sections (prerequisites and notes) first.** + ## Structure -Currently, apart from tools, opendr_ws contains the following ROS nodes: - -### [Perception](src/perception/README.md) -1. Pose Estimation -2. Fall Detection -3. 2D Object Detection -4. Face Detection -5. Panoptic Segmentation -6. Face Recognition -7. Semantic Segmentation -8. RGBD Hand Gesture Recognition -9. Heart Anomaly Detection -10. Video Human Activity Recognition -11. Landmark-based Facial Expression Recognition -12. Skeleton-based Human Action Recognition -13. Speech Command Recognition -14. Voxel Object Detection 3D -15. AB3DMOT Object Tracking 3D -16. FairMOT Object Tracking 2D -17. Deep Sort Object Tracking 2D +Currently, apart from tools, opendr_ws contains the following ROS nodes (categorized according to the input they receive): + +### [Perception](src/opendr_perception/README.md) +## RGB input +1. [Pose Estimation](src/opendr_perception/README.md#pose-estimation-ros-node) +2. [Fall Detection](src/opendr_perception/README.md#fall-detection-ros-node) +3. [Face Detection](src/opendr_perception/README.md#face-detection-ros-node) +4. [Face Recognition](src/opendr_perception/README.md#face-recognition-ros-node) +5. [2D Object Detection](src/opendr_perception/README.md#2d-object-detection-ros-nodes) +6. [2D Single Object Tracking](src/opendr_perception/README.md#2d-single-object-tracking-ros-node) +7. [2D Object Tracking](src/opendr_perception/README.md#2d-object-tracking-ros-nodes) +8. [Panoptic Segmentation](src/opendr_perception/README.md#panoptic-segmentation-ros-node) +9. 
[Semantic Segmentation](src/opendr_perception/README.md#semantic-segmentation-ros-node) +10. [Image-based Facial Emotion Estimation](src/opendr_perception/README.md#image-based-facial-emotion-estimation-ros-node) +11. [Landmark-based Facial Expression Recognition](src/opendr_perception/README.md#landmark-based-facial-expression-recognition-ros-node) +12. [Skeleton-based Human Action Recognition](src/opendr_perception/README.md#skeleton-based-human-action-recognition-ros-node) +13. [Video Human Activity Recognition](src/opendr_perception/README.md#video-human-activity-recognition-ros-node) +## RGB + Infrared input +1. [End-to-End Multi-Modal Object Detection (GEM)](src/opendr_perception/README.md#2d-object-detection-gem-ros-node) +## RGBD input +1. [RGBD Hand Gesture Recognition](src/opendr_perception/README.md#rgbd-hand-gesture-recognition-ros-node) +## RGB + Audio input +1. [Audiovisual Emotion Recognition](src/opendr_perception/README.md#audiovisual-emotion-recognition-ros-node) +## Audio input +1. [Speech Command Recognition](src/opendr_perception/README.md#speech-command-recognition-ros-node) +## Point cloud input +1. [3D Object Detection Voxel](src/opendr_perception/README.md#3d-object-detection-voxel-ros-node) +2. [3D Object Tracking AB3DMOT](src/opendr_perception/README.md#3d-object-tracking-ab3dmot-ros-node) +## Biosignal input +1. [Heart Anomaly Detection](src/opendr_perception/README.md#heart-anomaly-detection-ros-node) diff --git a/projects/opendr_ws/images/opendr_node_diagram.png b/projects/opendr_ws/images/opendr_node_diagram.png new file mode 100644 index 0000000000..6948a1f1b9 Binary files /dev/null and b/projects/opendr_ws/images/opendr_node_diagram.png differ diff --git a/projects/opendr_ws/src/ros_bridge/CMakeLists.txt b/projects/opendr_ws/src/opendr_bridge/CMakeLists.txt similarity index 75% rename from projects/opendr_ws/src/ros_bridge/CMakeLists.txt rename to projects/opendr_ws/src/opendr_bridge/CMakeLists.txt index b7ed470ae0..6cad646562 100644 --- a/projects/opendr_ws/src/ros_bridge/CMakeLists.txt +++ b/projects/opendr_ws/src/opendr_bridge/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.0.2) -project(ros_bridge) +project(opendr_bridge) find_package(catkin REQUIRED COMPONENTS roscpp @@ -14,6 +14,18 @@ catkin_python_setup() ################################################ ## Declare ROS messages, services and actions ## ################################################ +add_message_files( + DIRECTORY msg + FILES + OpenDRPose2DKeypoint.msg + OpenDRPose2D.msg +) + + add_service_files( + DIRECTORY srv + FILES + OpenDRSingleObjectTracking.srv + ) generate_messages( DEPENDENCIES diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/example/Images/.keep b/projects/opendr_ws/src/opendr_bridge/include/opendr_bridge/.keep similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/example/Images/.keep rename to projects/opendr_ws/src/opendr_bridge/include/opendr_bridge/.keep diff --git a/projects/opendr_ws/src/opendr_bridge/msg/OpenDRPose2D.msg b/projects/opendr_ws/src/opendr_bridge/msg/OpenDRPose2D.msg new file mode 100644 index 0000000000..09b1443027 --- /dev/null +++ b/projects/opendr_ws/src/opendr_bridge/msg/OpenDRPose2D.msg @@ -0,0 +1,26 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This message represents a full OpenDR human pose 2D as a list of keypoints + +Header header + +# The id of the pose +int32 pose_id + +# The pose detection confidence of the model +float32 conf + +# A list of a human 2D pose keypoints +OpenDRPose2DKeypoint[] keypoint_list diff --git a/projects/opendr_ws/src/opendr_bridge/msg/OpenDRPose2DKeypoint.msg b/projects/opendr_ws/src/opendr_bridge/msg/OpenDRPose2DKeypoint.msg new file mode 100644 index 0000000000..72d14a19f2 --- /dev/null +++ b/projects/opendr_ws/src/opendr_bridge/msg/OpenDRPose2DKeypoint.msg @@ -0,0 +1,22 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This message contains all relevant information for an OpenDR human pose 2D keypoint + +# The kpt_name according to https://github.com/opendr-eu/opendr/blob/master/docs/reference/lightweight-open-pose.md#notes +string kpt_name + +# x and y pixel position on the input image, (0, 0) is top-left corner of image +int32 x +int32 y diff --git a/projects/opendr_ws/src/ros_bridge/package.xml b/projects/opendr_ws/src/opendr_bridge/package.xml similarity index 88% rename from projects/opendr_ws/src/ros_bridge/package.xml rename to projects/opendr_ws/src/opendr_bridge/package.xml index e9cb01afb1..9d68c624b2 100644 --- a/projects/opendr_ws/src/ros_bridge/package.xml +++ b/projects/opendr_ws/src/opendr_bridge/package.xml @@ -1,8 +1,8 @@ - ros_bridge - 1.1.1 - OpenDR ros_bridge package. This package provides a way to translate ROS messages into OpenDR data types + opendr_bridge + 2.0.0 + OpenDR ROS bridge package. This package provides a way to translate ROS messages into OpenDR data types and vice versa. 
OpenDR Project Coordinator diff --git a/projects/opendr_ws/src/ros_bridge/setup.py b/projects/opendr_ws/src/opendr_bridge/setup.py similarity index 100% rename from projects/opendr_ws/src/ros_bridge/setup.py rename to projects/opendr_ws/src/opendr_bridge/setup.py diff --git a/projects/opendr_ws/src/ros_bridge/src/opendr_bridge/__init__.py b/projects/opendr_ws/src/opendr_bridge/src/opendr_bridge/__init__.py similarity index 100% rename from projects/opendr_ws/src/ros_bridge/src/opendr_bridge/__init__.py rename to projects/opendr_ws/src/opendr_bridge/src/opendr_bridge/__init__.py diff --git a/projects/opendr_ws/src/ros_bridge/src/opendr_bridge/bridge.py b/projects/opendr_ws/src/opendr_bridge/src/opendr_bridge/bridge.py similarity index 86% rename from projects/opendr_ws/src/ros_bridge/src/opendr_bridge/bridge.py rename to projects/opendr_ws/src/opendr_bridge/src/opendr_bridge/bridge.py index fe7e4171f2..215803f064 100755 --- a/projects/opendr_ws/src/ros_bridge/src/opendr_bridge/bridge.py +++ b/projects/opendr_ws/src/opendr_bridge/src/opendr_bridge/bridge.py @@ -28,6 +28,7 @@ from sensor_msgs.msg import Image as ImageMsg, PointCloud as PointCloudMsg, ChannelFloat32 as ChannelFloat32Msg import rospy from geometry_msgs.msg import Point32 as Point32Msg, Quaternion as QuaternionMsg +from opendr_bridge.msg import OpenDRPose2D, OpenDRPose2DKeypoint class ROSBridge: @@ -69,51 +70,50 @@ def to_ros_image(self, image: Image, encoding: str='passthrough') -> ImageMsg: message = self._cv_bridge.cv2_to_imgmsg(image.opencv(), encoding=encoding) return message - def to_ros_pose(self, pose): + def to_ros_pose(self, pose: Pose): """ - Converts an OpenDR pose into a Detection2DArray msg that can carry the same information - Each keypoint is represented as a bbox centered at the keypoint with zero width/height. The subject id is also - embedded on each keypoint (stored in ObjectHypothesisWithPose). - :param pose: OpenDR pose to be converted + Converts an OpenDR Pose into a OpenDRPose2D msg that can carry the same information, i.e. a list of keypoints, + the pose detection confidence and the pose id. + Each keypoint is represented as an OpenDRPose2DKeypoint with x, y pixel position on input image with (0, 0) + being the top-left corner. 
+ :param pose: OpenDR Pose to be converted to OpenDRPose2D + :type pose: engine.target.Pose + :return: ROS message with the pose - :rtype: vision_msgs.msg.Detection2DArray + :rtype: opendr_bridge.msg.OpenDRPose2D """ data = pose.data - keypoints = Detection2DArray() - for i in range(data.shape[0]): - keypoint = Detection2D() - keypoint.bbox = BoundingBox2D() - keypoint.results.append(ObjectHypothesisWithPose()) - keypoint.bbox.center = Pose2D() - keypoint.bbox.center.x = data[i][0] - keypoint.bbox.center.y = data[i][1] - keypoint.bbox.size_x = 0 - keypoint.bbox.size_y = 0 - keypoint.results[0].id = pose.id - if pose.confidence: - keypoint.results[0].score = pose.confidence - keypoints.detections.append(keypoint) - return keypoints + # Setup ros pose + ros_pose = OpenDRPose2D() + ros_pose.pose_id = int(pose.id) + if pose.confidence: + ros_pose.conf = pose.confidence - def from_ros_pose(self, ros_pose): - """ - Converts a ROS message with pose payload into an OpenDR pose - :param ros_pose: the pose to be converted (represented as vision_msgs.msg.Detection2DArray) - :type ros_pose: vision_msgs.msg.Detection2DArray - :return: an OpenDR pose + # Add keypoints to pose + for i in range(data.shape[0]): + ros_keypoint = OpenDRPose2DKeypoint() + ros_keypoint.kpt_name = pose.kpt_names[i] + ros_keypoint.x = data[i][0] + ros_keypoint.y = data[i][1] + # Add keypoint to pose + ros_pose.keypoint_list.append(ros_keypoint) + return ros_pose + + def from_ros_pose(self, ros_pose: OpenDRPose2D): + """ + Converts an OpenDRPose2D message into an OpenDR Pose. + :param ros_pose: the ROS pose to be converted + :type ros_pose: opendr_bridge.msg.OpenDRPose2D + :return: an OpenDR Pose + :rtype: engine.target.Pose """ - keypoints = ros_pose.detections - data = [] - pose_id, confidence = None, None + ros_keypoints = ros_pose.keypoint_list + keypoints = [] + pose_id, confidence = ros_pose.pose_id, ros_pose.conf - for keypoint in keypoints: - data.append(keypoint.bbox.center.x) - data.append(keypoint.bbox.center.y) - confidence = keypoint.results[0].score - pose_id = keypoint.results[0].id - data = np.asarray(data).reshape((-1, 2)) + for ros_keypoint in ros_keypoints: + keypoints.append(int(ros_keypoint.x)) + keypoints.append(int(ros_keypoint.y)) + data = np.asarray(keypoints).reshape((-1, 2)) pose = Pose(data, confidence) pose.id = pose_id @@ -213,7 +213,7 @@ def to_ros_boxes(self, box_list): ros_box.bbox.center.y = box.top + box.height / 2. ros_box.bbox.size_x = box.width ros_box.bbox.size_y = box.height - ros_box.results[0].id = box.name + ros_box.results[0].id = int(box.name) if box.confidence: ros_box.results[0].score = box.confidence ros_boxes.detections.append(ros_box) @@ -235,8 +235,8 @@ def from_ros_boxes(self, ros_detections): height = box.bbox.size_y left = box.bbox.center.x - width / 2. top = box.bbox.center.y - height / 2. - id = box.results[0].id - bbox = BoundingBox(top=top, left=left, width=width, height=height, name=id) + _id = int(box.results[0].id) + bbox = BoundingBox(top=top, left=left, width=width, height=height, name=_id) bboxes.data.append(bbox) return bboxes @@ -275,6 +275,50 @@ def from_ros_tracking_annotation(self, ros_detections, ros_tracking_ids, frame=- return TrackingAnnotationList(boxes) + def from_ros_single_tracking_annotation(self, ros_detection_box): + """ + Converts a single ROS Detection2D message into an OpenDR TrackingAnnotation + :param ros_detection_box: The box to be converted. 
+ :type ros_detection_box: vision_msgs.msg.Detection2D + :return: An OpenDR TrackingAnnotation + :rtype: engine.target.TrackingAnnotation + """ + width = ros_detection_box.bbox.size_x + height = ros_detection_box.bbox.size_y + left = ros_detection_box.bbox.center.x - width / 2. + top = ros_detection_box.bbox.center.y - height / 2. + id = 0 + bbox = TrackingAnnotation( + name=id, + left=left, + top=top, + width=width, + height=height, + id=0, + frame=-1 + ) + return bbox + + def to_ros_single_tracking_annotation(self, tracking_annotation): + """ + Converts an OpenDR TrackingAnnotation into a ROS Detection2D message + :param tracking_annotation: The box to be converted. + :type tracking_annotation: engine.target.TrackingAnnotation + :return: A ROS vision_msgs.msg.Detection2D + :rtype: vision_msgs.msg.Detection2D + """ + ros_box = Detection2D() + ros_box.bbox = BoundingBox2D() + ros_box.results.append(ObjectHypothesisWithPose()) + ros_box.bbox.center = Pose2D() + ros_box.bbox.center.x = tracking_annotation.left + tracking_annotation.width / 2.0 + ros_box.bbox.center.y = tracking_annotation.top + tracking_annotation.height / 2.0 + ros_box.bbox.size_x = tracking_annotation.width + ros_box.bbox.size_y = tracking_annotation.height + ros_box.results[0].id = int(tracking_annotation.name) + ros_box.results[0].score = -1 + return ros_box + def to_ros_bounding_box_list(self, bounding_box_list): """ Converts an OpenDR bounding_box_list into a Detection2DArray msg that can carry the same information @@ -294,7 +338,7 @@ def to_ros_bounding_box_list(self, bounding_box_list): detection.bbox.center.y = bounding_box.top + bounding_box.height / 2.0 detection.bbox.size_x = bounding_box.width detection.bbox.size_y = bounding_box.height - detection.results[0].id = bounding_box.name + detection.results[0].id = int(bounding_box.name) detection.results[0].score = bounding_box.confidence detections.detections.append(detection) return detections diff --git a/projects/opendr_ws/src/opendr_bridge/srv/OpenDRSingleObjectTracking.srv b/projects/opendr_ws/src/opendr_bridge/srv/OpenDRSingleObjectTracking.srv new file mode 100644 index 0000000000..7ca4024125 --- /dev/null +++ b/projects/opendr_ws/src/opendr_bridge/srv/OpenDRSingleObjectTracking.srv @@ -0,0 +1,3 @@ +vision_msgs/Detection2D init_box +--- +bool success diff --git a/projects/opendr_ws/src/data_generation/CMakeLists.txt b/projects/opendr_ws/src/opendr_data_generation/CMakeLists.txt similarity index 85% rename from projects/opendr_ws/src/data_generation/CMakeLists.txt rename to projects/opendr_ws/src/opendr_data_generation/CMakeLists.txt index 2a43cfdb27..ed273ea805 100644 --- a/projects/opendr_ws/src/data_generation/CMakeLists.txt +++ b/projects/opendr_ws/src/opendr_data_generation/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.0.2) -project(data_generation) +project(opendr_data_generation) find_package(catkin REQUIRED COMPONENTS roscpp @@ -27,6 +27,6 @@ include_directories( ############# catkin_install_python(PROGRAMS - scripts/synthetic_facial_generation.py + scripts/synthetic_facial_generation_node.py DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} ) diff --git a/projects/opendr_ws/src/data_generation/README.md b/projects/opendr_ws/src/opendr_data_generation/README.md similarity index 97% rename from projects/opendr_ws/src/data_generation/README.md rename to projects/opendr_ws/src/opendr_data_generation/README.md index 523347f6a0..67390f9918 100644 --- 
a/projects/opendr_ws/src/data_generation/README.md +++ b/projects/opendr_ws/src/opendr_data_generation/README.md @@ -1,28 +1,28 @@ -# Perception Package - -This package contains ROS nodes related to data generation package of OpenDR. - -## Pose Estimation ROS Node -Assuming that you have already [built your workspace](../../README.md) and started roscore (i.e., just run `roscore`), then you can - - -1. Add OpenDR to `PYTHONPATH` (please make sure you do not overwrite `PYTHONPATH` ), e.g., -```shell -export PYTHONPATH="/home/user/opendr/src:$PYTHONPATH" -``` - -2. Start the node responsible for publishing images. If you have a usb camera, then you can use the corresponding node (assuming you have installed the corresponding package): - -```shell -rosrun usb_cam usb_cam_node -``` - -3. You are then ready to start the synthetic data generation node - -```shell -rosrun data_generation synthetic_facial_generation.py -``` - -3. You can examine the published multiview facial images stream using `rosrun rqt_image_view rqt_image_view` (select the topic `/opendr/synthetic_facial_images`) or `rostopic echo /opendr/synthetic_facial_images` - - +# Data Generation Package + +This package contains ROS nodes related to the data generation package of OpenDR. + +## Synthetic Facial Image Generation ROS Node +Assuming that you have already [built your workspace](../../README.md) and started roscore (i.e., just run `roscore`), then you can: + + +1. Add OpenDR to `PYTHONPATH` (please make sure you do not overwrite `PYTHONPATH` ), e.g., +```shell +export PYTHONPATH="/home/user/opendr/src:$PYTHONPATH" +``` + +2. Start the node responsible for publishing images. If you have a USB camera, then you can use the corresponding node (assuming you have installed the corresponding package): + +```shell +rosrun usb_cam usb_cam_node +``` + +3. You are then ready to start the synthetic data generation node: + +```shell +rosrun opendr_data_generation synthetic_facial_generation_node.py +``` + +4. 
You can examine the published multiview facial images stream using `rosrun rqt_image_view rqt_image_view` (select the topic `/opendr/synthetic_facial_images`) or `rostopic echo /opendr/synthetic_facial_images` + + diff --git a/projects/opendr_ws/src/data_generation/package.xml b/projects/opendr_ws/src/opendr_data_generation/package.xml similarity index 93% rename from projects/opendr_ws/src/data_generation/package.xml rename to projects/opendr_ws/src/opendr_data_generation/package.xml index 57d1e6e1f7..f4733b2ada 100644 --- a/projects/opendr_ws/src/data_generation/package.xml +++ b/projects/opendr_ws/src/opendr_data_generation/package.xml @@ -1,7 +1,7 @@ - data_generation - 1.1.1 + opendr_data_generation + 2.0.0 OpenDR's ROS nodes for data generation package OpenDR Project Coordinator Apache License v2.0 diff --git a/projects/opendr_ws/src/data_generation/scripts/synthetic_facial_generation.py b/projects/opendr_ws/src/opendr_data_generation/scripts/synthetic_facial_generation_node.py similarity index 100% rename from projects/opendr_ws/src/data_generation/scripts/synthetic_facial_generation.py rename to projects/opendr_ws/src/opendr_data_generation/scripts/synthetic_facial_generation_node.py diff --git a/projects/opendr_ws/src/perception/CMakeLists.txt b/projects/opendr_ws/src/opendr_perception/CMakeLists.txt similarity index 51% rename from projects/opendr_ws/src/perception/CMakeLists.txt rename to projects/opendr_ws/src/opendr_perception/CMakeLists.txt index a47f5f9c4b..c2a4a6278b 100644 --- a/projects/opendr_ws/src/perception/CMakeLists.txt +++ b/projects/opendr_ws/src/opendr_perception/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.0.2) -project(perception) +project(opendr_perception) find_package(catkin REQUIRED COMPONENTS roscpp @@ -28,10 +28,15 @@ include_directories( ############# catkin_install_python(PROGRAMS - scripts/pose_estimation.py - scripts/fall_detection.py - scripts/object_detection_2d_detr.py - scripts/object_detection_2d_gem.py - scripts/semantic_segmentation_bisenet.py + scripts/pose_estimation_node.py + scripts/hr_pose_estimation_node.py + scripts/fall_detection_node.py + scripts/object_detection_2d_nanodet_node.py + scripts/object_detection_2d_yolov5_node.py + scripts/object_detection_2d_detr_node.py + scripts/object_detection_2d_gem_node.py + scripts/semantic_segmentation_bisenet_node.py + scripts/object_tracking_2d_siamrpn_node.py + scripts/facial_emotion_estimation_node.py DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} ) diff --git a/projects/opendr_ws/src/opendr_perception/README.md b/projects/opendr_ws/src/opendr_perception/README.md new file mode 100644 index 0000000000..c868a9648d --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/README.md @@ -0,0 +1,849 @@ +# OpenDR Perception Package + +This package contains ROS nodes related to the perception package of OpenDR. + +--- + +## Prerequisites + +Before you can run any of the package's ROS nodes, some prerequisites need to be fulfilled: +1. First of all, you need to [set up the required packages, build and source your workspace.](../../README.md#first-time-setup) +2. Start roscore by running `roscore &`, if you haven't already done so. +3. _(Optional for nodes with [RGB input](#rgb-input-nodes))_ + + For basic usage and testing, all the toolkit's ROS nodes that use RGB images are set up to expect input from a basic webcam using the default package `usb_cam`, which is installed with the toolkit. 
+ You can run the webcam node in the terminal with the workspace sourced using: + ```shell + rosrun usb_cam usb_cam_node & + ``` + By default, the USB cam node publishes images on `/usb_cam/image_raw` and the RGB input nodes subscribe to this topic if not provided with an input topic argument. + As explained for each node below, you can modify the topics via arguments, so if you use any other node responsible for publishing images, **make sure to change the input topic accordingly.** + +--- + +## Notes + +- ### Display output images with rqt_image_view + For any node that outputs images, `rqt_image_view` can be used to display them by running the following command: + ```shell + rosrun rqt_image_view rqt_image_view & + ``` + A window will appear, where the topic that you want to view can be selected from the drop-down menu on the top-left area of the window. + Refer to each node's documentation below to find out the default output image topic, where applicable, and select it on the drop-down menu of rqt_image_view. + +- ### Echo node output + All OpenDR nodes publish some kind of detection message, which can be echoed by running the following command: + ```shell + rostopic echo /opendr/topic_name + ``` + You can find out the default topic name for each node, in its documentation below. + +- ### Increase performance by disabling output + Optionally, nodes can be modified via command line arguments, which are presented for each node separately below. + Generally, arguments give the option to change the input and output topics, the device the node runs on (CPU or GPU), etc. + When a node publishes on several topics, where applicable, a user can opt to disable one or more of the outputs by providing `None` in the corresponding output topic. + This disables publishing on that topic, forgoing some operations in the node, which might increase its performance. + + _An example would be to disable the output annotated image topic in a node when visualization is not needed and only use the detection message in another node, thus eliminating the OpenCV operations._ + +- ### An example diagram of OpenDR nodes running + ![Pose Estimation ROS node running diagram](../../images/opendr_node_diagram.png) + - On the left, the `usb_cam` node can be seen, which is using a system camera to publish images on the `/usb_cam/image_raw` topic. + - In the middle, OpenDR's pose estimation node is running taking as input the published image. By default, the node has its input topic set to `/usb_cam/image_raw`. + - To the right the two output topics of the pose estimation node can be seen. + The bottom topic `/opendr/image_pose_annotated` is the annotated image which can be easily viewed with `rqt_image_view` as explained earlier. + The other topic `/opendr/poses` is the detection message which contains the detected poses' detailed information. + This message can be easily viewed by running `rostopic echo /opendr/poses` in a terminal with the OpenDR ROS workspace sourced. + + + +---- +## RGB input nodes + +### Pose Estimation ROS Node + +You can find the pose estimation ROS node python script [here](./scripts/pose_estimation_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [pose estimation tool](../../../../src/opendr/perception/pose_estimation/lightweight_open_pose/lightweight_open_pose_learner.py) whose documentation can be found [here](../../../../docs/reference/lightweight-open-pose.md). 
+The node publishes the detected poses in [OpenDR's 2D pose message format](../opendr_bridge/msg/OpenDRPose2D.msg), which saves a list of [OpenDR's keypoint message format](../opendr_bridge/msg/OpenDRPose2DKeypoint.msg). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. You are then ready to start the pose detection node: + ```shell + rosrun opendr_perception pose_estimation_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/usb_cam/image_raw`) + - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_pose_annotated`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/poses`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `--accelerate`: acceleration flag that causes pose estimation to run faster but with less accuracy + +3. Default output topics: + - Output images: `/opendr/image_pose_annotated` + - Detection messages: `/opendr/poses` + + For viewing the output, refer to the [notes above.](#notes) + +### Fall Detection ROS Node + +You can find the fall detection ROS node python script [here](./scripts/fall_detection_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [fall detection tool](../../../../src/opendr/perception/fall_detection/fall_detector_learner.py) whose documentation can be found [here](../../../../docs/reference/fall-detection.md). +Fall detection uses the toolkit's pose estimation tool internally. + + + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. You are then ready to start the fall detection node: + + ```shell + rosrun opendr_perception fall_detection_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/usb_cam/image_raw`) + - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_fallen_annotated`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/fallen`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `--accelerate`: acceleration flag that causes pose estimation that runs internally to run faster but with less accuracy + +3. 
Default output topics: + - Output images: `/opendr/image_fallen_annotated` + - Detection messages: `/opendr/fallen` + + For viewing the output, refer to the [notes above.](#notes) + +### Face Detection ROS Node + +The face detection ROS node supports both the ResNet and MobileNet versions, the latter of which performs masked face detection as well. + +You can find the face detection ROS node python script [here](./scripts/face_detection_retinaface_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [face detection tool](../../../../src/opendr/perception/object_detection_2d/retinaface/retinaface_learner.py) whose documentation can be found [here](../../../../docs/reference/face-detection-2d-retinaface.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. You are then ready to start the face detection node + + ```shell + rosrun opendr_perception face_detection_retinaface_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/usb_cam/image_raw`) + - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_faces_annotated`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/faces`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `--backbone BACKBONE`: retinaface backbone, options are either `mnet` or `resnet`, where `mnet` detects masked faces as well (default=`resnet`) + +3. Default output topics: + - Output images: `/opendr/image_faces_annotated` + - Detection messages: `/opendr/faces` + + For viewing the output, refer to the [notes above.](#notes) + +### Face Recognition ROS Node + +You can find the face recognition ROS node python script [here](./scripts/face_recognition_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [face recognition tool](../../../../src/opendr/perception/face_recognition/face_recognition_learner.py) whose documentation can be found [here](../../../../docs/reference/face-recognition.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. 
You are then ready to start the face recognition node: + + ```shell + rosrun opendr_perception face_recognition_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/usb_cam/image_raw`) + - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_face_reco_annotated`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/face_recognition`) + - `-id or --detections_id_topic DETECTIONS_ID_TOPIC`: topic name for detection ID messages, `None` to stop the node from publishing on this topic (default=`/opendr/face_recognition_id`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `--backbone BACKBONE`: backbone network (default=`mobilefacenet`) + - `--dataset_path DATASET_PATH`: path of the directory where the images of the faces to be recognized are stored (default=`./database`) + +3. Default output topics: + - Output images: `/opendr/image_face_reco_annotated` + - Detection messages: `/opendr/face_recognition` and `/opendr/face_recognition_id` + + For viewing the output, refer to the [notes above.](#notes) + +**Notes** + +Reference images should be placed in a defined structure like: +- imgs + - ID1 + - image1 + - image2 + - ID2 + - ID3 + - ... + +The default dataset path is `./database`. Please use the `--dataset_path ./your/path/` argument to define a custom one. +The name of the sub-folder, e.g. ID1, will be published under `/opendr/face_recognition_id`. + +The database entry and the returned confidence are published under the topic name `/opendr/face_recognition`, and the human-readable ID +under `/opendr/face_recognition_id`. + +### 2D Object Detection ROS Nodes + +For 2D object detection, there are several ROS nodes implemented using various algorithms. The generic object detectors are SSD, YOLOv3, YOLOv5, CenterNet, Nanodet and DETR. + +You can find the 2D object detection ROS node python scripts here: +[SSD node](./scripts/object_detection_2d_ssd_node.py), [YOLOv3 node](./scripts/object_detection_2d_yolov3_node.py), [YOLOv5 node](./scripts/object_detection_2d_yolov5_node.py), [CenterNet node](./scripts/object_detection_2d_centernet_node.py), [Nanodet node](./scripts/object_detection_2d_nanodet_node.py) and [DETR node](./scripts/object_detection_2d_detr_node.py), +where you can inspect the code and modify it as you wish to fit your needs. 
+The nodes make use of the toolkit's various 2D object detection tools: +[SSD tool](../../../../src/opendr/perception/object_detection_2d/ssd/ssd_learner.py), [YOLOv3 tool](../../../../src/opendr/perception/object_detection_2d/yolov3/yolov3_learner.py), [YOLOv5 tool](../../../../src/opendr/perception/object_detection_2d/yolov5/yolov5_learner.py), +[CenterNet tool](../../../../src/opendr/perception/object_detection_2d/centernet/centernet_learner.py), [Nanodet tool](../../../../src/opendr/perception/object_detection_2d/nanodet/nanodet_learner.py), [DETR tool](../../../../src/opendr/perception/object_detection_2d/detr/detr_learner.py), +whose documentation can be found here: +[SSD docs](../../../../docs/reference/object-detection-2d-ssd.md), [YOLOv3 docs](../../../../docs/reference/object-detection-2d-yolov3.md), [YOLOv5 docs](../../../../docs/reference/object-detection-2d-yolov5.md), +[CenterNet docs](../../../../docs/reference/object-detection-2d-centernet.md), [Nanodet docs](../../../../docs/reference/nanodet.md), [DETR docs](../../../../docs/reference/detr.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. You are then ready to start a 2D object detector node: + 1. SSD node + ```shell + rosrun opendr_perception object_detection_2d_ssd_node.py + ``` + The following optional arguments are available for the SSD node: + - `--backbone BACKBONE`: Backbone network (default=`vgg16_atrous`) + - `--nms_type NMS_TYPE`: Non-Maximum Suppression type, options are `default`, `seq2seq-nms`, `soft-nms`, `fast-nms`, `cluster-nms` (default=`default`) + + 2. YOLOv3 node + ```shell + rosrun opendr_perception object_detection_2d_yolov3_node.py + ``` + The following optional argument is available for the YOLOv3 node: + - `--backbone BACKBONE`: Backbone network (default=`darknet53`) + + 3. YOLOv5 node + ```shell + rosrun opendr_perception object_detection_2d_yolov5_node.py + ``` + The following optional argument is available for the YOLOv5 node: + - `--model_name MODEL_NAME`: Network architecture, options are `yolov5s`, `yolov5n`, `yolov5m`, `yolov5l`, `yolov5x`, `yolov5n6`, `yolov5s6`, `yolov5m6`, `yolov5l6`, `custom` (default=`yolov5s`) + + 4. CenterNet node + ```shell + rosrun opendr_perception object_detection_2d_centernet_node.py + ``` + The following optional argument is available for the CenterNet node: + - `--backbone BACKBONE`: Backbone network (default=`resnet50_v1b`) + + 5. Nanodet node + ```shell + rosrun opendr_perception object_detection_2d_nanodet_node.py + ``` + The following optional argument is available for the Nanodet node: + - `--model MODEL`: Model whose config file will be used (default=`plus_m_1.5x_416`) + + 6. 
DETR node + ```shell + rosrun opendr_perception object_detection_2d_detr_node.py + ``` + + The following optional arguments are available for all nodes above: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/usb_cam/image_raw`) + - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_objects_annotated`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/objects`) + - `--device DEVICE`: Device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + +3. Default output topics: + - Output images: `/opendr/image_objects_annotated` + - Detection messages: `/opendr/objects` + + For viewing the output, refer to the [notes above.](#notes) + +### 2D Single Object Tracking ROS Node + +You can find the single object tracking 2D ROS node python script [here](./scripts/object_tracking_2d_siamrpn_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [single object tracking 2D SiamRPN tool](../../../../src/opendr/perception/object_tracking_2d/siamrpn/siamrpn_learner.py) whose documentation can be found [here](../../../../docs/reference/object-tracking-2d-siamrpn.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. You are then ready to start the single object tracking 2D node: + + ```shell + rosrun opendr_perception object_tracking_2d_siamrpn_node.py + ``` + + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC` : listen to RGB images on this topic (default=`/usb_cam/image_raw`) + - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_tracking_annotated`) + - `-t or --tracker_topic TRACKER_TOPIC`: topic name for tracker messages, `None` to stop the node from publishing on this topic (default=`/opendr/tracked_object`) + - `--device DEVICE`: Device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + +3. Default output topics: + - Output images: `/opendr/image_tracking_annotated` + - Detection messages: `/opendr/tracked_object` + + For viewing the output, refer to the [notes above.](#notes) + +**Notes** + +To initialize this node, it is required to provide a bounding box of an object to track. +This is achieved by initializing one of the toolkit's 2D object detectors (YOLOv3) and running object detection once on the input. +Afterwards, **the detected bounding box that is closest to the center of the image** is used to initialize the tracker. +Feel free to modify the node to initialize it in a different way that matches your use case. + +### 2D Object Tracking ROS Nodes + +For 2D object tracking, two ROS nodes are provided, one using Deep Sort and one using FairMOT, both of which can use either pretrained or custom trained models. +The predicted tracking annotations are split into two topics with detections and tracking IDs. 
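For reference, a downstream node can pair the two streams by matching messages that arrive together. The snippet below is only a minimal sketch: it assumes the detections are published as `vision_msgs/Detection2DArray` on `/opendr/objects` and the tracking IDs as `std_msgs/Int32MultiArray` on `/opendr/objects_tracking_id` (the exact message types are not confirmed here), and that the i-th detection corresponds to the i-th ID.

```python
#!/usr/bin/env python3
# Minimal sketch: pair 2D tracking detections with their tracking IDs.
# Assumptions: detections arrive as vision_msgs/Detection2DArray, IDs as
# std_msgs/Int32MultiArray, and the i-th detection matches the i-th ID.
import rospy
import message_filters
from std_msgs.msg import Int32MultiArray
from vision_msgs.msg import Detection2DArray


def on_pair(detections, ids):
    for det, track_id in zip(detections.detections, ids.data):
        center = det.bbox.center
        rospy.loginfo("track %d at (%.1f, %.1f)", track_id, center.x, center.y)


if __name__ == "__main__":
    rospy.init_node("tracking_consumer_example")
    det_sub = message_filters.Subscriber("/opendr/objects", Detection2DArray)
    id_sub = message_filters.Subscriber("/opendr/objects_tracking_id", Int32MultiArray)
    # Int32MultiArray has no header, so the synchronizer must allow headerless messages
    sync = message_filters.ApproximateTimeSynchronizer(
        [det_sub, id_sub], queue_size=10, slop=0.1, allow_headerless=True)
    sync.registerCallback(on_pair)
    rospy.spin()
```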
Additionally, an annotated image is generated. + +You can find the 2D object tracking ROS node python scripts here: [Deep Sort node](./scripts/object_tracking_2d_deep_sort_node.py) and [FairMOT node](./scripts/object_tracking_2d_fair_mot_node.py), +where you can inspect the code and modify it as you wish to fit your needs. +The nodes make use of the toolkit's [object tracking 2D - Deep Sort tool](../../../../src/opendr/perception/object_tracking_2d/deep_sort/object_tracking_2d_deep_sort_learner.py) +and [object tracking 2D - FairMOT tool](../../../../src/opendr/perception/object_tracking_2d/fair_mot/object_tracking_2d_fair_mot_learner.py) +whose documentation can be found here: [Deep Sort docs](../../../../docs/reference/object-tracking-2d-deep-sort.md), [FairMOT docs](../../../../docs/reference/object-tracking-2d-fair-mot.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. You are then ready to start a 2D object tracking node: + 1. Deep Sort node + ```shell + rosrun opendr_perception object_tracking_2d_deep_sort_node.py + ``` + The following optional argument is available for the Deep Sort node: + - `-n --model_name MODEL_NAME`: name of the trained model (default=`deep_sort`) + 2. FairMOT node + ```shell + rosrun opendr_perception object_tracking_2d_fair_mot_node.py + ``` + The following optional argument is available for the FairMOT node: + - `-n --model_name MODEL_NAME`: name of the trained model (default=`fairmot_dla34`) + + The following optional arguments are available for both nodes: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/usb_cam/image_raw`) + - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_objects_annotated`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/objects`) + - `-t or --tracking_id_topic TRACKING_ID_TOPIC`: topic name for tracking ID messages, `None` to stop the node from publishing on this topic (default=`/opendr/objects_tracking_id`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `-td --temp_dir TEMP_DIR`: path to a temporary directory with models (default=`temp`) + +3. Default output topics: + - Output images: `/opendr/image_objects_annotated` + - Detection messages: `/opendr/objects` + - Tracking ID messages: `/opendr/objects_tracking_id` + + For viewing the output, refer to the [notes above.](#notes) + +**Notes** + +An [image dataset node](#image-dataset-ros-node) is also provided to be used along with these nodes. +Make sure to change the default input topic of the tracking node if you are not using the USB cam node. + +### Panoptic Segmentation ROS Node + +You can find the panoptic segmentation ROS node python script [here](./scripts/panoptic_segmentation_efficient_ps_node.py) to inspect the code and modify it as you wish to fit your needs. 
+The node makes use of the toolkit's [panoptic segmentation tool](../../../../src/opendr/perception/panoptic_segmentation/efficient_ps/efficient_ps_learner.py) whose documentation can be found [here](../../../../docs/reference/efficient-ps.md) +and additional information about Efficient PS [here](../../../../src/opendr/perception/panoptic_segmentation/README.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. You are then ready to start the panoptic segmentation node: + + ```shell + rosrun opendr_perception panoptic_segmentation_efficient_ps_node.py + ``` + + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC` : listen to RGB images on this topic (default=`/usb_cam/image_raw`) + - `-oh --output_heatmap_topic OUTPUT_HEATMAP_TOPIC`: publish the semantic and instance maps on this topic as `OUTPUT_HEATMAP_TOPIC/semantic` and `OUTPUT_HEATMAP_TOPIC/instance`, `None` to stop the node from publishing on this topic (default=`/opendr/panoptic`) + - `-ov --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: publish the panoptic segmentation map as an RGB image on this topic or a more detailed overview if using the `--detailed_visualization` flag, `None` to stop the node from publishing on this topic (default=`opendr/panoptic/rgb_visualization`) + - `--detailed_visualization`: generate a combined overview of the input RGB image and the semantic, instance, and panoptic segmentation maps and publish it on `OUTPUT_RGB_IMAGE_TOPIC` (default=deactivated) + - `--checkpoint CHECKPOINT` : download pretrained models [cityscapes, kitti] or load from the provided path (default=`cityscapes`) + +3. Default output topics: + - Output images: `/opendr/panoptic/semantic`, `/opendr/panoptic/instance`, `/opendr/panoptic/rgb_visualization` + - Detection messages: `/opendr/panoptic/semantic`, `/opendr/panoptic/instance` + + For viewing the output, refer to the [notes above.](#notes) + +### Semantic Segmentation ROS Node + +You can find the semantic segmentation ROS node python script [here](./scripts/semantic_segmentation_bisenet_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [semantic segmentation tool](../../../../src/opendr/perception/semantic_segmentation/bisenet/bisenet_learner.py) whose documentation can be found [here](../../../../docs/reference/semantic-segmentation.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. 
You are then ready to start the semantic segmentation node: + + ```shell + rosrun opendr_perception semantic_segmentation_bisenet_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/usb_cam/image_raw`) + - `-o or --output_heatmap_topic OUTPUT_HEATMAP_TOPIC`: topic to which we are publishing the heatmap in the form of a ROS image containing class IDs, `None` to stop the node from publishing on this topic (default=`/opendr/heatmap`) + - `-ov or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic to which we are publishing the heatmap image blended with the input image and a class legend for visualization purposes, `None` to stop the node from publishing on this topic (default=`/opendr/heatmap_visualization`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + +3. Default output topics: + - Output images: `/opendr/heatmap`, `/opendr/heatmap_visualization` + - Detection messages: `/opendr/heatmap` + + For viewing the output, refer to the [notes above.](#notes) + +**Notes** + +On the table below you can find the detectable classes and their corresponding IDs: + +| Class | Bicyclist | Building | Car | Column Pole | Fence | Pedestrian | Road | Sidewalk | Sign Symbol | Sky | Tree | Unknown | +|--------|-----------|----------|-----|-------------|-------|------------|------|----------|-------------|-----|------|---------| +| **ID** | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | + +### Image-based Facial Emotion Estimation ROS Node + +You can find the image-based facial emotion estimation ROS node python script [here](./scripts/facial_emotion_estimation_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's image-based facial emotion estimation tool which can be found [here](../../../../src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/facial_emotion_learner.py) +whose documentation can be found [here](../../../../docs/reference/image_based_facial_emotion_estimation.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. 
You are then ready to start the image-based facial emotion estimation node: + + ```shell + rosrun opendr_perception facial_emotion_estimation_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/usb_cam/image_raw`) + - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_emotion_estimation_annotated`) + - `-e or --output_emotions_topic OUTPUT_EMOTIONS_TOPIC`: topic to which we are publishing the facial emotion results, `None` to stop the node from publishing on this topic (default=`"/opendr/facial_emotion_estimation"`) + - `-m or --output_emotions_description_topic OUTPUT_EMOTIONS_DESCRIPTION_TOPIC`: topic to which we are publishing the description of the estimated facial emotion, `None` to stop the node from publishing on this topic (default=`/opendr/facial_emotion_estimation_description`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + +3. Default output topics: + - Output images: `/opendr/image_emotion_estimation_annotated` + - Detection messages: `/opendr/facial_emotion_estimation`, `/opendr/facial_emotion_estimation_description` + + For viewing the output, refer to the [notes above.](#notes) + +**Notes** + +This node requires the detection of a face first. This is achieved by including of the toolkit's face detector and running face detection on the input. +Afterwards, the detected bounding box of the face is cropped and fed into the facial emotion estimator. +Feel free to modify the node to detect faces in a different way that matches your use case. + +### Landmark-based Facial Expression Recognition ROS Node + +A ROS node for performing landmark-based facial expression recognition using a trained model on AFEW, CK+ or Oulu-CASIA datasets. +OpenDR does not include a pretrained model, so one should be provided by the user. +An alternative would be to use the [image-based facial expression estimation node](#image-based-facial-emotion-estimation-ros-node) provided by the toolkit. + +You can find the landmark-based facial expression recognition ROS node python script [here](./scripts/landmark_based_facial_expression_recognition_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's landmark-based facial expression recognition tool which can be found [here](../../../../src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/progressive_spatio_temporal_bln_learner.py) +whose documentation can be found [here](../../../../docs/reference/landmark-based-facial-expression-recognition.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. 
You are then ready to start the landmark-based facial expression recognition node: + + ```shell + rosrun opendr_perception landmark_based_facial_expression_recognition_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/usb_cam/image_raw`) + - `-o or --output_category_topic OUTPUT_CATEGORY_TOPIC`: topic to which we are publishing the recognized facial expression category info, `None` to stop the node from publishing on this topic (default=`"/opendr/landmark_expression_recognition"`) + - `-d or --output_category_description_topic OUTPUT_CATEGORY_DESCRIPTION_TOPIC`: topic to which we are publishing the description of the recognized facial expression, `None` to stop the node from publishing on this topic (default=`/opendr/landmark_expression_recognition_description`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `--model`: architecture to use for facial expression recognition, options are `pstbln_ck+`, `pstbln_casia`, `pstbln_afew` (default=`pstbln_afew`) + - `-s or --shape_predictor SHAPE_PREDICTOR`: shape predictor (landmark_extractor) to use (default=`./predictor_path`) + +3. Default output topics: + - Detection messages: `/opendr/landmark_expression_recognition`, `/opendr/landmark_expression_recognition_description` + + For viewing the output, refer to the [notes above.](#notes) + +### Skeleton-based Human Action Recognition ROS Node + +A ROS node for performing skeleton-based human action recognition using either ST-GCN or PST-GCN models pretrained on NTU-RGBD-60 dataset. +The human body poses of the image are first extracted by the lightweight OpenPose method which is implemented in the toolkit, and they are passed to the skeleton-based action recognition method to be categorized. + +You can find the skeleton-based human action recognition ROS node python script [here](./scripts/skeleton_based_action_recognition_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's skeleton-based human action recognition tool which can be found [here for ST-GCN](../../../../src/opendr/perception/skeleton_based_action_recognition/spatio_temporal_gcn_learner.py) +and [here for PST-GCN](../../../../src/opendr/perception/skeleton_based_action_recognition/progressive_spatio_temporal_gcn_learner.py) +whose documentation can be found [here](../../../../docs/reference/skeleton-based-action-recognition.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. 
You are then ready to start the skeleton-based human action recognition node: + + ```shell + rosrun opendr_perception skeleton_based_action_recognition_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/usb_cam/image_raw`) + - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output pose-annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_pose_annotated`) + - `-p or --pose_annotations_topic POSE_ANNOTATIONS_TOPIC`: topic name for pose annotations, `None` to stop the node from publishing on this topic (default=`/opendr/poses`) + - `-c or --output_category_topic OUTPUT_CATEGORY_TOPIC`: topic name for recognized action category, `None` to stop the node from publishing on this topic (default=`"/opendr/skeleton_recognized_action"`) + - `-d or --output_category_description_topic OUTPUT_CATEGORY_DESCRIPTION_TOPIC`: topic name for description of the recognized action category, `None` to stop the node from publishing on this topic (default=`/opendr/skeleton_recognized_action_description`) + - `--model`: model to use, options are `stgcn` or `pstgcn`, (default=`stgcn`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + +3. Default output topics: + - Detection messages: `/opendr/skeleton_based_action_recognition`, `/opendr/skeleton_based_action_recognition_description`, `/opendr/poses` + - Output images: `/opendr/image_pose_annotated` + + For viewing the output, refer to the [notes above.](#notes) + +### Video Human Activity Recognition ROS Node + +A ROS node for performing human activity recognition using either CoX3D or X3D models pretrained on Kinetics400. + +You can find the video human activity recognition ROS node python script [here](./scripts/video_activity_recognition_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's video human activity recognition tools which can be found [here for CoX3D](../../../../src/opendr/perception/activity_recognition/cox3d/cox3d_learner.py) and +[here for X3D](../../../../src/opendr/perception/activity_recognition/x3d/x3d_learner.py) whose documentation can be found [here](../../../../docs/reference/activity-recognition.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. 
You are then ready to start the video human activity recognition node: + + ```shell + rosrun opendr_perception video_activity_recognition_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/usb_cam/image_raw`) + - `-o or --output_category_topic OUTPUT_CATEGORY_TOPIC`: topic to which we are publishing the recognized activity, `None` to stop the node from publishing on this topic (default=`"/opendr/human_activity_recognition"`) + - `-od or --output_category_description_topic OUTPUT_CATEGORY_DESCRIPTION_TOPIC`: topic to which we are publishing the ID of the recognized action, `None` to stop the node from publishing on this topic (default=`/opendr/human_activity_recognition_description`) + - `--model`: architecture to use for human activity recognition, options are `cox3d-s`, `cox3d-m`, `cox3d-l`, `x3d-xs`, `x3d-s`, `x3d-m`, or `x3d-l` (default=`cox3d-m`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + +3. Default output topics: + - Detection messages: `/opendr/human_activity_recognition`, `/opendr/human_activity_recognition_description` + + For viewing the output, refer to the [notes above.](#notes) + +**Notes** + +You can find the corresponding IDs regarding activity recognition [here](https://github.com/opendr-eu/opendr/blob/master/src/opendr/perception/activity_recognition/datasets/kinetics400_classes.csv). + +## RGB + Infrared input + +### 2D Object Detection GEM ROS Node + +You can find the object detection 2D GEM ROS node python script [here](./scripts/object_detection_2d_gem_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [object detection 2D GEM tool](../../../../src/opendr/perception/object_detection_2d/gem/gem_learner.py) +whose documentation can be found [here](../../../../docs/reference/gem.md). + +#### Instructions for basic usage: + +1. First one needs to find points in the color and infrared images that correspond, in order to find the homography matrix that allows to correct for the difference in perspective between the infrared and the RGB camera. + These points can be selected using a [utility tool](../../../../src/opendr/perception/object_detection_2d/utils/get_color_infra_alignment.py) that is provided in the toolkit. + +2. Pass the points you have found as *pts_color* and *pts_infra* arguments to the [ROS GEM node](./scripts/object_detection_2d_gem.py). + +3. Start the node responsible for publishing images. If you have a RealSense camera, then you can use the corresponding node (assuming you have installed [realsense2_camera](http://wiki.ros.org/realsense2_camera)): + + ```shell + roslaunch realsense2_camera rs_camera.launch enable_color:=true enable_infra:=true enable_depth:=false enable_sync:=true infra_width:=640 infra_height:=480 + ``` + +4. 
You are then ready to start the object detection 2d GEM node: + + ```shell + rosrun opendr_perception object_detection_2d_gem_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-ic or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/camera/color/image_raw`) + - `-ii or --input_infra_image_topic INPUT_INFRA_IMAGE_TOPIC`: topic name for input infrared image (default=`/camera/infra/image_raw`) + - `-oc or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/rgb_image_objects_annotated`) + - `-oi or --output_infra_image_topic OUTPUT_INFRA_IMAGE_TOPIC`: topic name for output annotated infrared image, `None` to stop the node from publishing on this topic (default=`/opendr/infra_image_objects_annotated`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/objects`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + +5. Default output topics: + - Output RGB images: `/opendr/rgb_image_objects_annotated` + - Output infrared images: `/opendr/infra_image_objects_annotated` + - Detection messages: `/opendr/objects` + + For viewing the output, refer to the [notes above.](#notes) + +---- +## RGBD input + +### RGBD Hand Gesture Recognition ROS Node +A ROS node for performing hand gesture recognition using a MobileNetv2 model trained on HANDS dataset. +The node has been tested with Kinectv2 for depth data acquisition with the following drivers: https://github.com/OpenKinect/libfreenect2 and https://github.com/code-iai/iai_kinect2. + +You can find the RGBD hand gesture recognition ROS node python script [here](./scripts/rgbd_hand_gesture_recognition_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [hand gesture recognition tool](../../../../src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/rgbd_hand_gesture_learner.py) +whose documentation can be found [here](../../../../docs/reference/rgbd-hand-gesture-learner.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images from an RGBD camera. Remember to modify the input topics using the arguments in step 2 if needed. + +2. You are then ready to start the hand gesture recognition node: + ```shell + rosrun opendr_perception rgbd_hand_gesture_recognition_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-ic or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/kinect2/qhd/image_color_rect`) + - `-id or --input_depth_image_topic INPUT_DEPTH_IMAGE_TOPIC`: topic name for input depth image (default=`/kinect2/qhd/image_depth_rect`) + - `-o or --output_gestures_topic OUTPUT_GESTURES_TOPIC`: topic name for predicted gesture class (default=`/opendr/gestures`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + +3. 
Default output topics: + - Detection messages:`/opendr/gestures` + + For viewing the output, refer to the [notes above.](#notes) + +---- +## RGB + Audio input + +### Audiovisual Emotion Recognition ROS Node + +You can find the audiovisual emotion recognition ROS node python script [here](./scripts/audiovisual_emotion_recognition_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [audiovisual emotion recognition tool](../../../../src/opendr/perception/multimodal_human_centric/audiovisual_emotion_learner/avlearner.py), +whose documentation can be found [here](../../../../docs/reference/audiovisual-emotion-recognition-learner.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). +2. Start the node responsible for publishing audio. Remember to modify the input topics using the arguments in step 2 if needed. +3. You are then ready to start the audiovisual emotion recognition node + + ```shell + rosrun opendr_perception audiovisual_emotion_recognition_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-iv or --input_video_topic INPUT_VIDEO_TOPIC`: topic name for input video, expects detected face of size 224x224 (default=`/usb_cam/image_raw`) + - `-ia or --input_audio_topic INPUT_AUDIO_TOPIC`: topic name for input audio (default=`/audio/audio`) + - `-o or --output_emotions_topic OUTPUT_EMOTIONS_TOPIC`: topic to which we are publishing the predicted emotion (default=`/opendr/audiovisual_emotion`) + - `--buffer_size BUFFER_SIZE`: length of audio and video in seconds, (default=`3.6`) + - `--model_path MODEL_PATH`: if given, the pretrained model will be loaded from the specified local path, otherwise it will be downloaded from an OpenDR FTP server + +4. Default output topics: + - Detection messages: `/opendr/audiovisual_emotion` + + For viewing the output, refer to the [notes above.](#notes) + +---- +## Audio input + +### Speech Command Recognition ROS Node + +A ROS node for recognizing speech commands from an audio stream using MatchboxNet, EdgeSpeechNets or Quadratic SelfONN models, pretrained on the Google Speech Commands dataset. + +You can find the speech command recognition ROS node python script [here](./scripts/speech_command_recognition_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's speech command recognition tools: +[EdgeSpeechNets tool](../../../../src/opendr/perception/speech_recognition/edgespeechnets/edgespeechnets_learner.py), [MatchboxNet tool](../../../../src/opendr/perception/speech_recognition/matchboxnet/matchboxnet_learner.py), [Quadratic SelfONN tool](../../../../src/opendr/perception/speech_recognition/quadraticselfonn/quadraticselfonn_learner.py) +whose documentation can be found here: +[EdgeSpeechNet docs](../../../../docs/reference/edgespeechnets.md), [MatchboxNet docs](../../../../docs/reference/matchboxnet.md), [Quadratic SelfONN docs](../../../../docs/reference/quadratic-selfonn.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing audio. Remember to modify the input topics using the arguments in step 2, if needed. + +2. 
You are then ready to start the speech command recognition node + + ```shell + rosrun opendr_perception speech_command_recognition_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_audio_topic INPUT_AUDIO_TOPIC`: topic name for input audio (default=`/audio/audio`) + - `-o or --output_speech_command_topic OUTPUT_SPEECH_COMMAND_TOPIC`: topic name for speech command output (default=`/opendr/speech_recognition`) + - `--buffer_size BUFFER_SIZE`: set the size of the audio buffer (expected command duration) in seconds (default=`1.5`) + - `--model MODEL`: the model to use, choices are `matchboxnet`, `edgespeechnets` or `quad_selfonn` (default=`matchboxnet`) + - `--model_path MODEL_PATH`: if given, the pretrained model will be loaded from the specified local path, otherwise it will be downloaded from an OpenDR FTP server + +3. Default output topics: + - Detection messages, class id and confidence: `/opendr/speech_recognition` + + For viewing the output, refer to the [notes above.](#notes) + +**Notes** + +EdgeSpeechNets currently does not have a pretrained model available for download, only local files may be used. + +---- +## Point cloud input + +### 3D Object Detection Voxel ROS Node + +A ROS node for performing 3D object detection Voxel using PointPillars or TANet methods with either pretrained models on KITTI dataset, or custom trained models. + +You can find the 3D object detection Voxel ROS node python script [here](./scripts/object_detection_3d_voxel_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [3D object detection Voxel tool](../../../../src/opendr/perception/object_detection_3d/voxel_object_detection_3d/voxel_object_detection_3d_learner.py) +whose documentation can be found [here](../../../../docs/reference/voxel-object-detection-3d.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing point clouds. OpenDR provides a [point cloud dataset node](#point-cloud-dataset-ros-node) for convenience. + +2. You are then ready to start the 3D object detection node: + + ```shell + rosrun opendr_perception object_detection_3d_voxel_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_point_cloud_topic INPUT_POINT_CLOUD_TOPIC`: point cloud topic provided by either a point_cloud_dataset_node or any other 3D point cloud node (default=`/opendr/dataset_point_cloud`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages (default=`/opendr/objects3d`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `-n or --model_name MODEL_NAME`: name of the trained model (default=`tanet_car_xyres_16`) + - `-c or --model_config_path MODEL_CONFIG_PATH`: path to a model .proto config (default=`../../src/opendr/perception/object_detection3d/voxel_object_detection_3d/second_detector/configs/tanet/car/xyres_16.proto`) + +3. Default output topics: + - Detection messages: `/opendr/objects3d` + + For viewing the output, refer to the [notes above.](#notes) + +### 3D Object Tracking AB3DMOT ROS Node + +A ROS node for performing 3D object tracking using AB3DMOT stateless method. +This is a detection-based method, and therefore the 3D object detector is needed to provide detections, which then will be used to make associations and generate tracking ids. 
+The predicted tracking annotations are split into two topics with detections and tracking IDs. + +You can find the 3D object tracking AB3DMOT ROS node python script [here](./scripts/object_tracking_3d_ab3dmot_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [3D object tracking AB3DMOT tool](../../../../src/opendr/perception/object_tracking_3d/ab3dmot/object_tracking_3d_ab3dmot_learner.py) +whose documentation can be found [here](../../../../docs/reference/object-tracking-3d-ab3dmot.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing point clouds. OpenDR provides a [point cloud dataset node](#point-cloud-dataset-ros-node) for convenience. + +2. You are then ready to start the 3D object tracking node: + + ```shell + rosrun opendr_perception object_tracking_3d_ab3dmot_node.py + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_point_cloud_topic INPUT_POINT_CLOUD_TOPIC`: point cloud topic provided by either a point_cloud_dataset_node or any other 3D point cloud node (default=`/opendr/dataset_point_cloud`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/objects3d`) + - `-t or --tracking3d_id_topic TRACKING3D_ID_TOPIC`: topic name for output tracking IDs with the same element count as in detection topic, `None` to stop the node from publishing on this topic (default=`/opendr/objects_tracking_id`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `-dn or --detector_model_name DETECTOR_MODEL_NAME`: name of the trained model (default=`tanet_car_xyres_16`) + - `-dc or --detector_model_config_path DETECTOR_MODEL_CONFIG_PATH`: path to a model .proto config (default=`../../src/opendr/perception/object_detection3d/voxel_object_detection_3d/second_detector/configs/tanet/car/xyres_16.proto`) + +3. Default output topics: + - Detection messages: `/opendr/objects3d` + - Tracking ID messages: `/opendr/objects_tracking_id` + + For viewing the output, refer to the [notes above.](#notes) + +---- +## Biosignal input + +### Heart Anomaly Detection ROS Node + +A ROS node for performing heart anomaly (atrial fibrillation) detection from ECG data using GRU or ANBOF models trained on AF dataset. + +You can find the heart anomaly detection ROS node python script [here](./scripts/heart_anomaly_detection_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's heart anomaly detection tools: [ANBOF tool](../../../../src/opendr/perception/heart_anomaly_detection/attention_neural_bag_of_feature/attention_neural_bag_of_feature_learner.py) and +[GRU tool](../../../../src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/gated_recurrent_unit_learner.py), whose documentation can be found here: +[ANBOF docs](../../../../docs/reference/attention-neural-bag-of-feature-learner.md) and [GRU docs](../../../../docs/reference/gated-recurrent-unit-learner.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing ECG data. + +2. 
You are then ready to start the heart anomaly detection node:
+
+   ```shell
+   rosrun opendr_perception heart_anomaly_detection_node.py
+   ```
+   The following optional arguments are available:
+   - `-h or --help`: show a help message and exit
+   - `-i or --input_ecg_topic INPUT_ECG_TOPIC`: topic name for input ECG data (default=`/ecg/ecg`)
+   - `-o or --output_heart_anomaly_topic OUTPUT_HEART_ANOMALY_TOPIC`: topic name for heart anomaly detection (default=`/opendr/heart_anomaly`)
+   - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`)
+   - `--model MODEL`: the model to use, choices are `anbof` or `gru` (default=`anbof`)
+
+3. Default output topics:
+   - Detection messages: `/opendr/heart_anomaly`
+
+   For viewing the output, refer to the [notes above.](#notes)
+
+----
+## Dataset ROS Nodes
+
+The dataset nodes can be used to publish data from the disk, which is useful for testing functionality without the use of a sensor.
+Dataset nodes use a provided `DatasetIterator` object that returns a `(Data, Target)` pair.
+If the type of the `Data` object is correct, the node will transform it into a corresponding ROS message object and publish it to a desired topic.
+The OpenDR toolkit currently provides two such nodes, an image dataset node and a point cloud dataset node.
+
+### Image Dataset ROS Node
+
+The image dataset node downloads the `nano_MOT20` dataset from OpenDR's FTP server and uses it to publish data to a ROS topic,
+which is intended to be used with the [2D object tracking nodes](#2d-object-tracking-ros-nodes).
+
+You can create an instance of this node with any `DatasetIterator` object that returns `(Image, Target)` as elements,
+to use alongside other nodes and datasets.
+You can inspect [the node](./scripts/image_dataset_node.py) and modify it to your needs for other image datasets.
+
+To get an image from a dataset on the disk, you can start an `image_dataset_node.py` node as:
+```shell
+rosrun opendr_perception image_dataset_node.py
+```
+The following optional arguments are available:
+  - `-h or --help`: show a help message and exit
+  - `-o or --output_rgb_image_topic`: topic name to publish the data (default=`/opendr/dataset_image`)
+  - `-f or --fps FPS`: data FPS (default=`10`)
+  - `-d or --dataset_path DATASET_PATH`: path to a dataset (default=`/MOT`)
+  - `-ks or --mot20_subsets_path MOT20_SUBSETS_PATH`: path to MOT20 subsets (default=`../../src/opendr/perception/object_tracking_2d/datasets/splits/nano_mot20.train`)
+
+### Point Cloud Dataset ROS Node
+
+The point cloud dataset node downloads the `nano_KITTI` dataset from OpenDR's FTP server and uses it to publish data to a ROS topic,
+which is intended to be used with the [3D object detection node](#3d-object-detection-voxel-ros-node),
+as well as the [3D object tracking node](#3d-object-tracking-ab3dmot-ros-node).
+
+You can create an instance of this node with any `DatasetIterator` object that returns `(PointCloud, Target)` as elements,
+to use alongside other nodes and datasets.
+You can inspect [the node](./scripts/point_cloud_dataset_node.py) and modify it to your needs for other point cloud datasets.
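[Editor's note, not part of this PR] As a minimal sketch of the `DatasetIterator` interface described above: the dataset nodes only require `__len__` and `__getitem__` returning a `(Data, Target)` pair, so a custom data source can be wrapped as below and handed to the corresponding node class. The `FolderImageDataset` name, the folder layout, and the dummy `None` targets are assumptions made purely for illustration; the actual nodes in this PR use `RawMotDatasetIterator` and the nano KITTI iterator instead.

```python
import os

import cv2

from opendr.engine.data import Image
from opendr.engine.datasets import DatasetIterator


class FolderImageDataset(DatasetIterator):
    """Hypothetical iterator over the image files in a folder, yielding (Image, None) pairs."""

    def __init__(self, folder):
        super().__init__()
        self.paths = sorted(
            os.path.join(folder, f)
            for f in os.listdir(folder)
            if f.lower().endswith((".png", ".jpg", ".jpeg"))
        )

    def __len__(self):
        return len(self.paths)

    def __getitem__(self, idx):
        # The dataset nodes only read the first element of the pair,
        # so a dummy target is sufficient for publishing purposes.
        return Image(cv2.imread(self.paths[idx])), None
```

An instance of such an iterator could then be passed to the `ImageDatasetNode` constructor in `image_dataset_node.py` in place of the MOT20 iterator; the point cloud dataset node follows the same pattern with `(PointCloud, Target)` elements.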
+ +To get a point cloud from a dataset on the disk, you can start a `point_cloud_dataset.py` node as: +```shell +rosrun opendr_perception point_cloud_dataset_node.py +``` +The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-o or --output_point_cloud_topic`: topic name to publish the data (default=`/opendr/dataset_point_cloud`) + - `-f or --fps FPS`: data fps (default=`10`) + - `-d or --dataset_path DATASET_PATH`: path to a dataset, if it does not exist, nano KITTI dataset will be downloaded there (default=`/KITTI/opendr_nano_kitti`) + - `-ks or --kitti_subsets_path KITTI_SUBSETS_PATH`: path to KITTI subsets, used only if a KITTI dataset is downloaded (default=`../../src/opendr/perception/object_detection_3d/datasets/nano_kitti_subsets`) diff --git a/projects/opendr_ws/src/perception/include/perception/.keep b/projects/opendr_ws/src/opendr_perception/include/opendr_perception/.keep similarity index 100% rename from projects/opendr_ws/src/perception/include/perception/.keep rename to projects/opendr_ws/src/opendr_perception/include/opendr_perception/.keep diff --git a/projects/opendr_ws/src/perception/package.xml b/projects/opendr_ws/src/opendr_perception/package.xml similarity index 94% rename from projects/opendr_ws/src/perception/package.xml rename to projects/opendr_ws/src/opendr_perception/package.xml index 7b7c0e00c9..b9a89f0245 100644 --- a/projects/opendr_ws/src/perception/package.xml +++ b/projects/opendr_ws/src/opendr_perception/package.xml @@ -1,7 +1,7 @@ - perception - 1.1.1 + opendr_perception + 2.0.0 OpenDR's ROS nodes for perception package OpenDR Project Coordinator Apache License v2.0 diff --git a/projects/opendr_ws/src/perception/scripts/audiovisual_emotion_recognition.py b/projects/opendr_ws/src/opendr_perception/scripts/audiovisual_emotion_recognition_node.py old mode 100644 new mode 100755 similarity index 67% rename from projects/opendr_ws/src/perception/scripts/audiovisual_emotion_recognition.py rename to projects/opendr_ws/src/opendr_perception/scripts/audiovisual_emotion_recognition_node.py index c4fe3e126a..91bff9b3c8 --- a/projects/opendr_ws/src/perception/scripts/audiovisual_emotion_recognition.py +++ b/projects/opendr_ws/src/opendr_perception/scripts/audiovisual_emotion_recognition_node.py @@ -19,6 +19,7 @@ import numpy as np import torch import librosa +import cv2 import rospy import message_filters @@ -35,28 +36,25 @@ class AudiovisualEmotionNode: def __init__(self, input_video_topic="/usb_cam/image_raw", input_audio_topic="/audio/audio", - annotations_topic="/opendr/audiovisual_emotion", buffer_size=3.6, device="cuda"): + output_emotions_topic="/opendr/audiovisual_emotion", buffer_size=3.6, device="cuda"): """ Creates a ROS Node for audiovisual emotion recognition :param input_video_topic: Topic from which we are reading the input video. 
Expects detected face of size 224x224 :type input_video_topic: str :param input_audio_topic: Topic from which we are reading the input audio :type input_audio_topic: str - :param annotations_topic: Topic to which we are publishing the predicted class - :type annotations_topic: str + :param output_emotions_topic: Topic to which we are publishing the predicted class + :type output_emotions_topic: str :param buffer_size: length of audio and video in sec :type buffer_size: float :param device: device on which we are running inference ('cpu' or 'cuda') :type device: str """ - self.publisher = rospy.Publisher(annotations_topic, Classification2D, queue_size=10) + self.publisher = rospy.Publisher(output_emotions_topic, Classification2D, queue_size=10) - video_sub = message_filters.Subscriber(input_video_topic, ROS_Image) - audio_sub = message_filters.Subscriber(input_audio_topic, AudioData) - # synchronize video and audio data topics - ts = message_filters.ApproximateTimeSynchronizer([video_sub, audio_sub], 10, 0.1, allow_headerless=True) - ts.registerCallback(self.callback) + self.input_video_topic = input_video_topic + self.input_audio_topic = input_audio_topic self.bridge = ROSBridge() @@ -77,21 +75,31 @@ def listen(self): """ Start the node and begin processing input data """ - rospy.init_node('opendr_audiovisualemotion_recognition', anonymous=True) - rospy.loginfo("Audiovisual emotion recognition node started!") + rospy.init_node('opendr_audiovisual_emotion_recognition_node', anonymous=True) + + video_sub = message_filters.Subscriber(self.input_video_topic, ROS_Image) + audio_sub = message_filters.Subscriber(self.input_audio_topic, AudioData) + # synchronize video and audio data topics + ts = message_filters.ApproximateTimeSynchronizer([video_sub, audio_sub], 10, 0.1, allow_headerless=True) + ts.registerCallback(self.callback) + + rospy.loginfo("Audiovisual emotion recognition node started.") rospy.spin() def callback(self, image_data, audio_data): """ Callback that process the input data and publishes to the corresponding topics - :param image_data: input image message, face image of size 224x224 + :param image_data: input image message, face image :type image_data: sensor_msgs.msg.Image :param audio_data: input audio message, speech :type audio_data: audio_common_msgs.msg.AudioData """ audio_data = np.reshape(np.frombuffer(audio_data.data, dtype=np.int16)/32768.0, (1, -1)) self.data_buffer = np.append(self.data_buffer, audio_data) + image_data = self.bridge.from_ros_image(image_data, encoding='bgr8').convert(format='channels_last') + image_data = cv2.resize(image_data, (224, 224)) + self.video_buffer = np.append(self.video_buffer, np.expand_dims(image_data.data, 0), axis=0) if self.data_buffer.shape[0] > 16000*self.buffer_size: @@ -116,16 +124,35 @@ def callback(self, image_data, audio_data): def select_distributed(m, n): return [i*n//m + n//(2*m) for i in range(m)] -if __name__ == '__main__': - device = 'cuda' if torch.cuda.is_available() else 'cpu' +if __name__ == '__main__': parser = argparse.ArgumentParser() - parser.add_argument('--video_topic', type=str, help='listen to video input data on this topic') - parser.add_argument('--audio_topic', type=str, help='listen to audio input data on this topic') - parser.add_argument('--buffer_size', type=float, default=3.6, help='size of the audio buffer in seconds') + parser.add_argument("-iv", "--input_video_topic", type=str, default="/usb_cam/image_raw", + help="Listen to video input data on this topic") + parser.add_argument("-ia", 
"--input_audio_topic", type=str, default="/audio/audio", + help="Listen to audio input data on this topic") + parser.add_argument("-o", "--output_emotions_topic", type=str, default="/opendr/audiovisual_emotion", + help="Topic name for output emotions recognition") + parser.add_argument("--device", type=str, default="cuda", + help="Device to use (cpu, cuda)", choices=["cuda", "cpu"]) + parser.add_argument("--buffer_size", type=float, default=3.6, + help="Size of the audio buffer in seconds") args = parser.parse_args() - avnode = AudiovisualEmotionNode(input_video_topic=args.video_topic, input_audio_topic=args.audio_topic, - annotations_topic="/opendr/audiovisual_emotion", + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU") + device = "cpu" + except: + print("Using CPU") + device = "cpu" + + avnode = AudiovisualEmotionNode(input_video_topic=args.input_video_topic, input_audio_topic=args.input_audio_topic, + output_emotions_topic=args.output_emotions_topic, buffer_size=args.buffer_size, device=device) avnode.listen() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/face_detection_retinaface_node.py b/projects/opendr_ws/src/opendr_perception/scripts/face_detection_retinaface_node.py new file mode 100755 index 0000000000..24665eb35c --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/face_detection_retinaface_node.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import mxnet as mx + +import rospy +from vision_msgs.msg import Detection2DArray +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import RetinaFaceLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +class FaceDetectionNode: + + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_faces_annotated", detections_topic="/opendr/faces", + device="cuda", backbone="resnet"): + """ + Creates a ROS Node for face detection with Retinaface. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no face detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param backbone: retinaface backbone, options are either 'mnet' or 'resnet', + where 'mnet' detects masked faces as well + :type backbone: str + """ + self.input_rgb_image_topic = input_rgb_image_topic + + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.face_publisher = rospy.Publisher(detections_topic, Detection2DArray, queue_size=1) + else: + self.face_publisher = None + + self.bridge = ROSBridge() + + # Initialize the face detector + self.face_detector = RetinaFaceLearner(backbone=backbone, device=device) + self.face_detector.download(path=".", verbose=True) + self.face_detector.load("retinaface_{}".format(backbone)) + self.class_names = ["face", "masked_face"] + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_face_detection_retinaface_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Face detection RetinaFace node started.") + rospy.spin() + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: Input image message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run face detection + boxes = self.face_detector.infer(image) + + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_boxes(boxes) # Convert to ROS boxes + if self.face_publisher is not None: + self.face_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with face detection boxes + image = draw_bounding_boxes(image, boxes, class_names=self.class_names) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_faces_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/faces") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--backbone", + help="Retinaface backbone, options are either 'mnet' or 'resnet', where 'mnet' detects " + "masked faces as well", + type=str, default="resnet", choices=["resnet", "mnet"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and mx.context.num_gpus() > 0: + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + face_detection_node = FaceDetectionNode(device=device, backbone=args.backbone, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic) + face_detection_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/face_recognition_node.py b/projects/opendr_ws/src/opendr_perception/scripts/face_recognition_node.py new file mode 100755 index 0000000000..ebd0da3c18 --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/face_recognition_node.py @@ -0,0 +1,187 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse +import cv2 +import torch + +import rospy +from std_msgs.msg import String +from vision_msgs.msg import ObjectHypothesis +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.perception.face_recognition import FaceRecognitionLearner +from opendr.perception.object_detection_2d import RetinaFaceLearner +from opendr.perception.object_detection_2d.datasets.transforms import BoundingBoxListToNumpyArray + + +class FaceRecognitionNode: + + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_face_reco_annotated", + detections_topic="/opendr/face_recognition", detections_id_topic="/opendr/face_recognition_id", + database_path="./database", device="cuda", backbone="mobilefacenet"): + """ + Creates a ROS Node for face recognition. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the recognized face information (if None, + no face recognition message is published) + :type detections_topic: str + :param detections_id_topic: Topic to which we are publishing the ID of the recognized person (if None, + no ID message is published) + :type detections_id_topic: str + :param device: Device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param backbone: Backbone network + :type backbone: str + :param database_path: Path of the directory where the images of the faces to be recognized are stored + :type database_path: str + """ + self.input_rgb_image_topic = input_rgb_image_topic + + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.face_publisher = rospy.Publisher(detections_topic, ObjectHypothesis, queue_size=1) + else: + self.face_publisher = None + + if detections_id_topic is not None: + self.face_id_publisher = rospy.Publisher(detections_id_topic, String, queue_size=1) + else: + self.face_id_publisher = None + + self.bridge = ROSBridge() + + # Initialize the face recognizer + self.recognizer = FaceRecognitionLearner(device=device, mode='backbone_only', backbone=backbone) + self.recognizer.download(path=".") + self.recognizer.load(".") + self.recognizer.fit_reference(database_path, save_path=".", create_new=True) + + # Initialize the face detector + self.face_detector = RetinaFaceLearner(backbone='mnet', device=device) + self.face_detector.download(path=".", verbose=True) + self.face_detector.load("retinaface_{}".format('mnet')) + self.class_names = ["face", "masked_face"] + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_face_recognition_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Face recognition node started.") + rospy.spin() + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: Input image message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + # Get an OpenCV image back + image = image.opencv() + + # Run face detection and recognition + if image is not None: + bounding_boxes = self.face_detector.infer(image) + if bounding_boxes: + bounding_boxes = BoundingBoxListToNumpyArray()(bounding_boxes) + boxes = bounding_boxes[:, :4] + for idx, box in enumerate(boxes): + (startX, startY, endX, endY) = int(box[0]), int(box[1]), int(box[2]), int(box[3]) + frame = image[startY:endY, startX:endX] + result = self.recognizer.infer(frame) + + # Publish face information and ID + if self.face_publisher is not None: + self.face_publisher.publish(self.bridge.to_ros_face(result)) + + if self.face_id_publisher is not None: + self.face_id_publisher.publish(self.bridge.to_ros_face_id(result)) + + if self.image_publisher is not None: + if result.description != 'Not found': + color = (0, 255, 0) + else: + color = (0, 0, 255) + # Annotate image with face detection/recognition boxes + cv2.rectangle(image, (startX, startY), (endX, endY), color, thickness=2) + cv2.putText(image, result.description, (startX, endY - 10), cv2.FONT_HERSHEY_SIMPLEX, + 1, color, 2, cv2.LINE_AA) + + if self.image_publisher is not None: + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_face_reco_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/face_recognition") + parser.add_argument("-id", "--detections_id_topic", help="Topic name for detection ID messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/face_recognition_id") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--backbone", help="Backbone network, defaults to mobilefacenet", + type=str, default="mobilefacenet", choices=["mobilefacenet"]) + parser.add_argument("--dataset_path", + help="Path of the directory where the images of the faces to be recognized are stored, " + "defaults to \"./database\"", + type=str, default="./database") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + face_recognition_node = FaceRecognitionNode(device=device, backbone=args.backbone, database_path=args.dataset_path, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic, + detections_id_topic=args.detections_id_topic) + face_recognition_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/facial_emotion_estimation_node.py b/projects/opendr_ws/src/opendr_perception/scripts/facial_emotion_estimation_node.py new file mode 100644 index 0000000000..c2da6e55ce --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/facial_emotion_estimation_node.py @@ -0,0 +1,213 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import torch +import numpy as np +import cv2 +from torchvision import transforms +import PIL + +import rospy +from std_msgs.msg import String +from vision_msgs.msg import ObjectHypothesis +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.perception.facial_expression_recognition import FacialEmotionLearner +from opendr.perception.facial_expression_recognition import image_processing +from opendr.perception.object_detection_2d import RetinaFaceLearner +from opendr.perception.object_detection_2d.datasets.transforms import BoundingBoxListToNumpyArray + +INPUT_IMAGE_SIZE = (96, 96) +INPUT_IMAGE_NORMALIZATION_MEAN = [0.0, 0.0, 0.0] +INPUT_IMAGE_NORMALIZATION_STD = [1.0, 1.0, 1.0] + + +class FacialEmotionEstimationNode: + def __init__(self, + face_detector_learner, + input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_emotion_estimation_annotated", + output_emotions_topic="/opendr/facial_emotion_estimation", + output_emotions_description_topic="/opendr/facial_emotion_estimation_description", + device="cuda"): + """ + Creates a ROS Node for facial emotion estimation. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param output_emotions_topic: Topic to which we are publishing the facial emotion results + (if None, we are not publishing the info) + :type output_emotions_topic: str + :param output_emotions_description_topic: Topic to which we are publishing the description of the estimated + facial emotion (if None, we are not publishing the description) + :type output_emotions_description_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + """ + + # Set up ROS topics and bridge + self.input_rgb_image_topic = input_rgb_image_topic + self.bridge = ROSBridge() + + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.image_publisher = None + + if output_emotions_topic is not None: + self.hypothesis_publisher = rospy.Publisher(output_emotions_topic, ObjectHypothesis, queue_size=1) + else: + self.hypothesis_publisher = None + + if output_emotions_description_topic is not None: + self.string_publisher = rospy.Publisher(output_emotions_description_topic, String, queue_size=1) + else: + self.string_publisher = None + + self.face_detector = face_detector_learner + + # Initialize the facial emotion estimator + self.facial_emotion_estimator = FacialEmotionLearner(device=device, batch_size=2, + ensemble_size=9, + name_experiment='esr_9') + self.facial_emotion_estimator.init_model(num_branches=9) + + model_saved_path = self.facial_emotion_estimator.download(path=None, mode="pretrained") + self.facial_emotion_estimator.load(ensemble_size=9, path_to_saved_network=model_saved_path) + + def listen(self): + """ + Start the node and begin processing input data + """ + rospy.init_node('opendr_facial_emotion_estimation_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Facial emotion estimation node started.") + rospy.spin() + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: input message + :type data: sensor_msgs.msg.Image + """ + image = self.bridge.from_ros_image(data, encoding='bgr8').opencv() + + emotion = None + # Run face detection and emotion estimation + if image is not None: + bounding_boxes = self.face_detector.infer(image) + if bounding_boxes: + bounding_boxes = BoundingBoxListToNumpyArray()(bounding_boxes) + boxes = bounding_boxes[:, :4] + for idx, box in enumerate(boxes): + (startX, startY, endX, endY) = int(box[0]), int(box[1]), int(box[2]), int(box[3]) + face_crop = image[startY:endY, startX:endX] + + # Preprocess detected face + input_face = _pre_process_input_image(face_crop) + + # Recognize facial expression + + emotion, affect = self.facial_emotion_estimator.infer(input_face) + # Converts from Tensor to ndarray + affect = np.array([a.cpu().detach().numpy() for a in affect]) + affect = affect[0] # a numpy array of valence and arousal values + emotion = emotion[0] # the emotion class with confidence tensor + + cv2.rectangle(image, (startX, startY), (endX, endY), (0, 255, 255), thickness=2) + cv2.putText(image, "Valence: %.2f" % affect[0], (startX, endY - 30), cv2.FONT_HERSHEY_SIMPLEX, + 0.5, (0, 255, 255), 1, cv2.LINE_AA) + cv2.putText(image, "Arousal: %.2f" % affect[1], (startX, endY - 15), cv2.FONT_HERSHEY_SIMPLEX, + 0.5, (0, 255, 255), 1, cv2.LINE_AA) + cv2.putText(image, emotion.description, (startX, endY), cv2.FONT_HERSHEY_SIMPLEX, + 0.5, (0, 255, 255), 1, cv2.LINE_AA) + + if self.hypothesis_publisher is not None and emotion: + self.hypothesis_publisher.publish(self.bridge.to_ros_category(emotion)) + + if self.string_publisher is not None and emotion: + self.string_publisher.publish(self.bridge.to_ros_category_description(emotion)) + + if self.image_publisher is not None: + # Convert the annotated OpenDR image to ROS image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def _pre_process_input_image(image): + """ + Pre-processes an image for ESR-9. 
+ :param image: (ndarray) + :return: (ndarray) image + """ + + image = image_processing.resize(image, INPUT_IMAGE_SIZE) + image = PIL.Image.fromarray(image) + image = transforms.Normalize(mean=INPUT_IMAGE_NORMALIZATION_MEAN, + std=INPUT_IMAGE_NORMALIZATION_STD)(transforms.ToTensor()(image)).unsqueeze(0) + + return image + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('-i', '--input_rgb_image_topic', type=str, help='Topic name for input rgb image', + default='/usb_cam/image_raw') + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_emotion_estimation_annotated") + parser.add_argument("-e", "--output_emotions_topic", help="Topic name for output emotion", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/facial_emotion_estimation") + parser.add_argument('-m', '--output_emotions_description_topic', + help='Topic to which we are publishing the description of the estimated facial emotion', + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/facial_emotion_estimation_description") + parser.add_argument('-d', '--device', help='Device to use, either cpu or cuda', + type=str, default="cuda", choices=["cuda", "cpu"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + print("GPU found.") + device = 'cuda' + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU") + device = 'cpu' + except: + print("Using CPU") + device = 'cpu' + + # Initialize the face detector + face_detector = RetinaFaceLearner(backbone="resnet", device=device) + face_detector.download(path=".", verbose=True) + face_detector.load("retinaface_{}".format("resnet")) + + facial_emotion_estimation_node = FacialEmotionEstimationNode( + face_detector, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + output_emotions_topic=args.output_emotions_topic, + output_emotions_description_topic=args.output_emotions_description_topic, + device=device) + + facial_emotion_estimation_node.listen() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/fall_detection_node.py b/projects/opendr_ws/src/opendr_perception/scripts/fall_detection_node.py new file mode 100755 index 0000000000..210d49f8e3 --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/fall_detection_node.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import cv2 +import argparse +import torch + +import rospy +from vision_msgs.msg import Detection2DArray +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.engine.target import BoundingBox, BoundingBoxList +from opendr.perception.pose_estimation import get_bbox +from opendr.perception.pose_estimation import LightweightOpenPoseLearner +from opendr.perception.fall_detection import FallDetectorLearner + + +class FallDetectionNode: + + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_fallen_annotated", detections_topic="/opendr/fallen", + device="cuda", num_refinement_stages=2, use_stride=False, half_precision=False): + """ + Creates a ROS Node for rule-based fall detection based on Lightweight OpenPose. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no fall detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param num_refinement_stages: Specifies the number of pose estimation refinement stages are added on the + model's head, including the initial stage. Can be 0, 1 or 2, with more stages meaning slower and more accurate + inference + :type num_refinement_stages: int + :param use_stride: Whether to add a stride value in the model, which reduces accuracy but increases + inference speed + :type use_stride: bool + :param half_precision: Enables inference using half (fp16) precision instead of single (fp32) precision. + Valid only for GPU-based inference + :type half_precision: bool + """ + self.input_rgb_image_topic = input_rgb_image_topic + + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.fall_publisher = rospy.Publisher(detections_topic, Detection2DArray, queue_size=1) + else: + self.fall_publisher = None + + self.bridge = ROSBridge() + + # Initialize the pose estimation learner + self.pose_estimator = LightweightOpenPoseLearner(device=device, num_refinement_stages=num_refinement_stages, + mobilenet_use_stride=use_stride, + half_precision=half_precision) + self.pose_estimator.download(path=".", verbose=True) + self.pose_estimator.load("openpose_default") + + # Initialize the fall detection learner + self.fall_detector = FallDetectorLearner(self.pose_estimator) + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_fall_detection_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Fall detection node started.") + rospy.spin() + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: Input image message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run fall detection + detections = self.fall_detector.infer(image) + + # Get an OpenCV image back + image = image.opencv() + + bboxes = BoundingBoxList([]) + fallen_pose_id = 0 + for detection in detections: + fallen = detection[0].data + + if fallen == 1: + pose = detection[2] + x, y, w, h = get_bbox(pose) + if self.image_publisher is not None: + # Paint person bounding box inferred from pose + color = (0, 0, 255) + cv2.rectangle(image, (x, y), (x + w, y + h), color, 2) + cv2.putText(image, "Fallen person", (x, y + h - 10), cv2.FONT_HERSHEY_SIMPLEX, + 1, color, 2, cv2.LINE_AA) + + if self.fall_publisher is not None: + # Convert detected boxes to ROS type and add to list + bboxes.data.append(BoundingBox(left=x, top=y, width=w, height=h, name=fallen_pose_id)) + fallen_pose_id += 1 + + if self.fall_publisher is not None: + if len(bboxes) > 0: + self.fall_publisher.publish(self.bridge.to_ros_boxes(bboxes)) + + if self.image_publisher is not None: + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_fallen_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/fallen") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--accelerate", help="Enables acceleration flags (e.g., stride)", default=False, + action="store_true") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + if args.accelerate: + stride = True + stages = 0 + half_prec = True + else: + stride = False + stages = 2 + half_prec = False + + fall_detection_node = FallDetectionNode(device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic, + num_refinement_stages=stages, use_stride=stride, half_precision=half_prec) + fall_detection_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/perception/scripts/heart_anomaly_detection.py b/projects/opendr_ws/src/opendr_perception/scripts/heart_anomaly_detection_node.py similarity index 55% rename from projects/opendr_ws/src/perception/scripts/heart_anomaly_detection.py rename to projects/opendr_ws/src/opendr_perception/scripts/heart_anomaly_detection_node.py index 4e72471b9d..98001abcdb 100755 --- a/projects/opendr_ws/src/perception/scripts/heart_anomaly_detection.py +++ b/projects/opendr_ws/src/opendr_perception/scripts/heart_anomaly_detection_node.py @@ -14,33 +14,36 @@ # See the License for the specific language governing permissions and # limitations under the License. -import rospy +import argparse import torch + +import rospy from vision_msgs.msg import Classification2D -import argparse from std_msgs.msg import Float32MultiArray + from opendr_bridge import ROSBridge from opendr.perception.heart_anomaly_detection import GatedRecurrentUnitLearner, AttentionNeuralBagOfFeatureLearner class HeartAnomalyNode: - def __init__(self, input_topic="/ecg/ecg", prediction_topic="/opendr/heartanomaly", device="cuda", model='anbof'): + def __init__(self, input_ecg_topic="/ecg/ecg", output_heart_anomaly_topic="/opendr/heart_anomaly", + device="cuda", model="anbof"): """ Creates a ROS Node for heart anomaly (atrial fibrillation) detection from ecg data - :param input_topic: Topic from which we are reading the input array data - :type input_topic: str - :param prediction_topic: Topic to which we are publishing the predicted class - :type prediction_topic: str + :param input_ecg_topic: Topic from which we are reading the input array data + :type input_ecg_topic: str + :param output_heart_anomaly_topic: Topic to which we are publishing the predicted class + :type output_heart_anomaly_topic: str :param device: device on which we are running inference ('cpu' or 'cuda') :type device: str :param model: model to use: anbof or gru :type model: str """ - self.publisher = rospy.Publisher(prediction_topic, Classification2D, queue_size=10) + self.publisher = rospy.Publisher(output_heart_anomaly_topic, Classification2D, queue_size=10) - rospy.Subscriber(input_topic, Float32MultiArray, self.callback) + rospy.Subscriber(input_ecg_topic, Float32MultiArray, self.callback) self.bridge = ROSBridge() @@ -48,7 +51,6 @@ def __init__(self, input_topic="/ecg/ecg", prediction_topic="/opendr/heartanomal self.channels = 1 self.series_length = 9000 - # Initialize the gesture recognition if model == 'gru': self.learner = GatedRecurrentUnitLearner(in_channels=self.channels, series_length=self.series_length, n_class=4, device=device) @@ -63,15 +65,15 @@ def listen(self): """ Start the node and begin processing input data """ - rospy.init_node('opendr_heart_anomaly_detection', anonymous=True) - rospy.loginfo("Heart anomaly detection node started!") + rospy.init_node('opendr_heart_anomaly_detection_node', anonymous=True) + 
rospy.loginfo("Heart anomaly detection node started.") rospy.spin() def callback(self, msg_data): """ Callback that process the input data and publishes to the corresponding topics - :param data: input message - :type data: std_msgs.msg.Float32MultiArray + :param msg_data: input message + :type msg_data: std_msgs.msg.Float32MultiArray """ # Convert Float32MultiArray to OpenDR Timeseries data = self.bridge.from_rosarray_to_timeseries(msg_data, self.channels, self.series_length) @@ -83,17 +85,35 @@ def callback(self, msg_data): ros_class = self.bridge.from_category_to_rosclass(class_pred) self.publisher.publish(ros_class) + if __name__ == '__main__': - # Select the device for running + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_ecg_topic", type=str, default="/ecg/ecg", + help="listen to input ECG data on this topic") + parser.add_argument("-o", "--output_heart_anomaly_topic", type=str, default="/opendr/heart_anomaly", + help="Topic name for heart anomaly detection topic") + parser.add_argument("--device", type=str, default="cuda", help="Device to use (cpu, cuda)", + choices=["cuda", "cpu"]) + parser.add_argument("--model", type=str, default="anbof", help="model to be used for prediction: anbof or gru", + choices=["anbof", "gru"]) + + args = parser.parse_args() + try: - device = 'cuda' if torch.cuda.is_available() else 'cpu' + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU") + device = "cpu" except: - device = 'cpu' + print("Using CPU") + device = "cpu" - parser = argparse.ArgumentParser() - parser.add_argument('input_topic', type=str, help='listen to input data on this topic') - parser.add_argument('model', type=str, help='model to be used for prediction: anbof or gru') - args = parser.parse_args() + heart_anomaly_detection_node = HeartAnomalyNode(input_ecg_topic=args.input_ecg_topic, + output_heart_anomaly_topic=args.output_heart_anomaly_topic, + model=args.model, device=device) - gesture_node = HeartAnomalyNode(input_topic=args.input_topic, model=args.model, device=device) - gesture_node.listen() + heart_anomaly_detection_node.listen() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/hr_pose_estimation_node.py b/projects/opendr_ws/src/opendr_perception/scripts/hr_pose_estimation_node.py new file mode 100755 index 0000000000..0a471b224e --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/hr_pose_estimation_node.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse +import torch + +import rospy +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge.msg import OpenDRPose2D +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.perception.pose_estimation import draw +from opendr.perception.pose_estimation import HighResolutionPoseEstimationLearner + + +class PoseEstimationNode: + + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_pose_annotated", detections_topic="/opendr/poses", device="cuda", + num_refinement_stages=2, use_stride=False, half_precision=False): + """ + Creates a ROS Node for high resolution pose estimation with HR Pose Estimation. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no pose detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param num_refinement_stages: Specifies the number of pose estimation refinement stages are added on the + model's head, including the initial stage. Can be 0, 1 or 2, with more stages meaning slower and more accurate + inference + :type num_refinement_stages: int + :param use_stride: Whether to add a stride value in the model, which reduces accuracy but increases + inference speed + :type use_stride: bool + :param half_precision: Enables inference using half (fp16) precision instead of single (fp32) precision. + Valid only for GPU-based inference + :type half_precision: bool + """ + self.input_rgb_image_topic = input_rgb_image_topic + + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.pose_publisher = rospy.Publisher(detections_topic, OpenDRPose2D, queue_size=1) + else: + self.pose_publisher = None + + self.bridge = ROSBridge() + + # Initialize the high resolution pose estimation learner + self.pose_estimator = HighResolutionPoseEstimationLearner(device=device, num_refinement_stages=num_refinement_stages, + mobilenet_use_stride=use_stride, + half_precision=half_precision) + self.pose_estimator.download(path=".", verbose=True) + self.pose_estimator.load("openpose_default") + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_hr_pose_estimation_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Pose estimation node started.") + rospy.spin() + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: Input image message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run pose estimation + poses = self.pose_estimator.infer(image) + + # Publish detections in ROS message + if self.pose_publisher is not None: + for pose in poses: + if pose.id is None: # Temporary fix for pose not having id + pose.id = -1 + # Convert OpenDR pose to ROS pose message using bridge and publish it + self.pose_publisher.publish(self.bridge.to_ros_pose(pose)) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with poses + for pose in poses: + draw(image, pose) + # Convert the annotated OpenDR image to ROS image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_pose_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/poses") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--accelerate", help="Enables acceleration flags (e.g., stride)", default=False, + action="store_true") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + if args.accelerate: + stride = True + stages = 0 + half_prec = True + else: + stride = False + stages = 2 + half_prec = False + + pose_estimator_node = PoseEstimationNode(device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic, + num_refinement_stages=stages, use_stride=stride, half_precision=half_prec) + pose_estimator_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/image_dataset_node.py b/projects/opendr_ws/src/opendr_perception/scripts/image_dataset_node.py new file mode 100755 index 0000000000..575c1c4dce --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/image_dataset_node.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse +import os +import rospy +import time +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge +from opendr.engine.datasets import DatasetIterator +from opendr.perception.object_tracking_2d import MotDataset, RawMotDatasetIterator + + +class ImageDatasetNode: + def __init__( + self, + dataset: DatasetIterator, + output_rgb_image_topic="/opendr/dataset_image", + data_fps=30, + ): + """ + Creates a ROS Node for publishing dataset images + """ + + self.dataset = dataset + # Initialize OpenDR ROSBridge object + self.bridge = ROSBridge() + self.delay = 1.0 / data_fps + + self.output_image_publisher = rospy.Publisher( + output_rgb_image_topic, ROS_Image, queue_size=10 + ) + + def start(self): + rospy.loginfo("Timing images") + i = 0 + while not rospy.is_shutdown(): + image = self.dataset[i % len(self.dataset)][0] # Dataset should have an (Image, Target) pair as elements + message = self.bridge.to_ros_image( + image, encoding="bgr8" + ) + self.output_image_publisher.publish(message) + + time.sleep(self.delay) + i += 1 + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-d", "--dataset_path", help="Path to a dataset", + type=str, default="MOT") + parser.add_argument( + "-ks", "--mot20_subsets_path", help="Path to mot20 subsets", + type=str, default=os.path.join( + "..", "..", "src", "opendr", "perception", "object_tracking_2d", + "datasets", "splits", "nano_mot20.train" + ) + ) + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name to publish the data", + type=str, default="/opendr/dataset_image") + parser.add_argument("-f", "--fps", help="Data FPS", + type=float, default=30) + args = parser.parse_args() + + dataset_path = args.dataset_path + mot20_subsets_path = args.mot20_subsets_path + output_rgb_image_topic = args.output_rgb_image_topic + data_fps = args.fps + + if not os.path.exists(dataset_path): + dataset_path = MotDataset.download_nano_mot20( + "MOT", True + ).path + + dataset = RawMotDatasetIterator( + dataset_path, + { + "mot20": mot20_subsets_path + }, + scan_labels=False + ) + + rospy.init_node("opendr_image_dataset_node", anonymous=True) + + dataset_node = ImageDatasetNode( + dataset, + output_rgb_image_topic=output_rgb_image_topic, + data_fps=data_fps, + ) + + rospy.loginfo("Image dataset node started.") + dataset_node.start() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/perception/scripts/landmark_based_facial_expression_recognition.py b/projects/opendr_ws/src/opendr_perception/scripts/landmark_based_facial_expression_recognition_node.py old mode 100644 new mode 100755 similarity index 66% rename from projects/opendr_ws/src/perception/scripts/landmark_based_facial_expression_recognition.py rename to projects/opendr_ws/src/opendr_perception/scripts/landmark_based_facial_expression_recognition_node.py index a6b0c2188f..96a274f555 --- a/projects/opendr_ws/src/perception/scripts/landmark_based_facial_expression_recognition.py +++ b/projects/opendr_ws/src/opendr_perception/scripts/landmark_based_facial_expression_recognition_node.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
- +import argparse import rospy import torch import numpy as np @@ -29,14 +29,14 @@ class LandmarkFacialExpressionRecognitionNode: - def __init__(self, input_image_topic="/usb_cam/image_raw", - output_category_topic="/opendr/landmark_based_expression_recognition", - output_category_description_topic="/opendr/landmark_based_expression_recognition_description", + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", + output_category_topic="/opendr/landmark_expression_recognition", + output_category_description_topic="/opendr/landmark_expression_recognition_description", device="cpu", model='pstbln_afew', shape_predictor='./predictor_path'): """ - Creates a ROS Node for pose detection - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str + Creates a ROS Node for landmark-based facial expression recognition. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str :param output_category_topic: Topic to which we are publishing the recognized facial expression category info (if None, we are not publishing the info) :type output_category_topic: str @@ -53,6 +53,8 @@ def __init__(self, input_image_topic="/usb_cam/image_raw", """ # Set up ROS topics and bridge + self.input_rgb_image_topic = input_rgb_image_topic + self.bridge = ROSBridge() if output_category_topic is not None: self.hypothesis_publisher = rospy.Publisher(output_category_topic, ObjectHypothesis, queue_size=10) @@ -64,9 +66,6 @@ def __init__(self, input_image_topic="/usb_cam/image_raw", else: self.string_publisher = None - self.input_image_topic = input_image_topic - self.bridge = ROSBridge() - # Initialize the landmark-based facial expression recognition if model == 'pstbln_ck+': num_point = 303 @@ -90,9 +89,9 @@ def listen(self): """ Start the node and begin processing input data """ - rospy.init_node('opendr_landmark_based_facial_expression_recognition', anonymous=True) - rospy.Subscriber(self.input_image_topic, ROS_Image, self.callback) - rospy.loginfo("landmark-based facial expression recognition node started!") + rospy.init_node('opendr_landmark_based_facial_expression_recognition_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback) + rospy.loginfo("Landmark-based facial expression recognition node started.") rospy.spin() def callback(self, data): @@ -134,16 +133,42 @@ def _landmark2numpy(landmarks): if __name__ == '__main__': - # Select the device for running the + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_category_topic", help="Topic name for output recognized category", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/landmark_expression_recognition") + parser.add_argument("-d", "--output_category_description_topic", help="Topic name for category description", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/landmark_expression_recognition_description") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--model", help="Model to use, either 'pstbln_ck+', 'pstbln_casia', 'pstbln_afew'", + type=str, default="pstbln_afew", choices=['pstbln_ck+', 'pstbln_casia', 'pstbln_afew']) + parser.add_argument("-s", 
"--shape_predictor", help="Shape predictor (landmark_extractor) to use", + type=str, default='./predictor_path') + args = parser.parse_args() + try: - if torch.cuda.is_available(): - print("GPU found.") - device = 'cuda' - else: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": print("GPU not found. Using CPU instead.") - device = 'cpu' + device = "cpu" + else: + print("Using CPU.") + device = "cpu" except: - device = 'cpu' - - pose_estimation_node = LandmarkFacialExpressionRecognitionNode(device=device) - pose_estimation_node.listen() + print("Using CPU.") + device = "cpu" + + landmark_expression_estimation_node = \ + LandmarkFacialExpressionRecognitionNode( + input_rgb_image_topic=args.input_rgb_image_topic, + output_category_topic=args.output_category_topic, + output_category_description_topic=args.output_category_description_topic, + device=device, model=args.model, + shape_predictor=args.shape_predictor) + landmark_expression_estimation_node.listen() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_centernet_node.py b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_centernet_node.py new file mode 100755 index 0000000000..4e64663ff1 --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_centernet_node.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import mxnet as mx + +import rospy +from vision_msgs.msg import Detection2DArray +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import CenterNetDetectorLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +class ObjectDetectionCenterNetNode: + + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_objects_annotated", detections_topic="/opendr/objects", + device="cuda", backbone="resnet50_v1b"): + """ + Creates a ROS Node for object detection with Centernet. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no object detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param backbone: backbone network + :type backbone: str + """ + self.input_rgb_image_topic = input_rgb_image_topic + + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.object_publisher = rospy.Publisher(detections_topic, Detection2DArray, queue_size=1) + else: + self.object_publisher = None + + self.bridge = ROSBridge() + + # Initialize the object detector + self.object_detector = CenterNetDetectorLearner(backbone=backbone, device=device) + self.object_detector.download(path=".", verbose=True) + self.object_detector.load("centernet_default") + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_object_detection_2d_centernet_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Object detection 2D Centernet node started.") + rospy.spin() + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. + :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run object detection + boxes = self.object_detector.infer(image, threshold=0.45, keep_size=False) + + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_boxes(boxes) # Convert to ROS boxes + if self.object_publisher is not None: + self.object_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with object detection boxes + image = draw_bounding_boxes(image, boxes, class_names=self.object_detector.classes) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--backbone", help="Backbone network, defaults to \"resnet50_v1b\"", + type=str, default="resnet50_v1b", choices=["resnet50_v1b"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and 
mx.context.num_gpus() > 0: + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detection_centernet_node = ObjectDetectionCenterNetNode(device=device, backbone=args.backbone, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic) + object_detection_centernet_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_detr_node.py b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_detr_node.py new file mode 100755 index 0000000000..fc11461891 --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_detr_node.py @@ -0,0 +1,232 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import argparse +import torch + +import rospy +from vision_msgs.msg import Detection2DArray +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import DetrLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +class ObjectDetectionDetrNode: + def __init__( + self, + input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_objects_annotated", + detections_topic="/opendr/objects", + device="cuda", + ): + """ + Creates a ROS Node for object detection with DETR. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no object detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + """ + self.input_rgb_image_topic = input_rgb_image_topic + + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.object_publisher = rospy.Publisher(detections_topic, Detection2DArray, queue_size=1) + else: + self.object_publisher = None + + self.bridge = ROSBridge() + + self.class_names = [ + "N/A", + "person", + "bicycle", + "car", + "motorcycle", + "airplane", + "bus", + "train", + "truck", + "boat", + "traffic light", + "fire hydrant", + "N/A", + "stop sign", + "parking meter", + "bench", + "bird", + "cat", + "dog", + "horse", + "sheep", + "cow", + "elephant", + "bear", + "zebra", + "giraffe", + "N/A", + "backpack", + "umbrella", + "N/A", + "N/A", + "handbag", + "tie", + "suitcase", + "frisbee", + "skis", + "snowboard", + "sports ball", + "kite", + "baseball bat", + "baseball glove", + "skateboard", + "surfboard", + "tennis racket", + "bottle", + "N/A", + "wine glass", + "cup", + "fork", + "knife", + "spoon", + "bowl", + "banana", + "apple", + "sandwich", + "orange", + "broccoli", + "carrot", + "hot dog", + "pizza", + "donut", + "cake", + "chair", + "couch", + "potted plant", + "bed", + "N/A", + "dining table", + "N/A", + "N/A", + "toilet", + "N/A", + "tv", + "laptop", + "mouse", + "remote", + "keyboard", + "cell phone", + "microwave", + "oven", + "toaster", + "sink", + "refrigerator", + "N/A", + "book", + "clock", + "vase", + "scissors", + "teddy bear", + "hair drier", + "toothbrush", + ] + + # Initialize the detection estimation + self.detr_learner = DetrLearner(device=device) + self.detr_learner.download(path=".", verbose=True) + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_object_detection_2d_detr_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Object detection 2D DETR node started.") + rospy.spin() + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding="bgr8") + + # Run object detection + boxes = self.detr_learner.infer(image) + + # Get an OpenCV image back + image = image.opencv() + + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_bounding_box_list(boxes) # Convert to ROS bounding_box_list + if self.object_publisher is not None: + self.object_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Annotate image with object detection boxes + image = draw_bounding_boxes(image, boxes, class_names=self.class_names) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=str, default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=str, default="/opendr/objects") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detection_detr_node = ObjectDetectionDetrNode(device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic) + object_detection_detr_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_gem_node.py b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_gem_node.py new file mode 100755 index 0000000000..2a6243a30d --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_gem_node.py @@ -0,0 +1,272 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import rospy +import torch +import message_filters +import cv2 +import numpy as np +import argparse +from vision_msgs.msg import Detection2DArray +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge +from opendr.perception.object_detection_2d import GemLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes +from opendr.engine.data import Image + + +class ObjectDetectionGemNode: + def __init__( + self, + input_rgb_image_topic="/camera/color/image_raw", + input_infra_image_topic="/camera/infra/image_raw", + output_rgb_image_topic="/opendr/rgb_image_objects_annotated", + output_infra_image_topic="/opendr/infra_image_objects_annotated", + detections_topic="/opendr/objects", + device="cuda", + pts_rgb=None, + pts_infra=None, + ): + """ + Creates a ROS Node for object detection with GEM + :param input_rgb_image_topic: Topic from which we are reading the input rgb image + :type input_rgb_image_topic: str + :param input_infra_image_topic: Topic from which we are reading the input infrared image + :type: input_infra_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated rgb image (if None, we are not + publishing annotated image) + :type output_rgb_image_topic: str + :param output_infra_image_topic: Topic to which we are publishing the annotated infrared image (if None, we are not + publishing annotated image) + :type output_infra_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, we are + not publishing annotations) + :type detections_topic: str + :param device: Device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param pts_rgb: Point on the rgb image that define alignment with the infrared image. These are camera + specific and can be obtained using get_color_infra_alignment.py which is located in the + opendr/perception/object_detection2d/utils module. + :type pts_rgb: {list, numpy.ndarray} + :param pts_infra: Points on the infrared image that define alignment with rgb image. These are camera specific + and can be obtained using get_rgb_infra_alignment.py which is located in the + opendr/perception/object_detection2d/utils module. + :type pts_infra: {list, numpy.ndarray} + """ + rospy.init_node("opendr_object_detection_2d_gem_node", anonymous=True) + if output_rgb_image_topic is not None: + self.rgb_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=10) + else: + self.rgb_publisher = None + if output_infra_image_topic is not None: + self.ir_publisher = rospy.Publisher(output_infra_image_topic, ROS_Image, queue_size=10) + else: + self.ir_publisher = None + + if detections_topic is not None: + self.detection_publisher = rospy.Publisher(detections_topic, Detection2DArray, queue_size=10) + else: + self.detection_publisher = None + if pts_infra is None: + pts_infra = np.array( + [ + [478, 248], + [465, 338], + [458, 325], + [468, 256], + [341, 240], + [335, 310], + [324, 321], + [311, 383], + [434, 365], + [135, 384], + [67, 257], + [167, 206], + [124, 131], + [364, 276], + [424, 269], + [277, 131], + [41, 310], + [202, 320], + [188, 318], + [188, 308], + [196, 241], + [499, 317], + [311, 164], + [220, 216], + [435, 352], + [213, 363], + [390, 364], + [212, 368], + [390, 370], + [467, 324], + [415, 364], + ] + ) + rospy.logwarn( + "\nUsing default calibration values for pts_infra!" + + "\nThese are probably incorrect." 
+ + "\nThe correct values for pts_infra can be found by running get_color_infra_alignment.py." + + "\nThis file is located in the opendr/perception/object_detection2d/utils module." + ) + if pts_rgb is None: + pts_rgb = np.array( + [ + [910, 397], + [889, 572], + [874, 552], + [891, 411], + [635, 385], + [619, 525], + [603, 544], + [576, 682], + [810, 619], + [216, 688], + [90, 423], + [281, 310], + [193, 163], + [684, 449], + [806, 431], + [504, 170], + [24, 538], + [353, 552], + [323, 550], + [323, 529], + [344, 387], + [961, 533], + [570, 233], + [392, 336], + [831, 610], + [378, 638], + [742, 630], + [378, 648], + [742, 640], + [895, 550], + [787, 630], + ] + ) + rospy.logwarn( + "\nUsing default calibration values for pts_rgb!" + + "\nThese are probably incorrect." + + "\nThe correct values for pts_rgb can be found by running get_color_infra_alignment.py." + + "\nThis file is located in the opendr/perception/object_detection2d/utils module." + ) + # Object classes + self.classes = ["N/A", "chair", "cycle", "bin", "laptop", "drill", "rocker"] + + # Estimating Homography matrix for aligning infra with RGB + self.h, status = cv2.findHomography(pts_infra, pts_rgb) + + self.bridge = ROSBridge() + + # Initialize the detection estimation + model_backbone = "resnet50" + + self.gem_learner = GemLearner( + backbone=model_backbone, + num_classes=7, + device=device, + ) + self.gem_learner.fusion_method = "sc_avg" + self.gem_learner.download(path=".", verbose=True) + + # Subscribers + msg_rgb = message_filters.Subscriber(input_rgb_image_topic, ROS_Image, queue_size=1, buff_size=10000000) + msg_ir = message_filters.Subscriber(input_infra_image_topic, ROS_Image, queue_size=1, buff_size=10000000) + + sync = message_filters.TimeSynchronizer([msg_rgb, msg_ir], 1) + sync.registerCallback(self.callback) + rospy.loginfo("GEM node initialized.") + + def listen(self): + """ + Start the node and begin processing input data + """ + rospy.loginfo("Object detection 2D GEM node started.") + rospy.spin() + + def callback(self, msg_rgb, msg_ir): + """ + Callback that process the input data and publishes to the corresponding topics + :param msg_rgb: input rgb image message + :type msg_rgb: sensor_msgs.msg.Image + :param msg_ir: input infrared image message + :type msg_ir: sensor_msgs.msg.Image + """ + # Convert images to OpenDR standard + image_rgb = self.bridge.from_ros_image(msg_rgb).opencv() + image_ir_raw = self.bridge.from_ros_image(msg_ir, "bgr8").opencv() + image_ir = cv2.warpPerspective(image_ir_raw, self.h, (image_rgb.shape[1], image_rgb.shape[0])) + + # Perform inference on images + boxes, w_sensor1, _ = self.gem_learner.infer(image_rgb, image_ir) + + # Annotate image and publish results: + if self.detection_publisher is not None: + ros_detection = self.bridge.to_ros_bounding_box_list(boxes) + self.detection_publisher.publish(ros_detection) + + if self.rgb_publisher is not None: + plot_rgb = draw_bounding_boxes(image_rgb, boxes, class_names=self.classes) + message = self.bridge.to_ros_image(Image(np.uint8(plot_rgb))) + self.rgb_publisher.publish(message) + if self.ir_publisher is not None: + plot_ir = draw_bounding_boxes(image_ir, boxes, class_names=self.classes) + message = self.bridge.to_ros_image(Image(np.uint8(plot_ir))) + self.ir_publisher.publish(message) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("-ic", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/camera/color/image_raw") + parser.add_argument("-ii", 
"--input_infra_image_topic", help="Topic name for input infrared image", + type=str, default="/camera/infra/image_raw") + parser.add_argument("-oc", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/rgb_image_objects_annotated") + parser.add_argument("-oi", "--output_infra_image_topic", help="Topic name for output annotated infrared image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/infra_image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("--device", help='Device to use, either "cpu" or "cuda", defaults to "cuda"', + type=str, default="cuda", choices=["cuda", "cpu"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + detection_estimation_node = ObjectDetectionGemNode( + device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + input_infra_image_topic=args.input_infra_image_topic, + output_infra_image_topic=args.output_infra_image_topic, + detections_topic=args.detections_topic, + ) + detection_estimation_node.listen() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_nanodet_node.py b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_nanodet_node.py new file mode 100755 index 0000000000..c2304ce6ff --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_nanodet_node.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import torch + +import rospy +from vision_msgs.msg import Detection2DArray +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import NanodetLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +class ObjectDetectionNanodetNode: + + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_objects_annotated", detections_topic="/opendr/objects", + device="cuda", model="plus_m_1.5x_416"): + """ + Creates a ROS Node for object detection with Nanodet. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no object detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param model: the name of the model of which we want to load the config file + :type model: str + """ + self.input_rgb_image_topic = input_rgb_image_topic + + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.object_publisher = rospy.Publisher(detections_topic, Detection2DArray, queue_size=1) + else: + self.object_publisher = None + + self.bridge = ROSBridge() + + # Initialize the object detector + self.object_detector = NanodetLearner(model_to_use=model, device=device) + self.object_detector.download(path=".", mode="pretrained", verbose=True) + self.object_detector.load("./nanodet_{}".format(model)) + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_object_detection_2d_nanodet_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Object detection 2D Nanodet node started.") + rospy.spin() + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. + :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run object detection + boxes = self.object_detector.infer(image, threshold=0.35) + + # Get an OpenCV image back + image = image.opencv() + + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_boxes(boxes) # Convert to ROS boxes + if self.object_publisher is not None: + self.object_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Annotate image with object detection boxes + image = draw_bounding_boxes(image, boxes, class_names=self.object_detector.classes) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--model", help="Model that config file will be used", type=str, default="plus_m_1.5x_416") + args = parser.parse_args() + + try: + if args.device == "cuda" and 
torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detection_nanodet_node = ObjectDetectionNanodetNode(device=device, model=args.model, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic) + object_detection_nanodet_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_ssd_node.py b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_ssd_node.py new file mode 100755 index 0000000000..1e189bcd60 --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_ssd_node.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import mxnet as mx + +import rospy +from vision_msgs.msg import Detection2DArray +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import SingleShotDetectorLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes +from opendr.perception.object_detection_2d import Seq2SeqNMSLearner, SoftNMS, FastNMS, ClusterNMS + + +class ObjectDetectionSSDNode: + + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_objects_annotated", detections_topic="/opendr/objects", + device="cuda", backbone="vgg16_atrous", nms_type='default'): + """ + Creates a ROS Node for object detection with SSD. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no object detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param backbone: backbone network + :type backbone: str + :param nms_type: type of NMS method + :type nms_type: str + """ + self.input_rgb_image_topic = input_rgb_image_topic + + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.object_publisher = rospy.Publisher(detections_topic, Detection2DArray, queue_size=1) + else: + self.object_publisher = None + + self.bridge = ROSBridge() + + # Initialize the object detector + self.object_detector = SingleShotDetectorLearner(backbone=backbone, device=device) + self.object_detector.download(path=".", verbose=True) + self.object_detector.load("ssd_default_person") + self.custom_nms = None + + # Initialize NMS if selected + if nms_type == 'seq2seq-nms': + self.custom_nms = Seq2SeqNMSLearner(fmod_map_type='EDGEMAP', iou_filtering=0.8, + app_feats='fmod', device=device) + self.custom_nms.download(model_name='seq2seq_pets_jpd_fmod', path='.') + self.custom_nms.load('./seq2seq_pets_jpd_fmod/', verbose=True) + rospy.loginfo("Object Detection 2D SSD node seq2seq-nms initialized.") + elif nms_type == 'soft-nms': + self.custom_nms = SoftNMS(nms_thres=0.45, device=device) + rospy.loginfo("Object Detection 2D SSD node soft-nms initialized.") + elif nms_type == 'fast-nms': + self.custom_nms = FastNMS(device=device) + rospy.loginfo("Object Detection 2D SSD node fast-nms initialized.") + elif nms_type == 'cluster-nms': + self.custom_nms = ClusterNMS(device=device) + rospy.loginfo("Object Detection 2D SSD node cluster-nms initialized.") + else: + rospy.loginfo("Object Detection 2D SSD node using default NMS.") + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_object_detection_2d_ssd_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Object detection 2D SSD node started.") + rospy.spin() + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run object detection + boxes = self.object_detector.infer(image, threshold=0.45, keep_size=False, custom_nms=self.custom_nms) + + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_boxes(boxes) # Convert to ROS boxes + if self.object_publisher is not None: + self.object_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with object detection boxes + image = draw_bounding_boxes(image, boxes, class_names=self.object_detector.classes) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--backbone", help="Backbone network, defaults to vgg16_atrous", + type=str, default="vgg16_atrous", choices=["vgg16_atrous"]) + parser.add_argument("--nms_type", help="Non-Maximum Suppression type, defaults to \"default\", options are " + "\"seq2seq-nms\", \"soft-nms\", \"fast-nms\", \"cluster-nms\"", + type=str, default="default", + choices=["default", "seq2seq-nms", "soft-nms", "fast-nms", "cluster-nms"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and mx.context.num_gpus() > 0: + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detection_ssd_node = ObjectDetectionSSDNode(device=device, backbone=args.backbone, nms_type=args.nms_type, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic) + object_detection_ssd_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_yolov3_node.py b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_yolov3_node.py new file mode 100755 index 0000000000..2b29cc0597 --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_yolov3_node.py @@ -0,0 +1,140 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import mxnet as mx + +import rospy +from vision_msgs.msg import Detection2DArray +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import YOLOv3DetectorLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +class ObjectDetectionYOLONode: + + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_objects_annotated", detections_topic="/opendr/objects", + device="cuda", backbone="darknet53"): + """ + Creates a ROS Node for object detection with YOLOV3. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no object detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param backbone: backbone network + :type backbone: str + """ + self.input_rgb_image_topic = input_rgb_image_topic + + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.object_publisher = rospy.Publisher(detections_topic, Detection2DArray, queue_size=1) + else: + self.object_publisher = None + + self.bridge = ROSBridge() + + # Initialize the object detector + self.object_detector = YOLOv3DetectorLearner(backbone=backbone, device=device) + self.object_detector.download(path=".", verbose=True) + self.object_detector.load("yolo_default") + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_object_detection_2d_yolov3_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Object detection 2D YOLOV3 node started.") + rospy.spin() + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run object detection + boxes = self.object_detector.infer(image, threshold=0.1, keep_size=False) + + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_bounding_box_list(boxes) # Convert to ROS bounding_box_list + if self.object_publisher is not None: + self.object_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with object detection boxes + image = draw_bounding_boxes(image, boxes, class_names=self.object_detector.classes) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--backbone", help="Backbone network, defaults to \"darknet53\"", + type=str, default="darknet53", choices=["darknet53"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and mx.context.num_gpus() > 0: + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detection_yolov3_node = ObjectDetectionYOLONode(device=device, backbone=args.backbone, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic) + object_detection_yolov3_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_yolov5_node.py b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_yolov5_node.py new file mode 100644 index 0000000000..55918c5649 --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_2d_yolov5_node.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse +import torch + +import rospy +from vision_msgs.msg import Detection2DArray +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import YOLOv5DetectorLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +class ObjectDetectionYOLONode: + + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_objects_annotated", detections_topic="/opendr/objects", + device="cuda", model_name="yolov5s"): + """ + Creates a ROS Node for object detection with YOLOV5. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no object detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param model_name: network architecture name + :type model_name: str + """ + self.input_rgb_image_topic = input_rgb_image_topic + + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.object_publisher = rospy.Publisher(detections_topic, Detection2DArray, queue_size=1) + else: + self.object_publisher = None + + self.bridge = ROSBridge() + + # Initialize the object detector + self.object_detector = YOLOv5DetectorLearner(model_name=model_name, device=device) + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_object_detection_yolov5_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Object detection YOLOV5 node started.") + rospy.spin() + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run object detection + boxes = self.object_detector.infer(image) + + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_bounding_box_list(boxes) # Convert to ROS bounding_box_list + if self.object_publisher is not None: + self.object_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with object detection boxes + image = draw_bounding_boxes(image, boxes, class_names=self.object_detector.classes, line_thickness=3) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--model_name", help="Network architecture, defaults to \"yolov5s\"", + type=str, default="yolov5s", choices=['yolov5s', 'yolov5n', 'yolov5m', 'yolov5l', 'yolov5x', + 'yolov5n6', 'yolov5s6', 'yolov5m6', 'yolov5l6', 'custom']) + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detection_yolov5_node = ObjectDetectionYOLONode(device=device, model_name=args.model_name, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic) + object_detection_yolov5_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/perception/scripts/object_detection_3d_voxel.py b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_3d_voxel_node.py old mode 100644 new mode 100755 similarity index 51% rename from projects/opendr_ws/src/perception/scripts/object_detection_3d_voxel.py rename to projects/opendr_ws/src/opendr_perception/scripts/object_detection_3d_voxel_node.py index 6d6b74015a..3c43514906 --- a/projects/opendr_ws/src/perception/scripts/object_detection_3d_voxel.py +++ b/projects/opendr_ws/src/opendr_perception/scripts/object_detection_3d_voxel_node.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import argparse import torch import os import rospy @@ -26,11 +27,11 @@ class ObjectDetection3DVoxelNode: def __init__( self, input_point_cloud_topic="/opendr/dataset_point_cloud", - output_detection3d_topic="/opendr/detection3d", + detections_topic="/opendr/objects3d", device="cuda:0", model_name="tanet_car_xyres_16", model_config_path=os.path.join( - "..", "..", "src", "opendr", "perception", "object_detection_3d", + "$OPENDR_HOME", "src", "opendr", "perception", "object_detection_3d", "voxel_object_detection_3d", "second_detector", "configs", "tanet", "ped_cycle", "test_short.proto" ), @@ -39,9 +40,9 @@ def __init__( """ Creates a ROS Node for 3D object detection :param input_point_cloud_topic: Topic from which we are reading the input point cloud - :type input_image_topic: str - :param output_detection3d_topic: Topic to which we are publishing the annotations - :type output_detection3d_topic: str + :type input_point_cloud_topic: str + :param detections_topic: Topic to which we are publishing the annotations + :type detections_topic: str :param device: device on which we are running inference ('cpu' or 'cuda') :type device: str :param model_name: the pretrained model to download or a trained model in temp_dir @@ -58,15 +59,13 @@ def __init__( self.learner.load(os.path.join(temp_dir, model_name), verbose=True) - # Initialize OpenDR ROSBridge object + self.input_point_cloud_topic = input_point_cloud_topic self.bridge = ROSBridge() self.detection_publisher = rospy.Publisher( - output_detection3d_topic, Detection3DArray, queue_size=10 + detections_topic, Detection3DArray, queue_size=1 ) - rospy.Subscriber(input_point_cloud_topic, ROS_PointCloud, self.callback) - def callback(self, data): """ Callback that process the input data and publishes to the corresponding topics @@ -80,39 +79,67 @@ def callback(self, data): # Convert detected boxes to ROS type and publish ros_boxes = self.bridge.to_ros_boxes_3d(detection_boxes, classes=["Car", "Van", "Truck", "Pedestrian", "Cyclist"]) - if self.detection_publisher is not None: - self.detection_publisher.publish(ros_boxes) - rospy.loginfo("Published detection boxes") - -if __name__ == "__main__": - # Automatically run on GPU/CPU - device = "cuda:0" if torch.cuda.is_available() else "cpu" - - # initialize ROS node - rospy.init_node("opendr_voxel_detection_3d", anonymous=True) - rospy.loginfo("Voxel Detection 3D node started") - - model_name = rospy.get_param("~model_name", "tanet_car_xyres_16") - model_config_path = rospy.get_param( - "~model_config_path", os.path.join( - "..", "..", "src", "opendr", "perception", "object_detection_3d", + self.detection_publisher.publish(ros_boxes) + + def listen(self): + """ + Start the node and begin processing input data. 
+ """ + rospy.init_node('opendr_object_detection_3d_voxel_node', anonymous=True) + rospy.Subscriber(self.input_point_cloud_topic, ROS_PointCloud, self.callback, queue_size=1, buff_size=10000000) + + rospy.loginfo("Object Detection 3D Voxel Node started.") + rospy.spin() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_point_cloud_topic", + help="Point Cloud topic provided by either a point_cloud_dataset_node or any other 3D Point Cloud Node", + type=str, default="/opendr/dataset_point_cloud") + parser.add_argument("-d", "--detections_topic", + help="Output detections topic", + type=str, default="/opendr/objects3d") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("-n", "--model_name", help="Name of the trained model", + type=str, default="tanet_car_xyres_16", choices=["tanet_car_xyres_16"]) + parser.add_argument( + "-c", "--model_config_path", help="Path to a model .proto config", + type=str, default=os.path.join( + "$OPENDR_HOME", "src", "opendr", "perception", "object_detection_3d", "voxel_object_detection_3d", "second_detector", "configs", "tanet", - "car", "test_short.proto" + "car", "xyres_16.proto" ) ) - temp_dir = rospy.get_param("~temp_dir", "temp") - input_point_cloud_topic = rospy.get_param( - "~input_point_cloud_topic", "/opendr/dataset_point_cloud" - ) - rospy.loginfo("Using model_name: {}".format(model_name)) + parser.add_argument("-t", "--temp_dir", help="Path to a temporary directory with models", + type=str, default="temp") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" - # created node object voxel_node = ObjectDetection3DVoxelNode( device=device, - model_name=model_name, - model_config_path=model_config_path, - input_point_cloud_topic=input_point_cloud_topic, - temp_dir=temp_dir, + model_name=args.model_name, + model_config_path=args.model_config_path, + input_point_cloud_topic=args.input_point_cloud_topic, + temp_dir=args.temp_dir, + detections_topic=args.detections_topic, ) - # begin ROS communications - rospy.spin() + + voxel_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/perception/scripts/object_tracking_2d_deep_sort.py b/projects/opendr_ws/src/opendr_perception/scripts/object_tracking_2d_deep_sort_node.py old mode 100644 new mode 100755 similarity index 50% rename from projects/opendr_ws/src/perception/scripts/object_tracking_2d_deep_sort.py rename to projects/opendr_ws/src/opendr_perception/scripts/object_tracking_2d_deep_sort_node.py index 70d66c69a8..8844e336a4 --- a/projects/opendr_ws/src/perception/scripts/object_tracking_2d_deep_sort.py +++ b/projects/opendr_ws/src/opendr_perception/scripts/object_tracking_2d_deep_sort_node.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,16 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import argparse import cv2 import torch import os -from opendr.engine.target import TrackingAnnotation +from opendr.engine.target import TrackingAnnotationList import rospy from vision_msgs.msg import Detection2DArray from std_msgs.msg import Int32MultiArray from sensor_msgs.msg import Image as ROS_Image from opendr_bridge import ROSBridge -from opendr.engine.learners import Learner from opendr.perception.object_tracking_2d import ( ObjectTracking2DDeepSortLearner, ObjectTracking2DFairMotLearner @@ -33,11 +33,11 @@ class ObjectTracking2DDeepSortNode: def __init__( self, - detector: Learner, - input_image_topic="/usb_cam/image_raw", - output_detection_topic="/opendr/detection", - output_tracking_id_topic="/opendr/tracking_id", - output_image_topic="/opendr/image_annotated", + detector=None, + input_rgb_image_topic="/usb_cam/image_raw", + output_detection_topic="/opendr/objects", + output_tracking_id_topic="/opendr/objects_tracking_id", + output_rgb_image_topic="/opendr/image_objects_annotated", device="cuda:0", model_name="deep_sort", temp_dir="temp", @@ -46,11 +46,11 @@ def __init__( Creates a ROS Node for 2D object tracking :param detector: Learner to generate object detections :type detector: Learner - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str - :param output_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing annotated image) - :type output_image_topic: str + :type output_rgb_image_topic: str :param output_detection_topic: Topic to which we are publishing the detections :type output_detection_topic: str :param output_tracking_id_topic: Topic to which we are publishing the tracking ids @@ -63,7 +63,6 @@ def __init__( :type temp_dir: str """ - # # Initialize the face detector self.detector = detector self.learner = ObjectTracking2DDeepSortLearner( device=device, temp_path=temp_dir, @@ -73,22 +72,23 @@ def __init__( self.learner.load(os.path.join(temp_dir, model_name), verbose=True) - # Initialize OpenDR ROSBridge object self.bridge = ROSBridge() - self.tracking_id_publisher = rospy.Publisher( - output_tracking_id_topic, Int32MultiArray, queue_size=10 - ) + self.input_rgb_image_topic = input_rgb_image_topic - if output_image_topic is not None: - self.output_image_publisher = rospy.Publisher( - output_image_topic, ROS_Image, queue_size=10 + if output_tracking_id_topic is not None: + self.tracking_id_publisher = rospy.Publisher( + output_tracking_id_topic, Int32MultiArray, queue_size=10 ) - self.detection_publisher = rospy.Publisher( - output_detection_topic, Detection2DArray, queue_size=10 - ) + if output_rgb_image_topic is not None: + self.output_image_publisher = rospy.Publisher( + output_rgb_image_topic, ROS_Image, queue_size=10 + ) - rospy.Subscriber(input_image_topic, ROS_Image, self.callback) + if output_detection_topic is not None: + self.detection_publisher = rospy.Publisher( + output_detection_topic, Detection2DArray, queue_size=10 + ) def callback(self, data): """ @@ -101,8 +101,7 @@ def callback(self, data): image = self.bridge.from_ros_image(data, encoding="bgr8") detection_boxes = self.detector.infer(image) image_with_detections = ImageWithDetections(image.numpy(), detection_boxes) - print(image_with_detections.data.shape) - tracking_boxes = 
self.learner.infer(image_with_detections) + tracking_boxes = self.learner.infer(image_with_detections, swap_left_top=True) if self.output_image_publisher is not None: frame = image.opencv() @@ -111,22 +110,26 @@ def callback(self, data): Image(frame), encoding="bgr8" ) self.output_image_publisher.publish(message) - rospy.loginfo("Published annotated image") - - ids = [tracking_box.id for tracking_box in tracking_boxes] - # Convert detected boxes to ROS type and publish - ros_boxes = self.bridge.to_ros_boxes(detection_boxes) if self.detection_publisher is not None: + ros_boxes = self.bridge.to_ros_boxes(detection_boxes) self.detection_publisher.publish(ros_boxes) - rospy.loginfo("Published detection boxes") - - ros_ids = Int32MultiArray() - ros_ids.data = ids if self.tracking_id_publisher is not None: + ids = [tracking_box.id for tracking_box in tracking_boxes] + ros_ids = Int32MultiArray() + ros_ids.data = ids self.tracking_id_publisher.publish(ros_ids) - rospy.loginfo("Published tracking ids") + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_object_tracking_2d_deep_sort_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + + rospy.loginfo("Object Tracking 2D Deep Sort Node started.") + rospy.spin() colors = [ @@ -139,7 +142,7 @@ def callback(self, data): ] -def draw_predictions(frame, predictions: TrackingAnnotation, is_centered=False, is_flipped_xy=True): +def draw_predictions(frame, predictions: TrackingAnnotationList, is_centered=False, is_flipped_xy=True): global colors w, h, _ = frame.shape @@ -174,36 +177,65 @@ def draw_predictions(frame, predictions: TrackingAnnotation, is_centered=False, ) -if __name__ == "__main__": - # Automatically run on GPU/CPU - device = "cuda:0" if torch.cuda.is_available() else "cpu" - - # initialize ROS node - rospy.init_node("opendr_deep_sort", anonymous=True) - rospy.loginfo("Deep Sort node started") - - model_name = rospy.get_param("~model_name", "deep_sort") - temp_dir = rospy.get_param("~temp_dir", "temp") - input_image_topic = rospy.get_param( - "~input_image_topic", "/opendr/dataset_image" - ) - rospy.loginfo("Using model_name: {}".format(model_name)) +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", + help="Input Image topic provided by either an image_dataset_node, webcam or any other image node", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", + help="Output annotated image topic with a visualization of detections and their ids", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", + help="Output detections topic", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("-t", "--tracking_id_topic", + help="Output tracking ids topic with the same element count as in output_detection_topic", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects_tracking_id") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("-n", "--model_name", help="Name of the trained model", + type=str, default="deep_sort", choices=["deep_sort"]) + parser.add_argument("-td", "--temp_dir", help="Path to a 
temporary directory with models", + type=str, default="temp") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" detection_learner = ObjectTracking2DFairMotLearner( - device=device, temp_path=temp_dir, + device=device, temp_path=args.temp_dir, ) - if not os.path.exists(os.path.join(temp_dir, "fairmot_dla34")): - ObjectTracking2DFairMotLearner.download("fairmot_dla34", temp_dir) + if not os.path.exists(os.path.join(args.temp_dir, "fairmot_dla34")): + ObjectTracking2DFairMotLearner.download("fairmot_dla34", args.temp_dir) - detection_learner.load(os.path.join(temp_dir, "fairmot_dla34"), verbose=True) + detection_learner.load(os.path.join(args.temp_dir, "fairmot_dla34"), verbose=True) - # created node object deep_sort_node = ObjectTracking2DDeepSortNode( detector=detection_learner, device=device, - model_name=model_name, - input_image_topic=input_image_topic, - temp_dir=temp_dir, + model_name=args.model_name, + input_rgb_image_topic=args.input_rgb_image_topic, + temp_dir=args.temp_dir, + output_detection_topic=args.detections_topic, + output_tracking_id_topic=args.tracking_id_topic, + output_rgb_image_topic=args.output_rgb_image_topic, ) - # begin ROS communications - rospy.spin() + + deep_sort_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/object_tracking_2d_fair_mot_node.py b/projects/opendr_ws/src/opendr_perception/scripts/object_tracking_2d_fair_mot_node.py new file mode 100755 index 0000000000..6fe2f81f46 --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/object_tracking_2d_fair_mot_node.py @@ -0,0 +1,226 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
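# Illustrative sketch (not part of the diffed files): the try/except device-selection block
# is repeated verbatim in every node of this set. A hypothetical helper (select_device is
# not an OpenDR API) showing how the same behaviour could be shared:
import torch

def select_device(requested="cuda"):
    """Return 'cuda' only when it was requested and a GPU is visible, otherwise 'cpu'."""
    try:
        if requested == "cuda" and torch.cuda.is_available():
            return "cuda"
        if requested == "cuda":
            print("GPU not found. Using CPU instead.")
        else:
            print("Using CPU.")
    except Exception:
        print("Using CPU.")
    return "cpu"

# usage, mirroring the nodes' main() functions:
# device = select_device(args.device)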
+ +import argparse +import cv2 +import torch +import os +from opendr.engine.target import TrackingAnnotationList +import rospy +from vision_msgs.msg import Detection2DArray +from std_msgs.msg import Int32MultiArray +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge +from opendr.perception.object_tracking_2d import ( + ObjectTracking2DFairMotLearner, +) +from opendr.engine.data import Image + + +class ObjectTracking2DFairMotNode: + def __init__( + self, + input_rgb_image_topic="/usb_cam/image_raw", + output_detection_topic="/opendr/objects", + output_tracking_id_topic="/opendr/objects_tracking_id", + output_rgb_image_topic="/opendr/image_objects_annotated", + device="cuda:0", + model_name="fairmot_dla34", + temp_dir="temp", + ): + """ + Creates a ROS Node for 2D object tracking + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing + annotated image) + :type output_rgb_image_topic: str + :param output_detection_topic: Topic to which we are publishing the detections + :type output_detection_topic: str + :param output_tracking_id_topic: Topic to which we are publishing the tracking ids + :type output_tracking_id_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param model_name: the pretrained model to download or a saved model in temp_dir folder to use + :type model_name: str + :param temp_dir: the folder to download models + :type temp_dir: str + """ + + self.learner = ObjectTracking2DFairMotLearner( + device=device, temp_path=temp_dir, + ) + if not os.path.exists(os.path.join(temp_dir, model_name)): + ObjectTracking2DFairMotLearner.download(model_name, temp_dir) + + self.learner.load(os.path.join(temp_dir, model_name), verbose=True) + + self.bridge = ROSBridge() + self.input_rgb_image_topic = input_rgb_image_topic + + if output_detection_topic is not None: + self.detection_publisher = rospy.Publisher( + output_detection_topic, Detection2DArray, queue_size=10 + ) + + if output_tracking_id_topic is not None: + self.tracking_id_publisher = rospy.Publisher( + output_tracking_id_topic, Int32MultiArray, queue_size=10 + ) + + if output_rgb_image_topic is not None: + self.output_image_publisher = rospy.Publisher( + output_rgb_image_topic, ROS_Image, queue_size=10 + ) + + def callback(self, data): + """ + Callback that process the input data and publishes to the corresponding topics + :param data: input message + :type data: sensor_msgs.msg.Image + """ + + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding="bgr8") + tracking_boxes = self.learner.infer(image) + + if self.output_image_publisher is not None: + frame = image.opencv() + draw_predictions(frame, tracking_boxes) + message = self.bridge.to_ros_image( + Image(frame), encoding="bgr8" + ) + self.output_image_publisher.publish(message) + + if self.detection_publisher is not None: + detection_boxes = tracking_boxes.bounding_box_list() + ros_boxes = self.bridge.to_ros_boxes(detection_boxes) + self.detection_publisher.publish(ros_boxes) + + if self.tracking_id_publisher is not None: + ids = [tracking_box.id for tracking_box in tracking_boxes] + ros_ids = Int32MultiArray() + ros_ids.data = ids + self.tracking_id_publisher.publish(ros_ids) + + def listen(self): + """ + Start the node and begin processing input data. 
+ """ + rospy.init_node('opendr_object_tracking_2d_fair_mot_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + + rospy.loginfo("Object Tracking 2D Fair Mot Node started.") + rospy.spin() + + +colors = [ + (255, 0, 255), + (0, 0, 255), + (0, 255, 0), + (255, 0, 0), + (35, 69, 55), + (43, 63, 54), +] + + +def draw_predictions(frame, predictions: TrackingAnnotationList, is_centered=False, is_flipped_xy=True): + global colors + w, h, _ = frame.shape + + for prediction in predictions.boxes: + prediction = prediction + + if not hasattr(prediction, "id"): + prediction.id = 0 + + color = colors[int(prediction.id) * 7 % len(colors)] + + x = prediction.left + y = prediction.top + + if is_flipped_xy: + x = prediction.top + y = prediction.left + + if is_centered: + x -= prediction.width + y -= prediction.height + + cv2.rectangle( + frame, + (int(x), int(y)), + ( + int(x + prediction.width), + int(y + prediction.height), + ), + color, + 2, + ) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", + help="Input Image topic provided by either an image_dataset_node, webcam or any other image node", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", + help="Output annotated image topic with a visualization of detections and their ids", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", + help="Output detections topic", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("-t", "--tracking_id_topic", + help="Output tracking ids topic with the same element count as in output_detection_topic", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects_tracking_id") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("-n", "--model_name", help="Name of the trained model", + type=str, default="fairmot_dla34", choices=["fairmot_dla34"]) + parser.add_argument("-td", "--temp_dir", help="Path to a temporary directory with models", + type=str, default="temp") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + fair_mot_node = ObjectTracking2DFairMotNode( + device=device, + model_name=args.model_name, + input_rgb_image_topic=args.input_rgb_image_topic, + temp_dir=args.temp_dir, + output_detection_topic=args.detections_topic, + output_tracking_id_topic=args.tracking_id_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + ) + + fair_mot_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/object_tracking_2d_siamrpn_node.py b/projects/opendr_ws/src/opendr_perception/scripts/object_tracking_2d_siamrpn_node.py new file mode 100644 index 0000000000..6dd2a79291 --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/object_tracking_2d_siamrpn_node.py @@ -0,0 +1,173 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import mxnet as mx + +import cv2 +from math import dist +import rospy + +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import Detection2D +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.engine.target import TrackingAnnotation, BoundingBox +from opendr.perception.object_tracking_2d import SiamRPNLearner +from opendr.perception.object_detection_2d import YOLOv3DetectorLearner + + +class ObjectTrackingSiamRPNNode: + def __init__(self, object_detector, input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_tracking_annotated", + tracker_topic="/opendr/tracked_object", + device="cuda"): + """ + Creates a ROS Node for object tracking with SiamRPN. + :param object_detector: An object detector learner to use for initialization + :type object_detector: opendr.engine.learners.Learner + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param tracker_topic: Topic to which we are publishing the annotation + :type tracker_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + """ + self.input_rgb_image_topic = input_rgb_image_topic + + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.image_publisher = None + + if tracker_topic is not None: + self.object_publisher = rospy.Publisher(tracker_topic, Detection2D, queue_size=1) + else: + self.object_publisher = None + + self.bridge = ROSBridge() + + self.object_detector = object_detector + # Initialize the object detector + self.tracker = SiamRPNLearner(device=device) + self.image = None + self.initialized = False + + def listen(self): + """ + Start the node and begin processing input data. 
+ """ + rospy.init_node('opendr_object_tracking_2d_siamrpn_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.img_callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Object Tracking 2D SiamRPN node started.") + rospy.spin() + + def img_callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. + :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + self.image = image + + if not self.initialized: + # Run object detector to initialize the tracker + image = self.bridge.from_ros_image(data, encoding='bgr8') + boxes = self.object_detector.infer(image) + + img_center = [int(image.data.shape[2] // 2), int(image.data.shape[1] // 2)] # width, height + # Find the box that is closest to the center of the image + center_box = BoundingBox("", left=0, top=0, width=0, height=0) + min_distance = dist([center_box.left, center_box.top], img_center) + for box in boxes: + new_distance = dist([int(box.left + box.width // 2), int(box.top + box.height // 2)], img_center) + if new_distance < min_distance: + center_box = box + min_distance = dist([center_box.left, center_box.top], img_center) + + # Initialize tracker with the most central box found + init_box = TrackingAnnotation(center_box.name, + center_box.left, center_box.top, center_box.width, center_box.height, + id=0, score=center_box.confidence) + + self.tracker.infer(self.image, init_box) + self.initialized = True + rospy.loginfo("Object Tracking 2D SiamRPN node initialized with the most central bounding box.") + + if self.initialized: + # Run object tracking + box = self.tracker.infer(image) + + if self.object_publisher is not None: + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_single_tracking_annotation(box) + self.object_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + cv2.rectangle(image, (box.left, box.top), + (box.left + box.width, box.top + box.height), + (0, 255, 255), 3) + # Convert the annotated OpenDR image to ROS image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_tracking_annotated") + parser.add_argument("-t", "--tracker_topic", help="Topic name for tracker messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/tracked_object") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and mx.context.num_gpus() > 0: + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detector = YOLOv3DetectorLearner(backbone="darknet53", device=device) + object_detector.download(path=".", verbose=True) + object_detector.load("yolo_default") + + object_tracker_2d_siamrpn_node = ObjectTrackingSiamRPNNode(object_detector=object_detector, device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + tracker_topic=args.tracker_topic) + object_tracker_2d_siamrpn_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/object_tracking_3d_ab3dmot_node.py b/projects/opendr_ws/src/opendr_perception/scripts/object_tracking_3d_ab3dmot_node.py new file mode 100755 index 0000000000..ae2af44475 --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/object_tracking_3d_ab3dmot_node.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import os +import torch +import rospy +from vision_msgs.msg import Detection3DArray +from std_msgs.msg import Int32MultiArray +from sensor_msgs.msg import PointCloud as ROS_PointCloud +from opendr_bridge import ROSBridge +from opendr.perception.object_tracking_3d import ObjectTracking3DAb3dmotLearner +from opendr.perception.object_detection_3d import VoxelObjectDetection3DLearner + + +class ObjectTracking3DAb3dmotNode: + def __init__( + self, + detector=None, + input_point_cloud_topic="/opendr/dataset_point_cloud", + output_detection3d_topic="/opendr/detection3d", + output_tracking3d_id_topic="/opendr/tracking3d_id", + device="cuda:0", + ): + """ + Creates a ROS Node for 3D object tracking + :param detector: Learner that provides 3D object detections + :type detector: Learner + :param input_point_cloud_topic: Topic from which we are reading the input point cloud + :type input_point_cloud_topic: str + :param output_detection3d_topic: Topic to which we are publishing the annotations + :type output_detection3d_topic: str + :param output_tracking3d_id_topic: Topic to which we are publishing the tracking ids + :type output_tracking3d_id_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + """ + + self.detector = detector + self.learner = ObjectTracking3DAb3dmotLearner( + device=device + ) + + self.bridge = ROSBridge() + self.input_point_cloud_topic = input_point_cloud_topic + + if output_detection3d_topic is not None: + self.detection_publisher = rospy.Publisher( + output_detection3d_topic, Detection3DArray, queue_size=10 + ) + + if output_tracking3d_id_topic is not None: + self.tracking_id_publisher = rospy.Publisher( + output_tracking3d_id_topic, Int32MultiArray, queue_size=10 + ) + + rospy.Subscriber(input_point_cloud_topic, ROS_PointCloud, self.callback) + + def callback(self, data): + """ + Callback that process the input 
data and publishes to the corresponding topics + :param data: input message + :type data: sensor_msgs.msg.Image + """ + + # Convert sensor_msgs.msg.Image into OpenDR Image + point_cloud = self.bridge.from_ros_point_cloud(data) + detection_boxes = self.detector.infer(point_cloud) + tracking_boxes = self.learner.infer(detection_boxes) + + if self.detection_publisher is not None: + # Convert detected boxes to ROS type and publish + ros_boxes = self.bridge.to_ros_boxes_3d(detection_boxes, classes=["Car", "Van", "Truck", "Pedestrian", "Cyclist"]) + self.detection_publisher.publish(ros_boxes) + + if self.tracking_id_publisher is not None: + ids = [tracking_box.id for tracking_box in tracking_boxes] + ros_ids = Int32MultiArray() + ros_ids.data = ids + self.tracking_id_publisher.publish(ros_ids) + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_object_ab3dmot_tracking_3d_node', anonymous=True) + rospy.Subscriber(self.input_point_cloud_topic, ROS_PointCloud, self.callback, queue_size=1, buff_size=10000000) + + rospy.loginfo("Object Tracking 3D Ab3dmot Node started.") + rospy.spin() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_point_cloud_topic", + help="Point Cloud topic provided by either a point_cloud_dataset_node or any other 3D Point Cloud Node", + type=str, default="/opendr/dataset_point_cloud") + parser.add_argument("-d", "--detections_topic", + help="Output detections topic", + type=lambda value: value if value.lower() != "none" else None, default="/opendr/objects3d") + parser.add_argument("-t", "--tracking3d_id_topic", + help="Output tracking ids topic with the same element count as in output_detection_topic", + type=lambda value: value if value.lower() != "none" else None, default="/opendr/objects_tracking_id") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("-dn", "--detector_model_name", help="Name of the trained model", + type=str, default="tanet_car_xyres_16", choices=["tanet_car_xyres_16"]) + parser.add_argument( + "-dc", "--detector_model_config_path", help="Path to a model .proto config", + type=str, default=os.path.join( + "$OPENDR_HOME", "src", "opendr", "perception", "object_detection_3d", + "voxel_object_detection_3d", "second_detector", "configs", "tanet", + "car", "xyres_16.proto" + ) + ) + parser.add_argument("-td", "--temp_dir", help="Path to a temporary directory with models", + type=str, default="temp") + args = parser.parse_args() + + input_point_cloud_topic = args.input_point_cloud_topic + detector_model_name = args.detector_model_name + temp_dir = args.temp_dir + detector_model_config_path = args.detector_model_config_path + output_detection3d_topic = args.detections_topic + output_tracking3d_id_topic = args.tracking3d_id_topic + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + detector = VoxelObjectDetection3DLearner( + device=device, + temp_path=temp_dir, + model_config_path=detector_model_config_path + ) + if not os.path.exists(os.path.join(temp_dir, detector_model_name)): + VoxelObjectDetection3DLearner.download(detector_model_name, temp_dir) + + detector.load(os.path.join(temp_dir, detector_model_name), verbose=True) + + ab3dmot_node = ObjectTracking3DAb3dmotNode( + detector=detector, + device=device, + input_point_cloud_topic=input_point_cloud_topic, + output_detection3d_topic=output_detection3d_topic, + output_tracking3d_id_topic=output_tracking3d_id_topic, + ) + + ab3dmot_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/perception/scripts/panoptic_segmentation_efficient_ps.py b/projects/opendr_ws/src/opendr_perception/scripts/panoptic_segmentation_efficient_ps_node.py similarity index 54% rename from projects/opendr_ws/src/perception/scripts/panoptic_segmentation_efficient_ps.py rename to projects/opendr_ws/src/opendr_perception/scripts/panoptic_segmentation_efficient_ps_node.py index bce86e46ea..04f7024b2b 100755 --- a/projects/opendr_ws/src/perception/scripts/panoptic_segmentation_efficient_ps.py +++ b/projects/opendr_ws/src/opendr_perception/scripts/panoptic_segmentation_efficient_ps_node.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import sys +from pathlib import Path import argparse from typing import Optional @@ -29,27 +31,31 @@ class EfficientPsNode: def __init__(self, + input_rgb_image_topic: str, checkpoint: str, - input_image_topic: str, output_heatmap_topic: Optional[str] = None, - output_visualization_topic: Optional[str] = None, + output_rgb_visualization_topic: Optional[str] = None, detailed_visualization: bool = False ): """ Initialize the EfficientPS ROS node and create an instance of the respective learner class. - :param checkpoint: Path to a saved model + :param checkpoint: This is either a path to a saved model or one of [cityscapes, kitti] to download + pre-trained model weights. 
:type checkpoint: str - :param input_image_topic: ROS topic for the input image stream - :type input_image_topic: str + :param input_rgb_image_topic: ROS topic for the input image stream + :type input_rgb_image_topic: str :param output_heatmap_topic: ROS topic for the predicted semantic and instance maps :type output_heatmap_topic: str - :param output_visualization_topic: ROS topic for the generated visualization of the panoptic map - :type output_visualization_topic: str + :param output_rgb_visualization_topic: ROS topic for the generated visualization of the panoptic map + :type output_rgb_visualization_topic: str + :param detailed_visualization: if True, generate a combined overview of the input RGB image and the + semantic, instance, and panoptic segmentation maps and publish it on output_rgb_visualization_topic + :type detailed_visualization: bool """ + self.input_rgb_image_topic = input_rgb_image_topic self.checkpoint = checkpoint - self.input_image_topic = input_image_topic self.output_heatmap_topic = output_heatmap_topic - self.output_visualization_topic = output_visualization_topic + self.output_rgb_visualization_topic = output_rgb_visualization_topic self.detailed_visualization = detailed_visualization # Initialize all ROS related things @@ -59,14 +65,27 @@ def __init__(self, self._visualization_publisher = None # Initialize the panoptic segmentation network - self._learner = EfficientPsLearner() + config_file = Path(sys.modules[ + EfficientPsLearner.__module__].__file__).parent / 'configs' / 'singlegpu_cityscapes.py' + self._learner = EfficientPsLearner(str(config_file)) + + # Other + self._tmp_folder = Path(__file__).parent.parent / 'tmp' / 'efficientps' + self._tmp_folder.mkdir(exist_ok=True, parents=True) def _init_learner(self) -> bool: """ - Load the weights from the specified checkpoint file. + The model can be initialized via + 1. downloading pre-trained weights for Cityscapes or KITTI. + 2. passing a path to an existing checkpoint file. This has not been done in the __init__() function since logging is available only once the node is registered. """ + if self.checkpoint in ['cityscapes', 'kitti']: + file_path = EfficientPsLearner.download(str(self._tmp_folder), + trained_on=self.checkpoint) + self.checkpoint = file_path + if self._learner.load(self.checkpoint): rospy.loginfo('Successfully loaded the checkpoint.') return True @@ -78,27 +97,28 @@ def _init_subscribers(self): """ Subscribe to all relevant topics. """ - rospy.Subscriber(self.input_image_topic, ROS_Image, self.callback) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) def _init_publisher(self): """ Set up the publishers as requested by the user. 
""" if self.output_heatmap_topic is not None: - self._instance_heatmap_publisher = rospy.Publisher(f'{self.output_heatmap_topic}/instance', ROS_Image, - queue_size=10) - self._semantic_heatmap_publisher = rospy.Publisher(f'{self.output_heatmap_topic}/semantic', ROS_Image, - queue_size=10) - if self.output_visualization_topic is not None: - self._visualization_publisher = rospy.Publisher(self.output_visualization_topic, ROS_Image, queue_size=10) + self._instance_heatmap_publisher = rospy.Publisher( + f'{self.output_heatmap_topic}/instance', ROS_Image, queue_size=10) + self._semantic_heatmap_publisher = rospy.Publisher( + f'{self.output_heatmap_topic}/semantic', ROS_Image, queue_size=10) + if self.output_rgb_visualization_topic is not None: + self._visualization_publisher = rospy.Publisher(self.output_rgb_visualization_topic, + ROS_Image, queue_size=10) def listen(self): """ Start the node and begin processing input data. The order of the function calls ensures that the node does not try to process input images without being in a trained state. """ - rospy.init_node('efficient_ps', anonymous=True) - rospy.loginfo("EfficientPS node started!") + rospy.init_node('opendr_efficient_panoptic_segmentation_node', anonymous=True) + rospy.loginfo("Panoptic segmentation EfficientPS node started.") if self._init_learner(): self._init_publisher() self._init_subscribers() @@ -121,33 +141,41 @@ def callback(self, data: ROS_Image): if self._visualization_publisher is not None and self._visualization_publisher.get_num_connections() > 0: panoptic_image = EfficientPsLearner.visualize(image, prediction, show_figure=False, detailed=self.detailed_visualization) - self._visualization_publisher.publish(self._bridge.to_ros_image(panoptic_image)) + self._visualization_publisher.publish(self._bridge.to_ros_image(panoptic_image, encoding="rgb8")) if self._instance_heatmap_publisher is not None and self._instance_heatmap_publisher.get_num_connections() > 0: self._instance_heatmap_publisher.publish(self._bridge.to_ros_image(prediction[0])) if self._semantic_heatmap_publisher is not None and self._semantic_heatmap_publisher.get_num_connections() > 0: self._semantic_heatmap_publisher.publish(self._bridge.to_ros_image(prediction[1])) - except Exception: - rospy.logwarn('Failed to generate prediction.') + except Exception as e: + rospy.logwarn(f'Failed to generate prediction: {e}') if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('checkpoint', type=str, help='load the model weights from the provided path') - parser.add_argument('image_topic', type=str, help='listen to images on this topic') - parser.add_argument('--heatmap_topic', type=str, help='publish the semantic and instance maps on this topic') - parser.add_argument('--visualization_topic', type=str, + parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument('-i', '--input_rgb_image_topic', type=str, default='/usb_cam/image_raw', + help='listen to RGB images on this topic') + parser.add_argument('-oh', '--output_heatmap_topic', + type=lambda value: value if value.lower() != "none" else None, + default='/opendr/panoptic', + help='publish the semantic and instance maps on this topic as "OUTPUT_HEATMAP_TOPIC/semantic" \ + and "OUTPUT_HEATMAP_TOPIC/instance"') + parser.add_argument('-ov', '--output_rgb_image_topic', + type=lambda value: value if value.lower() != "none" else None, + default='/opendr/panoptic/rgb_visualization', help='publish the panoptic segmentation map as an RGB 
image on this topic or a more detailed \ overview if using the --detailed_visualization flag') parser.add_argument('--detailed_visualization', action='store_true', help='generate a combined overview of the input RGB image and the semantic, instance, and \ - panoptic segmentation maps') + panoptic segmentation maps and publish it on OUTPUT_RGB_IMAGE_TOPIC') + parser.add_argument('--checkpoint', type=str, default='cityscapes', + help='download pretrained models [cityscapes, kitti] or load from the provided path') args = parser.parse_args() - efficient_ps_node = EfficientPsNode(args.checkpoint, - args.image_topic, - args.heatmap_topic, - args.visualization_topic, + efficient_ps_node = EfficientPsNode(args.input_rgb_image_topic, + args.checkpoint, + args.output_heatmap_topic, + args.output_rgb_image_topic, args.detailed_visualization) efficient_ps_node.listen() diff --git a/projects/opendr_ws/src/perception/scripts/point_cloud_dataset.py b/projects/opendr_ws/src/opendr_perception/scripts/point_cloud_dataset_node.py old mode 100644 new mode 100755 similarity index 52% rename from projects/opendr_ws/src/perception/scripts/point_cloud_dataset.py rename to projects/opendr_ws/src/opendr_perception/scripts/point_cloud_dataset_node.py index 0701e1005e..010b90b1d1 --- a/projects/opendr_ws/src/perception/scripts/point_cloud_dataset.py +++ b/projects/opendr_ws/src/opendr_perception/scripts/point_cloud_dataset_node.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import argparse import os import rospy import time @@ -27,48 +28,57 @@ def __init__( self, dataset: DatasetIterator, output_point_cloud_topic="/opendr/dataset_point_cloud", + data_fps=10, ): """ Creates a ROS Node for publishing dataset point clouds """ - # Initialize the face detector self.dataset = dataset - # Initialize OpenDR ROSBridge object self.bridge = ROSBridge() + self.delay = 1.0 / data_fps - if output_point_cloud_topic is not None: - self.output_point_cloud_publisher = rospy.Publisher( - output_point_cloud_topic, ROS_PointCloud, queue_size=10 - ) + self.output_point_cloud_publisher = rospy.Publisher( + output_point_cloud_topic, ROS_PointCloud, queue_size=10 + ) def start(self): + rospy.loginfo("Timing point cloud images") i = 0 - while not rospy.is_shutdown(): - point_cloud = self.dataset[i % len(self.dataset)][0] # Dataset should have a (PointCloud, Target) pair as elements - - rospy.loginfo("Publishing point_cloud [" + str(i) + "]") message = self.bridge.to_ros_point_cloud( point_cloud ) self.output_point_cloud_publisher.publish(message) - time.sleep(0.1) - + time.sleep(self.delay) i += 1 -if __name__ == "__main__": - - rospy.init_node('opendr_point_cloud_dataset') - - dataset_path = "KITTI/opendr_nano_kitti" +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-d", "--dataset_path", + help="Path to a dataset. If does not exist, nano KITTI dataset will be downloaded there.", + type=str, default="KITTI/opendr_nano_kitti") + parser.add_argument("-ks", "--kitti_subsets_path", + help="Path to kitti subsets. 
Used only if a KITTI dataset is downloaded", + type=str, + default="../../src/opendr/perception/object_detection_3d/datasets/nano_kitti_subsets") + parser.add_argument("-o", "--output_point_cloud_topic", help="Topic name to publish the data", + type=str, default="/opendr/dataset_point_cloud") + parser.add_argument("-f", "--fps", help="Data FPS", + type=float, default=10) + args = parser.parse_args() + + dataset_path = args.dataset_path + kitti_subsets_path = args.kitti_subsets_path + output_point_cloud_topic = args.output_point_cloud_topic + data_fps = args.fps if not os.path.exists(dataset_path): dataset_path = KittiDataset.download_nano_kitti( - "KITTI", kitti_subsets_path="../../src/opendr/perception/object_detection_3d/datasets/nano_kitti_subsets", + "KITTI", kitti_subsets_path=kitti_subsets_path, create_dir=True, ).path @@ -78,5 +88,16 @@ def start(self): dataset_path + "/training/calib", ) - dataset_node = PointCloudDatasetNode(dataset) + rospy.init_node('opendr_point_cloud_dataset_node', anonymous=True) + + dataset_node = PointCloudDatasetNode( + dataset, output_point_cloud_topic=output_point_cloud_topic, data_fps=data_fps + ) + dataset_node.start() + rospy.loginfo("Point cloud dataset node started.") + rospy.spin() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/pose_estimation_node.py b/projects/opendr_ws/src/opendr_perception/scripts/pose_estimation_node.py new file mode 100755 index 0000000000..c07321a3ec --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/pose_estimation_node.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import torch + +import rospy +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge.msg import OpenDRPose2D +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.perception.pose_estimation import draw +from opendr.perception.pose_estimation import LightweightOpenPoseLearner + + +class PoseEstimationNode: + + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_pose_annotated", detections_topic="/opendr/poses", device="cuda", + num_refinement_stages=2, use_stride=False, half_precision=False): + """ + Creates a ROS Node for pose estimation with Lightweight OpenPose. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no pose detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param num_refinement_stages: Specifies the number of pose estimation refinement stages are added on the + model's head, including the initial stage. Can be 0, 1 or 2, with more stages meaning slower and more accurate + inference + :type num_refinement_stages: int + :param use_stride: Whether to add a stride value in the model, which reduces accuracy but increases + inference speed + :type use_stride: bool + :param half_precision: Enables inference using half (fp16) precision instead of single (fp32) precision. + Valid only for GPU-based inference + :type half_precision: bool + """ + self.input_rgb_image_topic = input_rgb_image_topic + + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.pose_publisher = rospy.Publisher(detections_topic, OpenDRPose2D, queue_size=1) + else: + self.pose_publisher = None + + self.bridge = ROSBridge() + + # Initialize the pose estimation learner + self.pose_estimator = LightweightOpenPoseLearner(device=device, num_refinement_stages=num_refinement_stages, + mobilenet_use_stride=use_stride, + half_precision=half_precision) + self.pose_estimator.download(path=".", verbose=True) + self.pose_estimator.load("openpose_default") + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_pose_estimation_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Pose estimation node started.") + rospy.spin() + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: Input image message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run pose estimation + poses = self.pose_estimator.infer(image) + + # Publish detections in ROS message + if self.pose_publisher is not None: + for pose in poses: + # Convert OpenDR pose to ROS pose message using bridge and publish it + self.pose_publisher.publish(self.bridge.to_ros_pose(pose)) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with poses + for pose in poses: + draw(image, pose) + # Convert the annotated OpenDR image to ROS image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_pose_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/poses") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--accelerate", help="Enables acceleration flags (e.g., stride)", default=False, + action="store_true") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + if args.accelerate: + stride = True + stages = 0 + half_prec = True + else: + stride = False + stages = 2 + half_prec = False + + pose_estimator_node = PoseEstimationNode(device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic, + num_refinement_stages=stages, use_stride=stride, half_precision=half_prec) + pose_estimator_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/rgbd_hand_gesture_recognition_node.py b/projects/opendr_ws/src/opendr_perception/scripts/rgbd_hand_gesture_recognition_node.py new file mode 100755 index 0000000000..098e297a18 --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/rgbd_hand_gesture_recognition_node.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
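# Illustrative sketch (not part of the diffed files): the pose estimation pipeline used by
# the node above, run on a single image without ROS. The learner calls mirror the node's
# own usage; the test image path and output file name are assumptions.
import cv2
from opendr.engine.data import Image
from opendr.perception.pose_estimation import LightweightOpenPoseLearner, draw

learner = LightweightOpenPoseLearner(device="cpu", num_refinement_stages=2,
                                     mobilenet_use_stride=False, half_precision=False)
learner.download(path=".", verbose=True)
learner.load("openpose_default")

frame = Image(cv2.imread("test.jpg"))
poses = learner.infer(frame)
canvas = frame.opencv()
for pose in poses:
    draw(canvas, pose)                       # same visualization helper the node uses
cv2.imwrite("test_pose_annotated.jpg", canvas)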
+ +import argparse +import os +import cv2 +import numpy as np +import torch + +import rospy +import message_filters +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import Classification2D + +from opendr.engine.data import Image +from opendr.perception.multimodal_human_centric import RgbdHandGestureLearner +from opendr_bridge import ROSBridge + + +class RgbdHandGestureNode: + + def __init__(self, input_rgb_image_topic="/kinect2/qhd/image_color_rect", + input_depth_image_topic="/kinect2/qhd/image_depth_rect", + output_gestures_topic="/opendr/gestures", device="cuda", delay=0.1): + """ + Creates a ROS Node for gesture recognition from RGBD. Assuming that the following drivers have been installed: + https://github.com/OpenKinect/libfreenect2 and https://github.com/code-iai/iai_kinect2. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param input_depth_image_topic: Topic from which we are reading the input depth image + :type input_depth_image_topic: str + :param output_gestures_topic: Topic to which we are publishing the predicted gesture class + :type output_gestures_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param delay: Define the delay (in seconds) with which rgb message and depth message can be synchronized + :type delay: float + """ + + self.input_rgb_image_topic = input_rgb_image_topic + self.input_depth_image_topic = input_depth_image_topic + self.delay = delay + + self.gesture_publisher = rospy.Publisher(output_gestures_topic, Classification2D, queue_size=10) + + self.bridge = ROSBridge() + + # Initialize the gesture recognition + self.gesture_learner = RgbdHandGestureLearner(n_class=16, architecture="mobilenet_v2", device=device) + model_path = './mobilenet_v2' + if not os.path.exists(model_path): + self.gesture_learner.download(path=model_path) + self.gesture_learner.load(path=model_path) + + # mean and std for preprocessing, based on HANDS dataset + self.mean = np.asarray([0.485, 0.456, 0.406, 0.0303]).reshape(1, 1, 4) + self.std = np.asarray([0.229, 0.224, 0.225, 0.0353]).reshape(1, 1, 4) + + def listen(self): + """ + Start the node and begin processing input data + """ + rospy.init_node('opendr_rgbd_hand_gesture_recognition_node', anonymous=True) + + image_sub = message_filters.Subscriber(self.input_rgb_image_topic, ROS_Image, queue_size=1, buff_size=10000000) + depth_sub = message_filters.Subscriber(self.input_depth_image_topic, ROS_Image, queue_size=1, buff_size=10000000) + # synchronize image and depth data topics + ts = message_filters.ApproximateTimeSynchronizer([image_sub, depth_sub], queue_size=10, slop=self.delay, + allow_headerless=True) + ts.registerCallback(self.callback) + + rospy.loginfo("RGBD hand gesture recognition node started.") + rospy.spin() + + def callback(self, rgb_data, depth_data): + """ + Callback that process the input data and publishes to the corresponding topics + :param rgb_data: input image message + :type rgb_data: sensor_msgs.msg.Image + :param depth_data: input depth image message + :type depth_data: sensor_msgs.msg.Image + """ + + # Convert sensor_msgs.msg.Image into OpenDR Image and preprocess + rgb_image = self.bridge.from_ros_image(rgb_data, encoding='bgr8') + depth_data.encoding = 'mono16' + depth_image = self.bridge.from_ros_image_to_depth(depth_data, encoding='mono16') + img = self.preprocess(rgb_image, depth_image) + + # Run gesture recognition + gesture_class = 
self.gesture_learner.infer(img) + + # Publish results + ros_gesture = self.bridge.from_category_to_rosclass(gesture_class) + self.gesture_publisher.publish(ros_gesture) + + def preprocess(self, rgb_image, depth_image): + """ + Preprocess rgb_image, depth_image and concatenate them + :param rgb_image: input RGB image + :type rgb_image: engine.data.Image + :param depth_image: input depth image + :type depth_image: engine.data.Image + """ + rgb_image = rgb_image.convert(format='channels_last') / (2**8 - 1) + depth_image = depth_image.convert(format='channels_last') / (2**16 - 1) + + # resize the images to 224x224 + rgb_image = cv2.resize(rgb_image, (224, 224)) + depth_image = cv2.resize(depth_image, (224, 224)) + + # concatenate and standardize + img = np.concatenate([rgb_image, np.expand_dims(depth_image, axis=-1)], axis=-1) + img = (img - self.mean) / self.std + img = Image(img, dtype=np.float32) + return img + + +if __name__ == '__main__': + # default topics are according to kinectv2 drivers at https://github.com/OpenKinect/libfreenect2 + # and https://github.com/code-iai/iai_kinect2 + parser = argparse.ArgumentParser() + parser.add_argument("-ic", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/kinect2/qhd/image_color_rect") + parser.add_argument("-id", "--input_depth_image_topic", help="Topic name for input depth image", + type=str, default="/kinect2/qhd/image_depth_rect") + parser.add_argument("-o", "--output_gestures_topic", help="Topic name for predicted gesture class", + type=str, default="/opendr/gestures") + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", + choices=["cuda", "cpu"]) + parser.add_argument("--delay", help="The delay (in seconds) with which RGB message and " + "depth message can be synchronized", type=float, default=0.1) + + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU") + device = "cpu" + except: + print("Using CPU") + device = "cpu" + + gesture_node = RgbdHandGestureNode(input_rgb_image_topic=args.input_rgb_image_topic, + input_depth_image_topic=args.input_depth_image_topic, + output_gestures_topic=args.output_gestures_topic, device=device, + delay=args.delay) + + gesture_node.listen() diff --git a/projects/opendr_ws/src/opendr_perception/scripts/semantic_segmentation_bisenet_node.py b/projects/opendr_ws/src/opendr_perception/scripts/semantic_segmentation_bisenet_node.py new file mode 100755 index 0000000000..0047e8fe2e --- /dev/null +++ b/projects/opendr_ws/src/opendr_perception/scripts/semantic_segmentation_bisenet_node.py @@ -0,0 +1,193 @@ +#!/usr/bin/env python3 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
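+# Example invocation (illustrative; assumes the opendr_perception package has been built and
+# sourced, and that an image source such as usb_cam is publishing on /usb_cam/image_raw):
+#   rosrun opendr_perception semantic_segmentation_bisenet_node.py -i /usb_cam/image_raw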
+ +import argparse +import numpy as np +import torch +import cv2 +import colorsys + +import rospy +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROSBridge + +from opendr.engine.data import Image +from opendr.engine.target import Heatmap +from opendr.perception.semantic_segmentation import BisenetLearner + + +class BisenetNode: + + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", output_heatmap_topic="/opendr/heatmap", + output_rgb_image_topic="/opendr/heatmap_visualization", device="cuda"): + """ + Creates a ROS Node for semantic segmentation with Bisenet. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_heatmap_topic: Topic to which we are publishing the heatmap in the form of a ROS image containing + class ids + :type output_heatmap_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the heatmap image blended with the + input image and a class legend for visualization purposes + :type output_rgb_image_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + """ + self.input_rgb_image_topic = input_rgb_image_topic + + if output_heatmap_topic is not None: + self.heatmap_publisher = rospy.Publisher(output_heatmap_topic, ROS_Image, queue_size=1) + else: + self.heatmap_publisher = None + + if output_rgb_image_topic is not None: + self.visualization_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) + else: + self.visualization_publisher = None + + self.bridge = ROSBridge() + + # Initialize the semantic segmentation model + self.learner = BisenetLearner(device=device) + self.learner.download(path="bisenet_camvid") + self.learner.load("bisenet_camvid") + + self.class_names = ["Bicyclist", "Building", "Car", "Column Pole", "Fence", "Pedestrian", "Road", "Sidewalk", + "Sign Symbol", "Sky", "Tree", "Unknown"] + self.colors = self.get_distinct_colors(len(self.class_names)) # Generate n distinct colors + + def listen(self): + """ + Start the node and begin processing input data. + """ + rospy.init_node('opendr_semantic_segmentation_bisenet_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Semantic segmentation BiSeNet node started.") + rospy.spin() + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: Input image message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + try: + # Run semantic segmentation to retrieve the OpenDR heatmap + heatmap = self.learner.infer(image) + + # Publish heatmap in the form of an image containing class ids + if self.heatmap_publisher is not None: + heatmap = Heatmap(heatmap.data.astype(np.uint8)) # Convert to uint8 + self.heatmap_publisher.publish(self.bridge.to_ros_image(heatmap)) + + # Publish heatmap color visualization blended with the input image and a class color legend + if self.visualization_publisher is not None: + heatmap_colors = Image(self.colors[heatmap.numpy()]) + image = Image(cv2.resize(image.convert("channels_last", "bgr"), (960, 720))) + alpha = 0.4 # 1.0 means full input image, 0.0 means full heatmap + beta = (1.0 - alpha) + image_blended = cv2.addWeighted(image.opencv(), alpha, heatmap_colors.opencv(), beta, 0.0) + # Add a legend + image_blended = self.add_legend(image_blended, np.unique(heatmap.data)) + + self.visualization_publisher.publish(self.bridge.to_ros_image(Image(image_blended), + encoding='bgr8')) + except Exception as e: + print(e) + rospy.logwarn('Failed to generate prediction.') + + def add_legend(self, image, unique_class_ints): + # Text setup + origin_x, origin_y = 5, 5 # Text origin x, y + color_rectangle_size = 25 + font_size = 1.0 + font_thickness = 2 + w_max = 0 + for i in range(len(unique_class_ints)): + text = self.class_names[unique_class_ints[i]] # Class name + x, y = origin_x, origin_y + i * color_rectangle_size # Text position + # Determine class color and convert to regular integers + color = (int(self.colors[unique_class_ints[i]][0]), + int(self.colors[unique_class_ints[i]][1]), + int(self.colors[unique_class_ints[i]][2])) + # Get text width and height + (w, h), _ = cv2.getTextSize(text, cv2.FONT_HERSHEY_SIMPLEX, font_size, font_thickness) + if w >= w_max: + w_max = w + # Draw partial background rectangle + image = cv2.rectangle(image, (x - origin_x, y), + (x + origin_x + color_rectangle_size + w_max, + y + color_rectangle_size), + (255, 255, 255, 0.5), -1) + # Draw color rectangle + image = cv2.rectangle(image, (x, y), + (x + color_rectangle_size, y + color_rectangle_size), color, -1) + # Draw class name text + image = cv2.putText(image, text, (x + color_rectangle_size + 2, y + h), + cv2.FONT_HERSHEY_SIMPLEX, font_size, (0, 0, 0), font_thickness) + return image + + @staticmethod + def hsv_to_rgb(h, s, v): + (r, g, b) = colorsys.hsv_to_rgb(h, s, v) + return np.array([int(255 * r), int(255 * g), int(255 * b)]) + + def get_distinct_colors(self, n): + hue_partition = 1.0 / (n + 1) + return np.array([self.hsv_to_rgb(hue_partition * value, 1.0, 1.0) for value in range(0, n)]).astype(np.uint8) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_heatmap_topic", help="Topic to which we are publishing the heatmap in the form " + "of a ROS image containing class ids", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/heatmap") + parser.add_argument("-ov", "--output_rgb_image_topic", help="Topic to which we are publishing the heatmap image " + "blended with the input image and a class legend for " + "visualization purposes", + type=lambda value: value if value.lower() != "none" else None, + 
default="/opendr/heatmap_visualization") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + bisenet_node = BisenetNode(device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_heatmap_topic=args.output_heatmap_topic, + output_rgb_image_topic=args.output_rgb_image_topic) + bisenet_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws/src/perception/scripts/skeleton_based_action_recognition.py b/projects/opendr_ws/src/opendr_perception/scripts/skeleton_based_action_recognition_node.py old mode 100644 new mode 100755 similarity index 64% rename from projects/opendr_ws/src/perception/scripts/skeleton_based_action_recognition.py rename to projects/opendr_ws/src/opendr_perception/scripts/skeleton_based_action_recognition_node.py index 0556acfd52..0bb74a0e8e --- a/projects/opendr_ws/src/perception/scripts/skeleton_based_action_recognition.py +++ b/projects/opendr_ws/src/opendr_perception/scripts/skeleton_based_action_recognition_node.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,13 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. - +import argparse import rospy import torch import numpy as np from std_msgs.msg import String from vision_msgs.msg import ObjectHypothesis -from vision_msgs.msg import Detection2DArray +from opendr_bridge.msg import OpenDRPose2D from sensor_msgs.msg import Image as ROS_Image from opendr_bridge import ROSBridge from opendr.perception.pose_estimation import draw @@ -31,18 +31,19 @@ class SkeletonActionRecognitionNode: - def __init__(self, input_image_topic="/usb_cam/image_raw", output_image_topic="/opendr/image_pose_annotated", + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", + output_rgb_image_topic="/opendr/image_pose_annotated", pose_annotations_topic="/opendr/poses", - output_category_topic="/opendr/skeleton_based_action_recognition", - output_category_description_topic="/opendr/skeleton_based_action_recognition_description", + output_category_topic="/opendr/skeleton_recognized_action", + output_category_description_topic="/opendr/skeleton_recognized_action_description", device="cuda", model='stgcn'): """ Creates a ROS Node for skeleton-based action recognition - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str - :param output_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing annotated image) - :type output_image_topic: str + :type output_rgb_image_topic: str :param pose_annotations_topic: Topic to which we are publishing the annotations (if None, we are not publishing annotated pose annotations) :type pose_annotations_topic: str @@ -60,34 +61,34 @@ def 
__init__(self, input_image_topic="/usb_cam/image_raw", output_image_topic="/ """ # Set up ROS topics and bridge + self.input_rgb_image_topic = input_rgb_image_topic + self.bridge = ROSBridge() - if output_category_topic is not None: - self.hypothesis_publisher = rospy.Publisher(output_category_topic, ObjectHypothesis, queue_size=10) - else: - self.hypothesis_publisher = None - - if output_category_description_topic is not None: - self.string_publisher = rospy.Publisher(output_category_description_topic, String, queue_size=10) - else: - self.string_publisher = None - - if output_image_topic is not None: - self.image_publisher = rospy.Publisher(output_image_topic, ROS_Image, queue_size=10) + if output_rgb_image_topic is not None: + self.image_publisher = rospy.Publisher(output_rgb_image_topic, ROS_Image, queue_size=1) else: self.image_publisher = None if pose_annotations_topic is not None: - self.pose_publisher = rospy.Publisher(pose_annotations_topic, Detection2DArray, queue_size=10) + self.pose_publisher = rospy.Publisher(pose_annotations_topic, OpenDRPose2D, queue_size=1) else: self.pose_publisher = None - self.input_image_topic = input_image_topic - self.bridge = ROSBridge() + if output_category_topic is not None: + self.hypothesis_publisher = rospy.Publisher(output_category_topic, ObjectHypothesis, queue_size=1) + else: + self.hypothesis_publisher = None + + if output_category_description_topic is not None: + self.string_publisher = rospy.Publisher(output_category_description_topic, String, queue_size=1) + else: + self.string_publisher = None # Initialize the pose estimation - self.pose_estimator = LightweightOpenPoseLearner(device=device, num_refinement_stages=0, + self.pose_estimator = LightweightOpenPoseLearner(device=device, num_refinement_stages=2, mobilenet_use_stride=False, - half_precision=False) + half_precision=False + ) self.pose_estimator.download(path=".", verbose=True) self.pose_estimator.load("openpose_default") @@ -111,9 +112,9 @@ def listen(self): """ Start the node and begin processing input data """ - rospy.init_node('opendr_skeleton_based_action_recognition', anonymous=True) - rospy.Subscriber(self.input_image_topic, ROS_Image, self.callback) - rospy.loginfo("Skeleton-based action recognition node started!") + rospy.init_node('opendr_skeleton_action_recognition_node', anonymous=True) + rospy.Subscriber(self.input_rgb_image_topic, ROS_Image, self.callback, queue_size=1, buff_size=10000000) + rospy.loginfo("Skeleton-based action recognition node started.") rospy.spin() def callback(self, data): @@ -155,6 +156,7 @@ def callback(self, data): # Run action recognition category = self.action_classifier.infer(skeleton_seq) + category.confidence = float(category.confidence.max()) if self.hypothesis_publisher is not None: self.hypothesis_publisher.publish(self.bridge.to_ros_category(category)) @@ -171,7 +173,8 @@ def _select_2_poses(poses): energy.append(s) energy = np.array(energy) index = energy.argsort()[::-1][0:2] - selected_poses.append(poses[index]) + for i in range(len(index)): + selected_poses.append(poses[index[i]]) return selected_poses @@ -188,16 +191,49 @@ def _pose2numpy(num_current_frames, poses_list): if __name__ == '__main__': - # Select the device for running the + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated image", + type=lambda value: value if 
value.lower() != "none" else None, + default="/opendr/image_pose_annotated") + parser.add_argument("-p", "--pose_annotations_topic", help="Topic name for pose annotations", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/poses") + parser.add_argument("-c", "--output_category_topic", help="Topic name for recognized action category", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/skeleton_recognized_action") + parser.add_argument("-d", "--output_category_description_topic", + help="Topic name for description of the recognized action category", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/skeleton_recognized_action_description") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--model", help="Model to use, either \"stgcn\" or \"pstgcn\"", + type=str, default="stgcn", choices=["stgcn", "pstgcn"]) + + args = parser.parse_args() + try: - if torch.cuda.is_available(): - print("GPU found.") - device = 'cuda' - else: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": print("GPU not found. Using CPU instead.") - device = 'cpu' + device = "cpu" + else: + print("Using CPU.") + device = "cpu" except: - device = 'cpu' - - pose_estimation_node = SkeletonActionRecognitionNode(device=device) - pose_estimation_node.listen() + print("Using CPU.") + device = "cpu" + + skeleton_action_recognition_node = \ + SkeletonActionRecognitionNode(input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + pose_annotations_topic=args.pose_annotations_topic, + output_category_topic=args.output_category_topic, + output_category_description_topic=args.output_category_description_topic, + device=device, + model=args.model) + skeleton_action_recognition_node.listen() diff --git a/projects/opendr_ws/src/perception/scripts/speech_command_recognition.py b/projects/opendr_ws/src/opendr_perception/scripts/speech_command_recognition_node.py similarity index 54% rename from projects/opendr_ws/src/perception/scripts/speech_command_recognition.py rename to projects/opendr_ws/src/opendr_perception/scripts/speech_command_recognition_node.py index 4726b478a1..3d6385fd58 100755 --- a/projects/opendr_ws/src/perception/scripts/speech_command_recognition.py +++ b/projects/opendr_ws/src/opendr_perception/scripts/speech_command_recognition_node.py @@ -28,26 +28,26 @@ class SpeechRecognitionNode: - def __init__(self, input_topic='/audio/audio', prediction_topic="/opendr/speech_recognition", - buffer_size=1.5, model='matchboxnet', model_path=None, device='cuda'): + def __init__(self, input_audio_topic="/audio/audio", output_speech_command_topic="/opendr/speech_recognition", + buffer_size=1.5, model="matchboxnet", model_path=None, device="cuda"): """ Creates a ROS Node for speech command recognition - :param input_topic: Topic from which the audio data is received - :type input_topic: str - :param prediction_topic: Topic to which the predictions are published - :type prediction_topic: str + :param input_audio_topic: Topic from which the audio data is received + :type input_audio_topic: str + :param output_speech_command_topic: Topic to which the predictions are published + :type output_speech_command_topic: str :param buffer_size: Length of the audio buffer in seconds :type buffer_size: float :param model: base speech command 
recognition model: matchboxnet or quad_selfonn :type model: str - :param device: device for inference ('cpu' or 'cuda') + :param device: device for inference ("cpu" or "cuda") :type device: str """ - self.publisher = rospy.Publisher(prediction_topic, Classification2D, queue_size=10) + self.publisher = rospy.Publisher(output_speech_command_topic, Classification2D, queue_size=10) - rospy.Subscriber(input_topic, AudioData, self.callback) + rospy.Subscriber(input_audio_topic, AudioData, self.callback) self.bridge = ROSBridge() @@ -59,17 +59,17 @@ def __init__(self, input_topic='/audio/audio', prediction_topic="/opendr/speech_ # Initialize the recognition model if model == "matchboxnet": self.learner = MatchboxNetLearner(output_classes_n=20, device=device) - load_path = './MatchboxNet' + load_path = "./MatchboxNet" elif model == "edgespeechnets": self.learner = EdgeSpeechNetsLearner(output_classes_n=20, device=device) assert model_path is not None, "No pretrained EdgeSpeechNets model available for download" elif model == "quad_selfonn": self.learner = QuadraticSelfOnnLearner(output_classes_n=20, device=device) - load_path = './QuadraticSelfOnn' + load_path = "./QuadraticSelfOnn" # Download the recognition model if model_path is None: - self.learner.download_pretrained(path='.') + self.learner.download_pretrained(path=".") self.learner.load(load_path) else: self.learner.load(model_path) @@ -78,15 +78,15 @@ def listen(self): """ Start the node and begin processing input data """ - rospy.init_node('opendr_speech_command_recognition', anonymous=True) - rospy.loginfo("Speech command recognition node started!") + rospy.init_node("opendr_speech_command_recognition_node", anonymous=True) + rospy.loginfo("Speech command recognition node started.") rospy.spin() def callback(self, msg_data): """ Callback that processes the input data and publishes predictions to the output topic - :param data: incoming message - :type data: audio_common_msgs.msg.AudioData + :param msg_data: incoming message + :type msg_data: audio_common_msgs.msg.AudioData """ # Accumulate data until the buffer is full data = np.reshape(np.frombuffer(msg_data.data, dtype=np.int16)/32768.0, (1, -1)) @@ -105,22 +105,36 @@ def callback(self, msg_data): self.data_buffer = np.zeros((1, 1)) -if __name__ == '__main__': - # Select the device for running - try: - device = 'cuda' if torch.cuda.is_available() else 'cpu' - except: - device = 'cpu' - +if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('input_topic', type=str, help='listen to input data on this topic') - parser.add_argument('--buffer_size', type=float, default=1.5, help='size of the audio buffer in seconds') - parser.add_argument('--model', choices=["matchboxnet", "edgespeechnets", "quad_selfonn"], default="matchboxnet", - help='model to be used for prediction: matchboxnet or quad_selfonn') - parser.add_argument('--model_path', type=str, - help='path to the model files, if not given, the pretrained model will be downloaded') + parser.add_argument("-i", "--input_audio_topic", type=str, default="audio/audio", + help="Listen to input data on this topic") + parser.add_argument("-o", "--output_speech_command_topic", type=str, default="/opendr/speech_recognition", + help="Topic name for speech command output") + parser.add_argument("--device", type=str, default="cuda", choices=["cuda", "cpu"], + help="Device to use (cpu, cuda)") + parser.add_argument("--buffer_size", type=float, default=1.5, help="Size of the audio buffer in seconds") + 
parser.add_argument("--model", default="matchboxnet", choices=["matchboxnet", "edgespeechnets", "quad_selfonn"], + help="Model to be used for prediction: matchboxnet, edgespeechnets or quad_selfonn") + parser.add_argument("--model_path", type=str, + help="Path to the model files, if not given, the pretrained model will be downloaded") args = parser.parse_args() - speech_node = SpeechRecognitionNode(input_topic=args.input_topic, buffer_size=args.buffer_size, - model=args.model, model_path=args.model_path, device=device) + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU") + device = "cpu" + except: + print("Using CPU") + device = "cpu" + + speech_node = SpeechRecognitionNode(input_audio_topic=args.input_audio_topic, + output_speech_command_topic=args.output_speech_command_topic, + buffer_size=args.buffer_size, model=args.model, model_path=args.model_path, + device=device) speech_node.listen() diff --git a/projects/opendr_ws/src/perception/scripts/video_activity_recognition.py b/projects/opendr_ws/src/opendr_perception/scripts/video_activity_recognition_node.py similarity index 59% rename from projects/opendr_ws/src/perception/scripts/video_activity_recognition.py rename to projects/opendr_ws/src/opendr_perception/scripts/video_activity_recognition_node.py index b79a462e3a..f05169f5ba 100755 --- a/projects/opendr_ws/src/perception/scripts/video_activity_recognition.py +++ b/projects/opendr_ws/src/opendr_perception/scripts/video_activity_recognition_node.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright 2020-2022 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,12 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. - +import argparse import rospy import torch import torchvision import cv2 -import numpy as np from pathlib import Path from std_msgs.msg import String from vision_msgs.msg import ObjectHypothesis @@ -31,20 +30,19 @@ class HumanActivityRecognitionNode: - def __init__( self, - input_image_topic="/usb_cam/image_raw", + input_rgb_image_topic="/usb_cam/image_raw", output_category_topic="/opendr/human_activity_recognition", output_category_description_topic="/opendr/human_activity_recognition_description", device="cuda", - model='cox3d-m' + model="cox3d-m", ): """ - Creates a ROS Node for face recognition - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str - :param output_category_topic: Topic to which we are publishing the recognized face info + Creates a ROS Node for video-based human activity recognition. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_category_topic: Topic to which we are publishing the recognized activity (if None, we are not publishing the info) :type output_category_topic: str :param output_category_description_topic: Topic to which we are publishing the ID of the recognized action @@ -52,12 +50,20 @@ def __init__( :type output_category_description_topic: str :param device: device on which we are running inference ('cpu' or 'cuda') :type device: str - :param model: architecture to use for human activity recognition. + :param model: Architecture to use for human activity recognition. 
(Options: 'cox3d-s', 'cox3d-m', 'cox3d-l', 'x3d-xs', 'x3d-s', 'x3d-m', 'x3d-l') :type model: str """ - assert model in {"cox3d-s", "cox3d-m", "cox3d-l", "x3d-xs", "x3d-s", "x3d-m", "x3d-l"} + assert model in { + "cox3d-s", + "cox3d-m", + "cox3d-l", + "x3d-xs", + "x3d-s", + "x3d-m", + "x3d-l", + } model_name, model_size = model.split("-") Learner = {"cox3d": CoX3DLearner, "x3d": X3DLearner}[model_name] @@ -68,7 +74,9 @@ def __init__( # Set up preprocessing if model_name == "cox3d": - self.preprocess = _image_preprocess(image_size=self.learner.model_hparams["image_size"]) + self.preprocess = _image_preprocess( + image_size=self.learner.model_hparams["image_size"] + ) else: # == x3d self.preprocess = _video_preprocess( image_size=self.learner.model_hparams["image_size"], @@ -76,23 +84,33 @@ def __init__( ) # Set up ROS topics and bridge + self.input_rgb_image_topic = input_rgb_image_topic self.hypothesis_publisher = ( - rospy.Publisher(output_category_topic, ObjectHypothesis, queue_size=10) if output_category_topic else None + rospy.Publisher(output_category_topic, ObjectHypothesis, queue_size=1) + if output_category_topic + else None ) self.string_publisher = ( - rospy.Publisher(output_category_description_topic, String, queue_size=10) if output_category_topic else None + rospy.Publisher(output_category_description_topic, String, queue_size=1) + if output_category_description_topic + else None ) - rospy.Subscriber(input_image_topic, ROS_Image, self.callback) - self.bridge = ROSBridge() def listen(self): """ Start the node and begin processing input data """ - rospy.init_node('opendr_human_activity_recognition', anonymous=True) - rospy.loginfo("Human activity recognition node started!") + rospy.init_node("opendr_human_activity_recognition_node", anonymous=True) + rospy.Subscriber( + self.input_rgb_image_topic, + ROS_Image, + self.callback, + queue_size=1, + buff_size=10000000, + ) + rospy.loginfo("Human activity recognition node started.") rospy.spin() def callback(self, data): @@ -101,49 +119,43 @@ def callback(self, data): :param data: input message :type data: sensor_msgs.msg.Image """ - image = self.bridge.from_ros_image(data) + image = self.bridge.from_ros_image(data, encoding="rgb8") if image is None: return - x = self.preprocess(image.numpy()) + x = self.preprocess(image.convert("channels_first", "rgb")) result = self.learner.infer(x) assert len(result) == 1 category = result[0] - category.confidence = float(max(category.confidence.max())) # Confidence for predicted class + category.confidence = float(category.confidence.max()) # Confidence for predicted class category.description = KINETICS400_CLASSES[category.data] # Class name if self.hypothesis_publisher is not None: self.hypothesis_publisher.publish(self.bridge.to_ros_category(category)) if self.string_publisher is not None: - self.string_publisher.publish(self.bridge.to_ros_category_description(category)) + self.string_publisher.publish( + self.bridge.to_ros_category_description(category) + ) -def _resize(image, width=None, height=None, inter=cv2.INTER_AREA): +def _resize(image, size=None, inter=cv2.INTER_AREA): # initialize the dimensions of the image to be resized and # grab the image size dim = None (h, w) = image.shape[:2] - # if both the width and height are None, then return the - # original image - if width is None and height is None: - return image - - # check to see if the width is None - if width is None: - # calculate the ratio of the height and construct the + if h > w: + # calculate the ratio of the width and 
construct the # dimensions - r = height / float(h) - dim = (int(w * r), height) - - # otherwise, the height is None + r = size / float(w) + dim = (size, int(h * r)) else: - # calculate the ratio of the width and construct the + # calculate the ratio of the height and construct the # dimensions - r = width / float(w) - dim = (width, int(h * r)) + r = size / float(h) + dim = (int(w * r), size) # resize the image resized = cv2.resize(image, dim, interpolation=inter) @@ -160,11 +172,11 @@ def _image_preprocess(image_size: int): def wrapped(frame): nonlocal standardize frame = frame.transpose((1, 2, 0)) # C, H, W -> H, W, C - frame = _resize(frame, height=image_size, width=image_size) + frame = _resize(frame, size=image_size) frame = torch.tensor(frame).permute((2, 0, 1)) # H, W, C -> C, H, W frame = frame / 255.0 # [0, 255] -> [0.0, 1.0] frame = standardize(frame) - return Image(frame, dtype=np.float) + return Image(frame, dtype=float) return wrapped @@ -179,7 +191,7 @@ def _video_preprocess(image_size: int, window_size: int): def wrapped(frame): nonlocal frames, standardize frame = frame.transpose((1, 2, 0)) # C, H, W -> H, W, C - frame = _resize(frame, height=image_size, width=image_size) + frame = _resize(frame, size=image_size) frame = torch.tensor(frame).permute((2, 0, 1)) # H, W, C -> C, H, W frame = frame / 255.0 # [0, 255] -> [0.0, 1.0] frame = standardize(frame) @@ -194,17 +206,46 @@ def wrapped(frame): return wrapped -if __name__ == '__main__': - # Select the device for running the +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/usb_cam/image_raw") + parser.add_argument("-o", "--output_category_topic", help="Topic to which we are publishing the recognized activity", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/human_activity_recognition") + parser.add_argument("-od", "--output_category_description_topic", + help="Topic to which we are publishing the ID of the recognized action", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/human_activity_recognition_description") + parser.add_argument("--device", help='Device to use, either "cpu" or "cuda", defaults to "cuda"', + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--model", help="Architecture to use for human activity recognition.", + type=str, default="cox3d-m", + choices=["cox3d-s", "cox3d-m", "cox3d-l", "x3d-xs", "x3d-s", "x3d-m", "x3d-l"]) + args = parser.parse_args() + try: - if torch.cuda.is_available(): - print("GPU found.") - device = 'cuda' - else: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": print("GPU not found. 
Using CPU instead.") - device = 'cpu' + device = "cpu" + else: + print("Using CPU.") + device = "cpu" except: - device = 'cpu' - - human_activity_recognition_node = HumanActivityRecognitionNode(device=device) + print("Using CPU.") + device = "cpu" + + human_activity_recognition_node = HumanActivityRecognitionNode( + input_rgb_image_topic=args.input_rgb_image_topic, + output_category_topic=args.output_category_topic, + output_category_description_topic=args.output_category_description_topic, + device=device, + model=args.model, + ) human_activity_recognition_node.listen() + + +if __name__ == "__main__": + main() diff --git a/projects/opendr_ws/src/perception/src/.keep b/projects/opendr_ws/src/opendr_perception/src/.keep similarity index 100% rename from projects/opendr_ws/src/perception/src/.keep rename to projects/opendr_ws/src/opendr_perception/src/.keep diff --git a/projects/opendr_ws/src/opendr_planning/CMakeLists.txt b/projects/opendr_ws/src/opendr_planning/CMakeLists.txt new file mode 100644 index 0000000000..f6f9a5900a --- /dev/null +++ b/projects/opendr_ws/src/opendr_planning/CMakeLists.txt @@ -0,0 +1,14 @@ +cmake_minimum_required(VERSION 3.0.2) +project(opendr_planning) + +find_package(catkin REQUIRED COMPONENTS + roscpp + rospy + std_msgs +) + +catkin_package() + +include_directories( + ${catkin_INCLUDE_DIRS} +) diff --git a/projects/opendr_ws/src/ros_bridge/include/ros_bridge/.keep b/projects/opendr_ws/src/opendr_planning/include/opendr_planning/.keep similarity index 100% rename from projects/opendr_ws/src/ros_bridge/include/ros_bridge/.keep rename to projects/opendr_ws/src/opendr_planning/include/opendr_planning/.keep diff --git a/projects/opendr_ws/src/opendr_planning/package.xml b/projects/opendr_ws/src/opendr_planning/package.xml new file mode 100644 index 0000000000..c049e29ddb --- /dev/null +++ b/projects/opendr_ws/src/opendr_planning/package.xml @@ -0,0 +1,18 @@ + + + opendr_planning + 2.0.0 + OpenDR's ROS planning package + OpenDR Project Coordinator + Apache License v2.0 + opendr.eu + catkin + rospy + std_msgs + rospy + std_msgs + rospy + std_msgs + + + diff --git a/projects/opendr_ws/src/opendr_planning/scripts/end_to_end_planner_node.py b/projects/opendr_ws/src/opendr_planning/scripts/end_to_end_planner_node.py new file mode 100755 index 0000000000..757280aa16 --- /dev/null +++ b/projects/opendr_ws/src/opendr_planning/scripts/end_to_end_planner_node.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
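+# Note (illustrative): this node assumes a running Webots simulation exposing the /range_finder,
+# /gps and /inertial_unit devices enabled via the service proxies below. Once the opendr_planning
+# package has been built and sourced, a typical invocation would be:
+#   rosrun opendr_planning end_to_end_planner_node.py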
+ + +import rospy +import numpy as np +import webots_ros.srv +from cv_bridge import CvBridge +from std_msgs.msg import String +from sensor_msgs.msg import Imu, Image +from geometry_msgs.msg import PoseStamped, PointStamped +from opendr.planning.end_to_end_planning import EndToEndPlanningRLLearner +from opendr.planning.end_to_end_planning.utils.euler_quaternion_transformations import euler_from_quaternion +from opendr.planning.end_to_end_planning.utils.euler_quaternion_transformations import euler_to_quaternion + + +class EndToEndPlannerNode: + + def __init__(self): + """ + Creates a ROS Node for end-to-end planner + """ + self.node_name = "opendr_end_to_end_planner" + self.bridge = CvBridge() + self.model_name = "" + self.current_pose = PoseStamped() + self.target_pose = PoseStamped() + self.current_pose.header.frame_id = "map" + self.target_pose.header.frame_id = "map" + rospy.init_node(self.node_name, anonymous=True) + self.r = rospy.Rate(25) + rospy.Subscriber("/model_name", String, self.model_name_callback) + counter = 0 + while self.model_name == "": + self.r.sleep() + counter += 1 + if counter > 25: + break + if self.model_name == "": + rospy.loginfo("Webots model is not started!") + return + self.input_depth_image_topic = "/range_finder/range_image" + self.position_topic = "/gps/values" + self.orientation_topic = "/inertial_unit/quaternion" + self.ros_srv_range_sensor_enable = rospy.ServiceProxy( + "/range_finder/enable", webots_ros.srv.set_int) + self.ros_srv_gps_sensor_enable = rospy.ServiceProxy( + "/gps/enable", webots_ros.srv.set_int) + self.ros_srv_inertial_unit_enable = rospy.ServiceProxy( + "/inertial_unit/enable", webots_ros.srv.set_int) + self.end_to_end_planner = EndToEndPlanningRLLearner(env=None) + + try: + self.ros_srv_gps_sensor_enable(1) + self.ros_srv_inertial_unit_enable(1) + self.ros_srv_range_sensor_enable(1) + except rospy.ServiceException as exc: + print("Service did not process request: " + str(exc)) + self.ros_pub_current_pose = rospy.Publisher('current_uav_pose', PoseStamped, queue_size=10) + self.ros_pub_target_pose = rospy.Publisher('target_uav_pose', PoseStamped, queue_size=10) + + def listen(self): + """ + Start the node and begin processing input data + """ + rospy.Subscriber(self.orientation_topic, Imu, self.imu_callback) + rospy.Subscriber(self.position_topic, PointStamped, self.gps_callback) + rospy.Subscriber(self.input_depth_image_topic, Image, self.range_callback, queue_size=1) + rospy.spin() + + def range_callback(self, data): + image_arr = self.bridge.imgmsg_to_cv2(data) + self.range_image = ((np.clip(image_arr.reshape((64, 64, 1)), 0, 15) / 15.) 
* 255).astype(np.uint8) + observation = {'depth_cam': np.copy(self.range_image), 'moving_target': np.array([5, 0, 0])} + action = self.end_to_end_planner.infer(observation, deterministic=True)[0] + self.publish_poses(action) + + def gps_callback(self, data): # for no dynamics + self.current_pose.header.stamp = rospy.Time.now() + self.current_pose.pose.position.x = -data.point.x + self.current_pose.pose.position.y = -data.point.y + self.current_pose.pose.position.z = data.point.z + + def imu_callback(self, data): # for no dynamics + self.current_orientation = data.orientation + self.current_yaw = euler_from_quaternion(data.orientation)["yaw"] + self.current_pose.pose.orientation = euler_to_quaternion(0, 0, yaw=self.current_yaw) + + def model_name_callback(self, data): + if data.data[:5] == "robot": + self.model_name = data.data + if data.data[:4] == "quad": + self.model_name = data.data + + def publish_poses(self, action): + self.ros_pub_current_pose.publish(self.current_pose) + forward_step = np.cos(action[0] * 22.5 / 180 * np.pi) + side_step = np.sin(action[0] * 22.5 / 180 * np.pi) + yaw_step = action[1] * 22.5 / 180 * np.pi + self.target_pose.header.stamp = rospy.Time.now() + self.target_pose.pose.position.x = self.current_pose.pose.position.x + forward_step * np.cos( + self.current_yaw) - side_step * np.sin(self.current_yaw) + self.target_pose.pose.position.y = self.current_pose.pose.position.y + forward_step * np.sin( + self.current_yaw) + side_step * np.cos(self.current_yaw) + self.target_pose.pose.position.z = self.current_pose.pose.position.z + self.target_pose.pose.orientation = euler_to_quaternion(0, 0, yaw=self.current_yaw+yaw_step) + self.ros_pub_target_pose.publish(self.target_pose) + + +if __name__ == '__main__': + end_to_end_planner_node = EndToEndPlannerNode() + end_to_end_planner_node.listen() diff --git a/projects/opendr_ws/src/ros_bridge/msg/.keep b/projects/opendr_ws/src/opendr_planning/src/.keep similarity index 100% rename from projects/opendr_ws/src/ros_bridge/msg/.keep rename to projects/opendr_ws/src/opendr_planning/src/.keep diff --git a/projects/opendr_ws/src/simulation/CMakeLists.txt b/projects/opendr_ws/src/opendr_simulation/CMakeLists.txt similarity index 96% rename from projects/opendr_ws/src/simulation/CMakeLists.txt rename to projects/opendr_ws/src/opendr_simulation/CMakeLists.txt index 5b25717dee..403bbf6c0e 100644 --- a/projects/opendr_ws/src/simulation/CMakeLists.txt +++ b/projects/opendr_ws/src/opendr_simulation/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.0.2) -project(simulation) +project(opendr_simulation) find_package(catkin REQUIRED COMPONENTS roscpp diff --git a/projects/opendr_ws/src/simulation/README.md b/projects/opendr_ws/src/opendr_simulation/README.md similarity index 79% rename from projects/opendr_ws/src/simulation/README.md rename to projects/opendr_ws/src/opendr_simulation/README.md index 398eac32e3..3b943e83a7 100644 --- a/projects/opendr_ws/src/simulation/README.md +++ b/projects/opendr_ws/src/opendr_simulation/README.md @@ -1,4 +1,4 @@ -# Simulation Package +# OpenDR Simulation Package This package contains ROS nodes related to simulation package of OpenDR. @@ -14,10 +14,10 @@ export PYTHONPATH=$OPENDR_HOME/src:$PYTHONPATH 2. You can start the human model generation service node. ```shell -rosrun simulation human_model_generation_service.py +rosrun opendr_simulation human_model_generation_service.py ``` 3. An example client node can run to examine the basic utilities of the service. 
```shell -rosrun simulation human_model_generation_client.py +rosrun opendr_simulation human_model_generation_client.py ``` diff --git a/projects/opendr_ws/src/simulation/package.xml b/projects/opendr_ws/src/opendr_simulation/package.xml similarity index 93% rename from projects/opendr_ws/src/simulation/package.xml rename to projects/opendr_ws/src/opendr_simulation/package.xml index cd9795529b..00df4fa4e0 100644 --- a/projects/opendr_ws/src/simulation/package.xml +++ b/projects/opendr_ws/src/opendr_simulation/package.xml @@ -1,7 +1,7 @@ - simulation - 1.1.1 + opendr_simulation + 2.0.0 OpenDR's ROS nodes for simulation package OpenDR Project Coordinator Apache License v2.0 diff --git a/projects/opendr_ws/src/simulation/scripts/human_model_generation_client.py b/projects/opendr_ws/src/opendr_simulation/scripts/human_model_generation_client.py similarity index 93% rename from projects/opendr_ws/src/simulation/scripts/human_model_generation_client.py rename to projects/opendr_ws/src/opendr_simulation/scripts/human_model_generation_client.py index 1f9470f9c6..246c757432 100644 --- a/projects/opendr_ws/src/simulation/scripts/human_model_generation_client.py +++ b/projects/opendr_ws/src/opendr_simulation/scripts/human_model_generation_client.py @@ -20,14 +20,14 @@ from cv_bridge import CvBridge from opendr_bridge import ROSBridge from std_msgs.msg import Bool -from simulation.srv import Mesh_vc +from opendr_simulation.srv import Mesh_vc from opendr.simulation.human_model_generation.utilities.model_3D import Model_3D if __name__ == '__main__': - rgb_img = cv2.imread(os.path.join(os.environ['OPENDR_HOME'], 'projects/simulation/' + rgb_img = cv2.imread(os.path.join(os.environ['OPENDR_HOME'], 'projects/python/simulation/' 'human_model_generation/demos/imgs_input/rgb/result_0004.jpg')) - msk_img = cv2.imread(os.path.join(os.environ['OPENDR_HOME'], 'projects/simulation/' + msk_img = cv2.imread(os.path.join(os.environ['OPENDR_HOME'], 'projects/python/simulation/' 'human_model_generation/demos/imgs_input/msk/result_0004.jpg')) bridge_cv = CvBridge() bridge_ros = ROSBridge() @@ -46,6 +46,6 @@ human_model = Model_3D(vertices, triangles, vertex_colors) human_model.save_obj_mesh('./human_model.obj') [out_imgs, human_pose_2D] = human_model.get_img_views(rotations=[30, 120], human_pose_3D=pose, plot_kps=True) - cv2.imwrite('./rendering.png', out_imgs[0].numpy()) + cv2.imwrite('./rendering.png', out_imgs[0].opencv()) except rospy.ServiceException as e: print("Service call failed: %s" % e) diff --git a/projects/opendr_ws/src/simulation/scripts/human_model_generation_service.py b/projects/opendr_ws/src/opendr_simulation/scripts/human_model_generation_service.py similarity index 98% rename from projects/opendr_ws/src/simulation/scripts/human_model_generation_service.py rename to projects/opendr_ws/src/opendr_simulation/scripts/human_model_generation_service.py index f869d989b3..0ad5f13643 100644 --- a/projects/opendr_ws/src/simulation/scripts/human_model_generation_service.py +++ b/projects/opendr_ws/src/opendr_simulation/scripts/human_model_generation_service.py @@ -19,7 +19,7 @@ import numpy as np from opendr_bridge import ROSBridge from opendr.simulation.human_model_generation.pifu_generator_learner import PIFuGeneratorLearner -from simulation.srv import Mesh_vc +from opendr_simulation.srv import Mesh_vc class PifuNode: diff --git a/projects/opendr_ws/src/simulation/srv/Mesh_vc.srv b/projects/opendr_ws/src/opendr_simulation/srv/Mesh_vc.srv similarity index 100% rename from 
projects/opendr_ws/src/simulation/srv/Mesh_vc.srv rename to projects/opendr_ws/src/opendr_simulation/srv/Mesh_vc.srv diff --git a/projects/opendr_ws/src/perception/README.md b/projects/opendr_ws/src/perception/README.md deleted file mode 100755 index ba0ab81059..0000000000 --- a/projects/opendr_ws/src/perception/README.md +++ /dev/null @@ -1,304 +0,0 @@ -# Perception Package - -This package contains ROS nodes related to perception package of OpenDR. - -## Dataset ROS Nodes - -Assuming that you have already [built your workspace](../../README.md) and started roscore (i.e., just run `roscore`), then you can start a dataset node to publish data from the disk, which is useful to test the functionality without the use of a sensor. -Dataset nodes take a `DatasetIterator` object that shoud returns a `(Data, Target)` pair elements. -If the type of the `Data` object is correct, the node will transform it into a corresponding ROS message object and publish it to a desired topic. - -### Point Cloud Dataset ROS Node -To get a point cloud from a dataset on the disk, you can start a `point_cloud_dataset.py` node as: -```shell -rosrun perception point_cloud_dataset.py -``` -By default, it downloads a `nano_KITTI` dataset from OpenDR's FTP server and uses it to publish data to the ROS topic. You can create an instance of this node with any `DatasetIterator` object that returns `(PointCloud, Target)` as elements. - -### Image Dataset ROS Node -To get an image from a dataset on the disk, you can start a `image_dataset.py` node as: -```shell -rosrun perception image_dataset.py -``` -By default, it downloads a `nano_MOT20` dataset from OpenDR's FTP server and uses it to publish data to the ROS topic. You can create an instance of this node with any `DatasetIterator` object that returns `(Image, Target)` as elements. - -## Pose Estimation ROS Node -Assuming that you have already [activated the OpenDR environment](../../../../docs/reference/installation.md), [built your workspace](../../README.md) and started roscore (i.e., just run `roscore`), then you can - -1. Start the node responsible for publishing images. If you have a usb camera, then you can use the corresponding node (assuming you have installed the corresponding package): - -```shell -rosrun usb_cam usb_cam_node -``` - -2. You are then ready to start the pose detection node - -```shell -rosrun perception pose_estimation.py -``` - -3. You can examine the annotated image stream using `rqt_image_view` (select the topic `/opendr/image_pose_annotated`) or - `rostopic echo /opendr/poses` - -## Fall Detection ROS Node -Assuming that you have already [activated the OpenDR environment](../../../../docs/reference/installation.md), [built your workspace](../../README.md) and started roscore (i.e., just run `roscore`), then you can - -1. Start the node responsible for publishing images. If you have a usb camera, then you can use the corresponding node (assuming you have installed the corresponding package): - -```shell -rosrun usb_cam usb_cam_node -``` - -2. You are then ready to start the fall detection node - -```shell -rosrun perception fall_detection.py -``` - -3. 
You can examine the annotated image stream using `rqt_image_view` (select the topic `/opendr/image_fall_annotated`) or - `rostopic echo /opendr/falls`, where the node publishes bounding boxes of detected fallen poses - -## Face Recognition ROS Node -Assuming that you have already [activated the OpenDR environment](../../../../docs/reference/installation.md), [built your workspace](../../README.md) and started roscore (i.e., just run `roscore`), then you can - - -1. Start the node responsible for publishing images. If you have a usb camera, then you can use the corresponding node (assuming you have installed the corresponding package): - -```shell -rosrun usb_cam usb_cam_node -``` - -2. You are then ready to start the face recognition node. Note that you should pass the folder containing the images of known faces as argument to create the corresponding database of known persons. - -```shell -rosrun perception face_recognition.py _database_path:='./database' -``` -**Notes** - -Reference images should be placed in a defined structure like: -- imgs - - ID1 - - image1 - - image2 - - ID2 - - ID3 - - ... - -Τhe name of the sub-folder, e.g. ID1, will be published under `/opendr/face_recognition_id`. - -4. The database entry and the returned confidence is published under the topic name `/opendr/face_recognition`, and the human-readable ID -under `/opendr/face_recognition_id`. - -## 2D Object Detection ROS Nodes -ROS nodes are implemented for the SSD, YOLOv3, CenterNet and DETR generic object detectors. Steps 1, 2 from above must run first. -Then, to initiate the SSD detector node, run: - -```shell -rosrun perception object_detection_2d_ssd.py -``` -The annotated image stream can be viewed using `rqt_image_view`, and the default topic name is -`/opendr/image_boxes_annotated`. The bounding boxes alone are also published as `/opendr/objects`. -Similarly, the YOLOv3, CenterNet and DETR detector nodes can be run with: -```shell -rosrun perception object_detection_2d_yolov3.py -``` -or -```shell -rosrun perception object_detection_2d_centernet.py -``` -or -```shell -rosrun perception object_detection_2d_detr.py -``` -respectively. - -## Face Detection ROS Node -A ROS node for the RetinaFace detector is implemented, supporting both the ResNet and MobileNet versions, the latter of -which performs mask recognition as well. After setting up the environment, the detector node can be initiated as: -```shell -rosrun perception face_detection_retinaface.py -``` -The annotated image stream is published under the topic name `/opendr/image_boxes_annotated`, and the bounding boxes alone -under `/opendr/faces`. - -## GEM ROS Node -Assuming that you have already [built your workspace](../../README.md) and started roscore (i.e., just run `roscore`), then you can - - -1. Add OpenDR to `PYTHONPATH` (please make sure you do not overwrite `PYTHONPATH` ), e.g., -```shell -export PYTHONPATH="/home/user/opendr/src:$PYTHONPATH" -``` -2. First one needs to find points in the color and infrared images that correspond, in order to find the homography matrix that allows to correct for the difference in perspective between the infrared and the RGB camera. -These points can be selected using a [utility tool](../../../../src/opendr/perception/object_detection_2d/utils/get_color_infra_alignment.py) that is provided in the toolkit. - -3. Pass the points you have found as *pts_color* and *pts_infra* arguments to the ROS gem.py node. - -4. Start the node responsible for publishing images. 
If you have a RealSense camera, then you can use the corresponding node (assuming you have installed [realsense2_camera](http://wiki.ros.org/realsense2_camera)): - -```shell -roslaunch realsense2_camera rs_camera.launch enable_color:=true enable_infra:=true enable_depth:=false enable_sync:=true infra_width:=640 infra_height:=480 -``` - -4. You are then ready to start the pose detection node - -```shell -rosrun perception object_detection_2d_gem.py -``` - -5. You can examine the annotated image stream using `rqt_image_view` (select one of the topics `/opendr/color_detection_annotated` or `/opendr/infra_detection_annotated`) or `rostopic echo /opendr/detections` - - -## Panoptic Segmentation ROS Node -A ROS node for performing panoptic segmentation on a specified RGB image stream using the [EfficientPS](../../../../src/opendr/perception/panoptic_segmentation/README.md) network. -Assuming that the OpenDR catkin workspace has been sourced, the node can be started with: -```shell -rosrun perception panoptic_segmentation_efficient_ps.py CHECKPOINT IMAGE_TOPIC -``` -with `CHECKPOINT` pointing to the path to the trained model weights and `IMAGE_TOPIC` specifying the ROS topic, to which the node will subscribe. - -Additionally, the following optional arguments are available: -- `-h, --help`: show a help message and exit -- `--heamap_topic HEATMAP_TOPIC`: publish the semantic and instance maps on `HEATMAP_TOPIC` -- `--visualization_topic VISUALIZATION_TOPIC`: publish the panoptic segmentation map as an RGB image on `VISUALIZATION_TOPIC` or a more detailed overview if using the `--detailed_visualization` flag -- `--detailed_visualization`: generate a combined overview of the input RGB image and the semantic, instance, and panoptic segmentation maps - - -## Semantic Segmentation ROS Node -A ROS node for performing semantic segmentation on an input image using the BiseNet model. -Assuming that the OpenDR catkin workspace has been sourced, the node can be started with: -```shell -rosrun perception semantic_segmentation_bisenet.py IMAGE_TOPIC -``` - -Additionally, the following optional arguments are available: -- `-h, --help`: show a help message and exit -- `--heamap_topic HEATMAP_TOPIC`: publish the heatmap on `HEATMAP_TOPIC` - -## RGBD Hand Gesture Recognition ROS Node - -A ROS node for performing hand gesture recognition using MobileNetv2 model trained on HANDS dataset. The node has been tested with Kinectv2 for depth data acquisition with the following drivers: https://github.com/OpenKinect/libfreenect2 and https://github.com/code-iai/iai_kinect2. Assuming that the drivers have been installed and OpenDR catkin workspace has been sourced, the node can be started as: -```shell -rosrun perception rgbd_hand_gesture_recognition.py -``` -The predictied classes are published to the topic `/opendr/gestures`. - -## Heart Anomaly Detection ROS Node - -A ROS node for performing heart anomaly (atrial fibrillation) detection from ecg data using GRU or ANBOF models trained on AF dataset. Assuming that the OpenDR catkin workspace has been sourced, the node can be started as: -```shell -rosrun perception heart_anomaly_detection.py ECG_TOPIC MODEL -``` -with `ECG_TOPIC` specifying the ROS topic to which the node will subscribe, and `MODEL` set to either *gru* or *anbof*. The predictied classes are published to the topic `/opendr/heartanomaly`. - -## Human Action Recognition ROS Node - -A ROS node for performing Human Activity Recognition using either CoX3D or X3D models pretrained on Kinetics400. 
-Assuming the drivers have been installed and OpenDR catkin workspace has been sourced, the node can be started as: -```shell -rosrun perception video_activity_recognition.py -``` -The predictied class id and confidence is published under the topic name `/opendr/human_activity_recognition`, and the human-readable class name under `/opendr/human_activity_recognition_description`. - -## Landmark-based Facial Expression Recognition ROS Node - -A ROS node for performing Landmark-based Facial Expression Recognition using the pretrained model PST-BLN on AFEW, CK+ or Oulu-CASIA datasets. -Assuming the drivers have been installed and OpenDR catkin workspace has been sourced, the node can be started as: -```shell -rosrun perception landmark_based_facial_expression_recognition.py -``` -The predictied class id and confidence is published under the topic name `/opendr/landmark_based_expression_recognition`, and the human-readable class name under `/opendr/landmark_based_expression_recognition_description`. - -## Skeleton-based Human Action Recognition ROS Node - -A ROS node for performing Skeleton-based Human Action Recognition using either ST-GCN or PST-GCN models pretrained on NTU-RGBD-60 dataset. The human body poses of the image are first extracted by the light-weight Openpose method which is implemented in the toolkit, and they are passed to the skeleton-based action recognition method to be categorized. -Assuming the drivers have been installed and OpenDR catkin workspace has been sourced, the node can be started as: -```shell -rosrun perception skeleton_based_action_recognition.py -``` -The predictied class id and confidence is published under the topic name `/opendr/skeleton_based_action_recognition`, and the human-readable class name under `/opendr/skeleton_based_action_recognition_description`. -Besides, the annotated image is published in `/opendr/image_pose_annotated` as well as the corresponding poses in `/opendr/poses`. - -## Speech Command Recognition ROS Node - -A ROS node for recognizing speech commands from an audio stream using MatchboxNet, EdgeSpeechNets or Quadratic SelfONN models, pretrained on the Google Speech Commands dataset. -Assuming that the OpenDR catkin workspace has been sourced, the node can be started with: -```shell -rosrun perception speech_command_recognition.py INPUT_AUDIO_TOPIC -``` -The following optional arguments are available: -- `--buffer_size BUFFER_SIZE`: set the size of the audio buffer (expected command duration) in seconds, default value **1.5** -- `--model MODEL`: choose the model to use: `matchboxnet` (default value), `edgespeechnets` or `quad_selfonn` -- `--model_path MODEL_PATH`: if given, the pretrained model will be loaded from the specified local path, otherwise it will be downloaded from an OpenDR FTP server - -The predictions (class id and confidence) are published to the topic `/opendr/speech_recognition`. -**Note:** EdgeSpeechNets currently does not have a pretrained model available for download, only local files may be used. - -## Voxel Object Detection 3D ROS Node - -A ROS node for performing Object Detection 3D using PointPillars or TANet methods with either pretrained models on KITTI dataset, or custom trained models. -The predicted detection annotations are pushed to `output_detection3d_topic` (default `output_detection3d_topic="/opendr/detection3d"`). 
-
-## Speech Command Recognition ROS Node
-
-A ROS node for recognizing speech commands from an audio stream using MatchboxNet, EdgeSpeechNets or Quadratic SelfONN models, pretrained on the Google Speech Commands dataset.
-Assuming that the OpenDR catkin workspace has been sourced, the node can be started with:
-```shell
-rosrun perception speech_command_recognition.py INPUT_AUDIO_TOPIC
-```
-The following optional arguments are available:
-- `--buffer_size BUFFER_SIZE`: set the size of the audio buffer (expected command duration) in seconds, default value **1.5**
-- `--model MODEL`: choose the model to use: `matchboxnet` (default value), `edgespeechnets` or `quad_selfonn`
-- `--model_path MODEL_PATH`: if given, the pretrained model will be loaded from the specified local path, otherwise it will be downloaded from an OpenDR FTP server
-
-The predictions (class id and confidence) are published to the topic `/opendr/speech_recognition`.
-**Note:** EdgeSpeechNets currently does not have a pretrained model available for download, so only local files may be used.
-
-## Voxel Object Detection 3D ROS Node
-
-A ROS node for performing Object Detection 3D using PointPillars or TANet methods with either pretrained models on the KITTI dataset, or custom trained models.
-The predicted detection annotations are pushed to `output_detection3d_topic` (default `output_detection3d_topic="/opendr/detection3d"`).
-
-Assuming the drivers have been installed and the OpenDR catkin workspace has been sourced, the node can be started as:
-```shell
-rosrun perception object_detection_3d_voxel.py
-```
-To get a point cloud from a dataset on the disk, you can start a `point_cloud_dataset.py` node as:
-```shell
-rosrun perception point_cloud_dataset.py
-```
-This will publish the dataset point clouds to the `/opendr/dataset_point_cloud` topic by default, which means that the `input_point_cloud_topic` should be set to `/opendr/dataset_point_cloud`.
-
-## AB3DMOT Object Tracking 3D ROS Node
-
-A ROS node for performing Object Tracking 3D using the stateless AB3DMOT method.
-This is a detection-based method, and therefore a 3D object detector is needed to provide detections, which are then used to make associations and generate tracking ids.
-The predicted tracking annotations are split into two topics with detections (default `output_detection_topic="/opendr/detection3d"`) and tracking ids (default `output_tracking_id_topic="/opendr/tracking3d_id"`).
-
-Assuming the drivers have been installed and the OpenDR catkin workspace has been sourced, the node can be started as:
-```shell
-rosrun perception object_tracking_3d_ab3dmot.py
-```
-To get a point cloud from a dataset on the disk, you can start a `point_cloud_dataset.py` node as:
-```shell
-rosrun perception point_cloud_dataset.py
-```
-This will publish the dataset point clouds to the `/opendr/dataset_point_cloud` topic by default, which means that the `input_point_cloud_topic` should be set to `/opendr/dataset_point_cloud`.
-
-
-## FairMOT Object Tracking 2D ROS Node
-
-A ROS node for performing Object Tracking 2D using FairMOT with either pretrained models on the MOT dataset, or custom trained models. The predicted tracking annotations are split into two topics with detections (default `output_detection_topic="/opendr/detection"`) and tracking ids (default `output_tracking_id_topic="/opendr/tracking_id"`). Additionally, an annotated image is generated if the `output_image_topic` is not None (default `output_image_topic="/opendr/image_annotated"`).
-Assuming the drivers have been installed and the OpenDR catkin workspace has been sourced, the node can be started as:
-```shell
-rosrun perception object_tracking_2d_fair_mot.py
-```
-To get images from a USB camera, you can start the camera node as:
-```shell
-rosrun usb_cam usb_cam_node
-```
-The corresponding `input_image_topic` should be `/usb_cam/image_raw`.
-If you want to use a dataset from the disk, you can start an `image_dataset.py` node as:
-```shell
-rosrun perception image_dataset.py
-```
-This will publish the dataset images to the `/opendr/dataset_image` topic by default, which means that the `input_image_topic` should be set to `/opendr/dataset_image`.
-
-## Deep Sort Object Tracking 2D ROS Node
-
-A ROS node for performing Object Tracking 2D using Deep Sort, with either pretrained models on the Market1501 dataset, or custom trained models. This is a detection-based method, and therefore a 2D object detector is needed to provide detections, which are then used to make associations and generate tracking ids. The predicted tracking annotations are split into two topics with detections (default `output_detection_topic="/opendr/detection"`) and tracking ids (default `output_tracking_id_topic="/opendr/tracking_id"`).
Additionally, an annotated image is generated if the `output_image_topic` is not None (default `output_image_topic="/opendr/image_annotated"`) -Assuming the drivers have been installed and OpenDR catkin workspace has been sourced, the node can be started as: -```shell -rosrun perception object_tracking_2d_deep_sort.py -``` -To get images from usb_camera, you can start the camera node as: -```shell -rosrun usb_cam usb_cam_node -``` -The corresponding `input_image_topic` should be `/usb_cam/image_raw`. -If you want to use a dataset from the disk, you can start an `image_dataset.py` node as: -```shell -rosrun perception image_dataset.py -``` -This will pulbish the dataset images to an `/opendr/dataset_image` topic by default, which means that the `input_image_topic` should be set to `/opendr/dataset_image`. - diff --git a/projects/opendr_ws/src/perception/scripts/face_detection_retinaface.py b/projects/opendr_ws/src/perception/scripts/face_detection_retinaface.py deleted file mode 100755 index 7227951b17..0000000000 --- a/projects/opendr_ws/src/perception/scripts/face_detection_retinaface.py +++ /dev/null @@ -1,127 +0,0 @@ -#!/usr/bin/env python -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import rospy -import mxnet as mx -from vision_msgs.msg import Detection2DArray -from sensor_msgs.msg import Image as ROS_Image -from opendr_bridge import ROSBridge -from opendr.perception.object_detection_2d import RetinaFaceLearner -from opendr.perception.object_detection_2d import draw_bounding_boxes -from opendr.engine.data import Image - - -class FaceDetectionNode: - def __init__(self, input_image_topic="/usb_cam/image_raw", output_image_topic="/opendr/image_boxes_annotated", - face_detections_topic="/opendr/faces", device="cuda", backbone="resnet"): - """ - Creates a ROS Node for face detection - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str - :param output_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing - annotated image) - :type output_image_topic: str - :param face_detections_topic: Topic to which we are publishing the annotations (if None, we are not publishing - annotated pose annotations) - :type face_detections_topic: str - :param device: device on which we are running inference ('cpu' or 'cuda') - :type device: str - :param backbone: retinaface backbone, options are ('mnet' and 'resnet'), where 'mnet' detects masked faces as well - :type backbone: str - """ - - # Initialize the face detector - self.face_detector = RetinaFaceLearner(backbone=backbone, device=device) - self.face_detector.download(path=".", verbose=True) - self.face_detector.load("retinaface_{}".format(backbone)) - self.class_names = ["face", "masked_face"] - - # Initialize OpenDR ROSBridge object - self.bridge = ROSBridge() - - # setup communications - if output_image_topic is not None: - self.image_publisher = rospy.Publisher(output_image_topic, ROS_Image, queue_size=10) - 
else: - self.image_publisher = None - - if face_detections_topic is not None: - self.face_publisher = rospy.Publisher(face_detections_topic, Detection2DArray, queue_size=10) - else: - self.face_publisher = None - - rospy.Subscriber(input_image_topic, ROS_Image, self.callback) - - def callback(self, data): - """ - Callback that process the input data and publishes to the corresponding topics - :param data: input message - :type data: sensor_msgs.msg.Image - """ - - # Convert sensor_msgs.msg.Image into OpenDR Image - image = self.bridge.from_ros_image(data, encoding='bgr8') - - # Run pose estimation - boxes = self.face_detector.infer(image) - - # Get an OpenCV image back - image = image.opencv() - - # Convert detected boxes to ROS type and publish - ros_boxes = self.bridge.to_ros_boxes(boxes) - if self.face_publisher is not None: - self.face_publisher.publish(ros_boxes) - rospy.loginfo("Published face boxes") - - # Annotate image and publish result - # NOTE: converting back to OpenDR BoundingBoxList is unnecessary here, - # only used to test the corresponding bridge methods - odr_boxes = self.bridge.from_ros_boxes(ros_boxes) - image = draw_bounding_boxes(image, odr_boxes, class_names=self.class_names) - if self.image_publisher is not None: - message = self.bridge.to_ros_image(Image(image), encoding='bgr8') - self.image_publisher.publish(message) - rospy.loginfo("Published annotated image") - - -if __name__ == '__main__': - # Automatically run on GPU/CPU - try: - if mx.context.num_gpus() > 0: - print("GPU found.") - device = 'cuda' - else: - print("GPU not found. Using CPU instead.") - device = 'cpu' - except: - device = 'cpu' - - # initialize ROS node - rospy.init_node('opendr_face_detection', anonymous=True) - rospy.loginfo("Face detection node started!") - - # get network backbone ("mnet" detects masked faces as well) - backbone = rospy.get_param("~backbone", "resnet") - input_image_topic = rospy.get_param("~input_image_topic", "/videofile/image_raw") - - rospy.loginfo("Using backbone: {}".format(backbone)) - assert backbone in ["resnet", "mnet"], "backbone should be one of ['resnet', 'mnet']" - - # created node object - face_detection_node = FaceDetectionNode(device=device, backbone=backbone, - input_image_topic=input_image_topic) - # begin ROS communications - rospy.spin() diff --git a/projects/opendr_ws/src/perception/scripts/face_recognition.py b/projects/opendr_ws/src/perception/scripts/face_recognition.py deleted file mode 100755 index 9bbe783f33..0000000000 --- a/projects/opendr_ws/src/perception/scripts/face_recognition.py +++ /dev/null @@ -1,148 +0,0 @@ -#!/usr/bin/env python -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import rospy -import torch -from vision_msgs.msg import ObjectHypothesis -from std_msgs.msg import String -from sensor_msgs.msg import Image as ROS_Image -from opendr_bridge import ROSBridge - -from opendr.perception.face_recognition import FaceRecognitionLearner -from opendr.perception.object_detection_2d import RetinaFaceLearner -from opendr.perception.object_detection_2d.datasets.transforms import BoundingBoxListToNumpyArray - - -class FaceRecognitionNode: - - def __init__(self, input_image_topic="/usb_cam/image_raw", - face_recognition_topic="/opendr/face_recognition", - face_id_topic="/opendr/face_recognition_id", - database_path="./database", device="cuda", - backbone='mobilefacenet'): - """ - Creates a ROS Node for face recognition - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str - :param face_recognition_topic: Topic to which we are publishing the recognized face info - (if None, we are not publishing the info) - :type face_recognition_topic: str - :param face_id_topic: Topic to which we are publishing the ID of the recognized person - (if None, we are not publishing the ID) - :type face_id_topic: str - :param device: device on which we are running inference ('cpu' or 'cuda') - :type device: str - """ - - # Initialize the face recognizer - self.recognizer = FaceRecognitionLearner(device=device, mode='backbone_only', backbone=backbone) - self.recognizer.download(path=".") - self.recognizer.load(".") - self.recognizer.fit_reference(database_path, save_path=".", create_new=True) - - # Initialize the face detector - self.face_detector = RetinaFaceLearner(backbone='mnet', device=device) - self.face_detector.download(path=".", verbose=True) - self.face_detector.load("retinaface_{}".format('mnet')) - self.class_names = ["face", "masked_face"] - - if face_recognition_topic is not None: - self.face_publisher = rospy.Publisher(face_recognition_topic, ObjectHypothesis, queue_size=10) - else: - self.face_publisher = None - - if face_id_topic is not None: - self.face_id_publisher = rospy.Publisher(face_id_topic, String, queue_size=10) - else: - self.face_id_publisher = None - - self.bridge = ROSBridge() - rospy.Subscriber(input_image_topic, ROS_Image, self.callback) - - def callback(self, data): - """ - Callback that process the input data and publishes to the corresponding topics - :param data: input message - :type data: sensor_msgs.msg.Image - """ - # Convert sensor_msgs.msg.Image into OpenDR Image - image = self.bridge.from_ros_image(data) - image = image.opencv() - - # Run face detection and recognition - if image is not None: - bounding_boxes = self.face_detector.infer(image) - if bounding_boxes: - bounding_boxes = BoundingBoxListToNumpyArray()(bounding_boxes) - boxes = bounding_boxes[:, :4] - for idx, box in enumerate(boxes): - (startX, startY, endX, endY) = int(box[0]), int(box[1]), int(box[2]), int(box[3]) - img = image[startY:endY, startX:endX] - result = self.recognizer.infer(img) - - if result.data is not None: - if self.face_publisher is not None: - ros_face = self.bridge.to_ros_face(result) - self.face_publisher.publish(ros_face) - - if self.face_id_publisher is not None: - ros_face_id = self.bridge.to_ros_face_id(result) - self.face_id_publisher.publish(ros_face_id.data) - - else: - result.description = "Unknown" - if self.face_publisher is not None: - ros_face = self.bridge.to_ros_face(result) - self.face_publisher.publish(ros_face) - - if self.face_id_publisher is not None: - ros_face_id = 
self.bridge.to_ros_face_id(result) - self.face_id_publisher.publish(ros_face_id.data) - - # We get can the data back using self.bridge.from_ros_face(ros_face) - # e.g. - # face = self.bridge.from_ros_face(ros_face) - # face.description = self.recognizer.database[face.id][0] - - -if __name__ == '__main__': - # Select the device for running the - try: - if torch.cuda.is_available(): - print("GPU found.") - device = 'cuda' - else: - print("GPU not found. Using CPU instead.") - device = 'cpu' - except: - device = 'cpu' - - # initialize ROS node - rospy.init_node('opendr_face_recognition', anonymous=True) - rospy.loginfo("Face recognition node started!") - - # get network backbone - backbone = rospy.get_param("~backbone", "mobilefacenet") - input_image_topic = rospy.get_param("~input_image_topic", "/usb_cam/image_raw") - database_path = rospy.get_param('~database_path', './') - rospy.loginfo("Using backbone: {}".format(backbone)) - assert backbone in ["mobilefacenet", "ir_50"], "backbone should be one of ['mobilefacenet', 'ir_50']" - - face_recognition_node = FaceRecognitionNode(device=device, backbone=backbone, - input_image_topic=input_image_topic, - database_path=database_path) - # begin ROS communications - rospy.spin() diff --git a/projects/opendr_ws/src/perception/scripts/fall_detection.py b/projects/opendr_ws/src/perception/scripts/fall_detection.py deleted file mode 100644 index ef456d2ec8..0000000000 --- a/projects/opendr_ws/src/perception/scripts/fall_detection.py +++ /dev/null @@ -1,133 +0,0 @@ -#!/usr/bin/env python -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import rospy -import torch -import cv2 -from vision_msgs.msg import Detection2DArray -from sensor_msgs.msg import Image as ROS_Image -from opendr_bridge import ROSBridge -from opendr.perception.pose_estimation import get_bbox -from opendr.perception.pose_estimation import LightweightOpenPoseLearner -from opendr.perception.fall_detection import FallDetectorLearner -from opendr.engine.data import Image -from opendr.engine.target import BoundingBox, BoundingBoxList - - -class FallDetectionNode: - - def __init__(self, input_image_topic="/usb_cam/image_raw", output_image_topic="/opendr/image_fall_annotated", - fall_annotations_topic="/opendr/falls", device="cuda"): - """ - Creates a ROS Node for fall detection - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str - :param output_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing - annotated image) - :type output_image_topic: str - :param fall_annotations_topic: Topic to which we are publishing the annotations (if None, we are not publishing - annotated fall annotations) - :type fall_annotations_topic: str - :param device: device on which we are running inference ('cpu' or 'cuda') - :type device: str - """ - if output_image_topic is not None: - self.image_publisher = rospy.Publisher(output_image_topic, ROS_Image, queue_size=10) - else: - self.image_publisher = None - - if fall_annotations_topic is not None: - self.fall_publisher = rospy.Publisher(fall_annotations_topic, Detection2DArray, queue_size=10) - else: - self.fall_publisher = None - - self.input_image_topic = input_image_topic - - self.bridge = ROSBridge() - - # Initialize the pose estimation - self.pose_estimator = LightweightOpenPoseLearner(device=device, num_refinement_stages=2, - mobilenet_use_stride=False, - half_precision=False) - self.pose_estimator.download(path=".", verbose=True) - self.pose_estimator.load("openpose_default") - - self.fall_detector = FallDetectorLearner(self.pose_estimator) - - def listen(self): - """ - Start the node and begin processing input data - """ - rospy.init_node('opendr_fall_detection', anonymous=True) - rospy.Subscriber(self.input_image_topic, ROS_Image, self.callback) - rospy.loginfo("Fall detection node started!") - rospy.spin() - - def callback(self, data): - """ - Callback that process the input data and publishes to the corresponding topics - :param data: input message - :type data: sensor_msgs.msg.Image - """ - - # Convert sensor_msgs.msg.Image into OpenDR Image - image = self.bridge.from_ros_image(data, encoding='bgr8') - - # Run fall detection - detections = self.fall_detector.infer(image) - - # Get an OpenCV image back - image = image.opencv() - - bboxes = BoundingBoxList([]) - for detection in detections: - fallen = detection[0].data - pose = detection[2] - - if fallen == 1: - color = (0, 0, 255) - x, y, w, h = get_bbox(pose) - bbox = BoundingBox(left=x, top=y, width=w, height=h, name=0) - bboxes.data.append(bbox) - - cv2.rectangle(image, (x, y), (x + w, y + h), color, 2) - cv2.putText(image, "Detected fallen person", (5, 55), cv2.FONT_HERSHEY_SIMPLEX, - 0.75, color, 1, cv2.LINE_AA) - - # Convert detected boxes to ROS type and publish - ros_boxes = self.bridge.to_ros_boxes(bboxes) - if self.fall_publisher is not None: - self.fall_publisher.publish(ros_boxes) - - if self.image_publisher is not None: - message = self.bridge.to_ros_image(Image(image), encoding='bgr8') - self.image_publisher.publish(message) - - -if __name__ == '__main__': - 
# Select the device for running the - try: - if torch.cuda.is_available(): - print("GPU found.") - device = 'cuda' - else: - print("GPU not found. Using CPU instead.") - device = 'cpu' - except: - device = 'cpu' - - fall_detection_node = FallDetectionNode(device=device) - fall_detection_node.listen() diff --git a/projects/opendr_ws/src/perception/scripts/image_dataset.py b/projects/opendr_ws/src/perception/scripts/image_dataset.py deleted file mode 100644 index 0ce4ee3850..0000000000 --- a/projects/opendr_ws/src/perception/scripts/image_dataset.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import rospy -import time -from sensor_msgs.msg import Image as ROS_Image -from opendr_bridge import ROSBridge -from opendr.engine.datasets import DatasetIterator -from opendr.perception.object_tracking_2d import MotDataset, RawMotDatasetIterator - - -class ImageDatasetNode: - def __init__( - self, - dataset: DatasetIterator, - output_image_topic="/opendr/dataset_image", - ): - """ - Creates a ROS Node for publishing dataset images - """ - - # Initialize the face detector - self.dataset = dataset - # Initialize OpenDR ROSBridge object - self.bridge = ROSBridge() - - if output_image_topic is not None: - self.output_image_publisher = rospy.Publisher( - output_image_topic, ROS_Image, queue_size=10 - ) - - def start(self): - rospy.loginfo("Timing images") - - i = 0 - - while not rospy.is_shutdown(): - - image = self.dataset[i % len(self.dataset)][0] # Dataset should have an (Image, Target) pair as elements - - rospy.loginfo("Publishing image [" + str(i) + "]") - message = self.bridge.to_ros_image( - image, encoding="rgb8" - ) - self.output_image_publisher.publish(message) - - time.sleep(0.1) - - i += 1 - - -if __name__ == "__main__": - - rospy.init_node('opendr_image_dataset') - - dataset_path = MotDataset.download_nano_mot20( - "MOT", True - ).path - - dataset = RawMotDatasetIterator( - dataset_path, - { - "mot20": os.path.join( - "..", "..", "src", "opendr", "perception", "object_tracking_2d", - "datasets", "splits", "nano_mot20.train" - ) - }, - scan_labels=False - ) - dataset_node = ImageDatasetNode(dataset) - dataset_node.start() diff --git a/projects/opendr_ws/src/perception/scripts/object_detection_2d_centernet.py b/projects/opendr_ws/src/perception/scripts/object_detection_2d_centernet.py deleted file mode 100755 index c1615f99a7..0000000000 --- a/projects/opendr_ws/src/perception/scripts/object_detection_2d_centernet.py +++ /dev/null @@ -1,122 +0,0 @@ -#!/usr/bin/env python -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import rospy -import mxnet as mx -import numpy as np -from vision_msgs.msg import Detection2DArray -from sensor_msgs.msg import Image as ROS_Image -from opendr.engine.data import Image -from opendr_bridge import ROSBridge -from opendr.perception.object_detection_2d import CenterNetDetectorLearner -from opendr.perception.object_detection_2d import draw_bounding_boxes - - -class ObjectDetectionCenterNetNode: - def __init__(self, input_image_topic="/usb_cam/image_raw", output_image_topic="/opendr/image_boxes_annotated", - detections_topic="/opendr/objects", device="cuda", backbone="resnet50_v1b"): - """ - Creates a ROS Node for face detection - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str - :param output_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing - annotated image) - :type output_image_topic: str - :param detections_topic: Topic to which we are publishing the annotations (if None, we are not publishing - annotated pose annotations) - :type detections_topic: str - :param device: device on which we are running inference ('cpu' or 'cuda') - :type device: str - :param backbone: backbone network - :type backbone: str - """ - - # Initialize the face detector - self.object_detector = CenterNetDetectorLearner(backbone=backbone, device=device) - self.object_detector.download(path=".", verbose=True) - self.object_detector.load("centernet_default") - self.class_names = self.object_detector.classes - - # Initialize OpenDR ROSBridge object - self.bridge = ROSBridge() - - # setup communications - if output_image_topic is not None: - self.image_publisher = rospy.Publisher(output_image_topic, ROS_Image, queue_size=10) - else: - self.image_publisher = None - - if detections_topic is not None: - self.bbox_publisher = rospy.Publisher(detections_topic, Detection2DArray, queue_size=10) - else: - self.bbox_publisher = None - - rospy.Subscriber(input_image_topic, ROS_Image, self.callback) - - def callback(self, data): - """ - Callback that process the input data and publishes to the corresponding topics - :param data: input message - :type data: sensor_msgs.msg.Image - """ - - # Convert sensor_msgs.msg.Image into OpenDR Image - image = self.bridge.from_ros_image(data, encoding='bgr8') - - # Run pose estimation - boxes = self.object_detector.infer(image, threshold=0.45, keep_size=False) - - # Get an OpenCV image back - image = np.float32(image.opencv()) - - # Convert detected boxes to ROS type and publish - ros_boxes = self.bridge.to_ros_boxes(boxes) - if self.bbox_publisher is not None: - self.bbox_publisher.publish(ros_boxes) - rospy.loginfo("Published face boxes") - - # Annotate image and publish result - # NOTE: converting back to OpenDR BoundingBoxList is unnecessary here, - # only used to test the corresponding bridge methods - odr_boxes = self.bridge.from_ros_boxes(ros_boxes) - image = draw_bounding_boxes(image, odr_boxes, class_names=self.class_names) - if self.image_publisher is not None: - message = self.bridge.to_ros_image(Image(image), encoding='bgr8') - 
self.image_publisher.publish(message) - rospy.loginfo("Published annotated image") - - -if __name__ == '__main__': - # Automatically run on GPU/CPU - try: - if mx.context.num_gpus() > 0: - print("GPU found.") - device = 'cuda' - else: - print("GPU not found. Using CPU instead.") - device = 'cpu' - except: - device = 'cpu' - - # initialize ROS node - rospy.init_node('opendr_object_detection', anonymous=True) - rospy.loginfo("Object detection node started!") - - input_image_topic = rospy.get_param("~input_image_topic", "/videofile/image_raw") - - # created node object - object_detection_node = ObjectDetectionCenterNetNode(device=device, input_image_topic=input_image_topic) - # begin ROS communications - rospy.spin() diff --git a/projects/opendr_ws/src/perception/scripts/object_detection_2d_detr.py b/projects/opendr_ws/src/perception/scripts/object_detection_2d_detr.py deleted file mode 100644 index ec98c4ddf0..0000000000 --- a/projects/opendr_ws/src/perception/scripts/object_detection_2d_detr.py +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/env python -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import rospy -import torch -import numpy as np -from vision_msgs.msg import Detection2DArray -from sensor_msgs.msg import Image as ROS_Image -from opendr.engine.data import Image -from opendr_bridge import ROSBridge -from opendr.perception.object_detection_2d.detr.algorithm.util.draw import draw -from opendr.perception.object_detection_2d import DetrLearner - - -class DetrNode: - - def __init__(self, input_image_topic="/usb_cam/image_raw", output_image_topic="/opendr/image_boxes_annotated", - detection_annotations_topic="/opendr/objects", device="cuda"): - """ - Creates a ROS Node for object detection with DETR - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str - :param output_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing - annotated image) - :type output_image_topic: str - :param detection_annotations_topic: Topic to which we are publishing the annotations (if None, we are not publishing - annotations) - :type detection_annotations_topic: str - :param device: device on which we are running inference ('cpu' or 'cuda') - :type device: str - """ - - if output_image_topic is not None: - self.image_publisher = rospy.Publisher(output_image_topic, ROS_Image, queue_size=10) - else: - self.image_publisher = None - - if detection_annotations_topic is not None: - self.detection_publisher = rospy.Publisher(detection_annotations_topic, Detection2DArray, queue_size=10) - else: - self.detection_publisher = None - - rospy.Subscriber(input_image_topic, ROS_Image, self.callback) - - self.bridge = ROSBridge() - - # Initialize the detection estimation - self.detr_learner = DetrLearner(device=device) - self.detr_learner.download(path=".", verbose=True) - - def listen(self): - """ - Start the node and begin processing input data - """ - rospy.init_node('detr', 
anonymous=True) - rospy.loginfo("DETR node started!") - rospy.spin() - - def callback(self, data): - """ - Callback that process the input data and publishes to the corresponding topics - :param data: input message - :type data: sensor_msgs.msg.Image - """ - - # Convert sensor_msgs.msg.Image into OpenDR Image - image = self.bridge.from_ros_image(data, encoding='bgr8') - - # Run detection estimation - boxes = self.detr_learner.infer(image) - - # Get an OpenCV image back - image = np.float32(image.opencv()) - - # Annotate image and publish results: - if self.detection_publisher is not None: - ros_detection = self.bridge.to_ros_bounding_box_list(boxes) - self.detection_publisher.publish(ros_detection) - # We get can the data back using self.bridge.from_ros_bounding_box_list(ros_detection) - # e.g., opendr_detection = self.bridge.from_ros_bounding_box_list(ros_detection) - - if self.image_publisher is not None: - image = draw(image, boxes) - message = self.bridge.to_ros_image(Image(image), encoding='bgr8') - self.image_publisher.publish(message) - - -if __name__ == '__main__': - # Select the device for running the - try: - if torch.cuda.is_available(): - print("GPU found.") - device = 'cuda' - else: - print("GPU not found. Using CPU instead.") - device = 'cpu' - except: - device = 'cpu' - - detection_estimation_node = DetrNode(device=device) - detection_estimation_node.listen() diff --git a/projects/opendr_ws/src/perception/scripts/object_detection_2d_gem.py b/projects/opendr_ws/src/perception/scripts/object_detection_2d_gem.py deleted file mode 100644 index ee1d784566..0000000000 --- a/projects/opendr_ws/src/perception/scripts/object_detection_2d_gem.py +++ /dev/null @@ -1,200 +0,0 @@ -#!/usr/bin/env python -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import rospy -import torch -import message_filters -import cv2 -import time -import numpy as np -from vision_msgs.msg import Detection2DArray -from sensor_msgs.msg import Image as ROS_Image -from opendr_bridge import ROSBridge -from opendr.perception.object_detection_2d import GemLearner -from opendr.perception.object_detection_2d import draw -from opendr.engine.data import Image - - -class GemNode: - - def __init__(self, - input_color_topic="/camera/color/image_raw", - input_infra_topic="/camera/infra/image_raw", - output_color_topic="/opendr/color_detection_annotated", - output_infra_topic="/opendr/infra_detection_annotated", - detection_annotations_topic="/opendr/detections", - device="cuda", - pts_color=None, - pts_infra=None, - ): - """ - Creates a ROS Node for object detection with GEM - :param input_color_topic: Topic from which we are reading the input color image - :type input_color_topic: str - :param input_infra_topic: Topic from which we are reading the input infrared image - :type: input_infra_topic: str - :param output_color_topic: Topic to which we are publishing the annotated color image (if None, we are not - publishing annotated image) - :type output_color_topic: str - :param output_infra_topic: Topic to which we are publishing the annotated infrared image (if None, we are not - publishing annotated image) - :type output_infra_topic: str - :param detection_annotations_topic: Topic to which we are publishing the annotations (if None, we are - not publishing annotations) - :type detection_annotations_topic: str - :param device: Device on which we are running inference ('cpu' or 'cuda') - :type device: str - :param pts_color: Point on the color image that define alignment with the infrared image. These are camera - specific and can be obtained using get_color_infra_alignment.py which is located in the - opendr/perception/object_detection2d/utils module. - :type pts_color: {list, numpy.ndarray} - :param pts_infra: Points on the infrared image that define alignment with color image. These are camera specific - and can be obtained using get_color_infra_alignment.py which is located in the - opendr/perception/object_detection2d/utils module. - :type pts_infra: {list, numpy.ndarray} - """ - rospy.init_node('gem', anonymous=True) - if output_color_topic is not None: - self.rgb_publisher = rospy.Publisher(output_color_topic, ROS_Image, queue_size=10) - else: - self.rgb_publisher = None - if output_infra_topic is not None: - self.ir_publisher = rospy.Publisher(output_infra_topic, ROS_Image, queue_size=10) - else: - self.ir_publisher = None - - if detection_annotations_topic is not None: - self.detection_publisher = rospy.Publisher(detection_annotations_topic, Detection2DArray, queue_size=10) - else: - self.detection_publisher = None - if pts_infra is None: - pts_infra = np.array([[478, 248], [465, 338], [458, 325], [468, 256], - [341, 240], [335, 310], [324, 321], [311, 383], - [434, 365], [135, 384], [67, 257], [167, 206], - [124, 131], [364, 276], [424, 269], [277, 131], - [41, 310], [202, 320], [188, 318], [188, 308], - [196, 241], [499, 317], [311, 164], [220, 216], - [435, 352], [213, 363], [390, 364], [212, 368], - [390, 370], [467, 324], [415, 364]]) - rospy.logwarn( - '\nUsing default calibration values for pts_infra!' + - '\nThese are probably incorrect.' + - '\nThe correct values for pts_infra can be found by running get_color_infra_alignment.py.' + - '\nThis file is located in the opendr/perception/object_detection2d/utils module.' 
- ) - if pts_color is None: - pts_color = np.array([[910, 397], [889, 572], [874, 552], [891, 411], - [635, 385], [619, 525], [603, 544], [576, 682], - [810, 619], [216, 688], [90, 423], [281, 310], - [193, 163], [684, 449], [806, 431], [504, 170], - [24, 538], [353, 552], [323, 550], [323, 529], - [344, 387], [961, 533], [570, 233], [392, 336], - [831, 610], [378, 638], [742, 630], [378, 648], - [742, 640], [895, 550], [787, 630]]) - rospy.logwarn( - '\nUsing default calibration values for pts_color!' + - '\nThese are probably incorrect.' + - '\nThe correct values for pts_color can be found by running get_color_infra_alignment.py.' + - '\nThis file is located in the opendr/perception/object_detection2d/utils module.' - ) - # Object classes - self.classes = ['N/A', 'chair', 'cycle', 'bin', 'laptop', 'drill', 'rocker'] - - # Estimating Homography matrix for aligning infra with RGB - self.h, status = cv2.findHomography(pts_infra, pts_color) - - self.bridge = ROSBridge() - - # Initialize the detection estimation - model_backbone = "resnet50" - - self.gem_learner = GemLearner(backbone=model_backbone, - num_classes=7, - device=device, - ) - self.gem_learner.fusion_method = 'sc_avg' - self.gem_learner.download(path=".", verbose=True) - - # Subscribers - msg_rgb = message_filters.Subscriber(input_color_topic, ROS_Image) - msg_ir = message_filters.Subscriber(input_infra_topic, ROS_Image) - - sync = message_filters.TimeSynchronizer([msg_rgb, msg_ir], 1) - sync.registerCallback(self.callback) - - def listen(self): - """ - Start the node and begin processing input data - """ - self.fps_list = [] - rospy.loginfo("GEM node started!") - rospy.spin() - - def callback(self, msg_rgb, msg_ir): - """ - Callback that process the input data and publishes to the corresponding topics - :param msg_rgb: input color image message - :type msg_rgb: sensor_msgs.msg.Image - :param msg_ir: input infrared image message - :type msg_ir: sensor_msgs.msg.Image - """ - # Convert images to OpenDR standard - image_rgb = self.bridge.from_ros_image(msg_rgb).opencv() - image_ir_raw = self.bridge.from_ros_image(msg_ir, 'bgr8').opencv() - image_ir = cv2.warpPerspective(image_ir_raw, self.h, (image_rgb.shape[1], image_rgb.shape[0])) - - # Perform inference on images - start = time.time() - boxes, w_sensor1, _ = self.gem_learner.infer(image_rgb, image_ir) - end = time.time() - - # Calculate fps - fps = 1 / (end - start) - self.fps_list.append(fps) - if len(self.fps_list) > 10: - del self.fps_list[0] - mean_fps = sum(self.fps_list) / len(self.fps_list) - - # Annotate image and publish results: - if self.detection_publisher is not None: - ros_detection = self.bridge.to_ros_bounding_box_list(boxes) - self.detection_publisher.publish(ros_detection) - # We get can the data back using self.bridge.from_ros_bounding_box_list(ros_detection) - # e.g., opendr_detection = self.bridge.from_ros_bounding_box_list(ros_detection) - - if self.rgb_publisher is not None: - plot_rgb = draw(image_rgb, boxes, w_sensor1, mean_fps) - message = self.bridge.to_ros_image(Image(np.uint8(plot_rgb))) - self.rgb_publisher.publish(message) - if self.ir_publisher is not None: - plot_ir = draw(image_ir, boxes, w_sensor1, mean_fps) - message = self.bridge.to_ros_image(Image(np.uint8(plot_ir))) - self.ir_publisher.publish(message) - - -if __name__ == '__main__': - # Select the device for running the - try: - if torch.cuda.is_available(): - print("GPU found.") - device = 'cuda' - else: - print("GPU not found. 
Using CPU instead.") - device = 'cpu' - except: - device = 'cpu' - detection_estimation_node = GemNode(device=device) - detection_estimation_node.listen() diff --git a/projects/opendr_ws/src/perception/scripts/object_detection_2d_ssd.py b/projects/opendr_ws/src/perception/scripts/object_detection_2d_ssd.py deleted file mode 100755 index f0dd7ca1d3..0000000000 --- a/projects/opendr_ws/src/perception/scripts/object_detection_2d_ssd.py +++ /dev/null @@ -1,139 +0,0 @@ -#!/usr/bin/env python -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import rospy -import mxnet as mx -import numpy as np -from vision_msgs.msg import Detection2DArray -from sensor_msgs.msg import Image as ROS_Image -from opendr_bridge import ROSBridge -from opendr.engine.data import Image -from opendr.perception.object_detection_2d import SingleShotDetectorLearner -from opendr.perception.object_detection_2d import draw_bounding_boxes -from opendr.perception.object_detection_2d import Seq2SeqNMSLearner, SoftNMS, FastNMS, ClusterNMS - - -class ObjectDetectionSSDNode: - def __init__(self, input_image_topic="/usb_cam/image_raw", output_image_topic="/opendr/image_boxes_annotated", - detections_topic="/opendr/objects", device="cuda", backbone="vgg16_atrous", nms_type='default'): - """ - Creates a ROS Node for face detection - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str - :param output_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing - annotated image) - :type output_image_topic: str - :param detections_topic: Topic to which we are publishing the annotations (if None, we are not publishing - annotated pose annotations) - :type detections_topic: str - :param device: device on which we are running inference ('cpu' or 'cuda') - :type device: str - :param backbone: backbone network - :type backbone: str - :param ms_type: type of NMS method - :type nms_type: str - """ - - # Initialize the face detector - self.object_detector = SingleShotDetectorLearner(backbone=backbone, device=device) - self.object_detector.download(path=".", verbose=True) - self.object_detector.load("ssd_default_person") - self.class_names = self.object_detector.classes - self.custom_nms = None - - # Initialize Seq2Seq-NMS if selected - if nms_type == 'seq2seq-nms': - self.custom_nms = Seq2SeqNMSLearner(fmod_map_type='EDGEMAP', iou_filtering=0.8, - app_feats='fmod', device=self.device) - self.custom_nms.download(model_name='seq2seq_pets_jpd', path='.') - self.custom_nms.load('./seq2seq_pets_jpd/', verbose=True) - elif nms_type == 'soft-nms': - self.custom_nms = SoftNMS(nms_thres=0.45, device=self.device) - elif nms_type == 'fast-nms': - self.custom_nms = FastNMS(nms_thres=0.45, device=self.device) - elif nms_type == 'cluster-nms': - self.custom_nms = ClusterNMS(nms_thres=0.45, device=self.device) - - # Initialize OpenDR ROSBridge object - self.bridge = ROSBridge() - - # setup communications - if output_image_topic is 
not None: - self.image_publisher = rospy.Publisher(output_image_topic, ROS_Image, queue_size=10) - else: - self.image_publisher = None - - if detections_topic is not None: - self.bbox_publisher = rospy.Publisher(detections_topic, Detection2DArray, queue_size=10) - else: - self.bbox_publisher = None - - rospy.Subscriber(input_image_topic, ROS_Image, self.callback) - - def callback(self, data): - """ - Callback that process the input data and publishes to the corresponding topics - :param data: input message - :type data: sensor_msgs.msg.Image - """ - - # Convert sensor_msgs.msg.Image into OpenDR Image - image = self.bridge.from_ros_image(data, encoding='bgr8') - - # Run pose estimation - boxes = self.object_detector.infer(image, threshold=0.45, keep_size=False, custom_nms=self.custom_nms) - - # Get an OpenCV image back - image = np.float32(image.opencv()) - - # Convert detected boxes to ROS type and publish - ros_boxes = self.bridge.to_ros_boxes(boxes) - if self.bbox_publisher is not None: - self.bbox_publisher.publish(ros_boxes) - rospy.loginfo("Published face boxes") - - # Annotate image and publish result - # NOTE: converting back to OpenDR BoundingBoxList is unnecessary here, - # only used to test the corresponding bridge methods - odr_boxes = self.bridge.from_ros_boxes(ros_boxes) - image = draw_bounding_boxes(image, odr_boxes, class_names=self.class_names) - if self.image_publisher is not None: - message = self.bridge.to_ros_image(Image(image), encoding='bgr8') - self.image_publisher.publish(message) - rospy.loginfo("Published annotated image") - - -if __name__ == '__main__': - # Automatically run on GPU/CPU - try: - if mx.context.num_gpus() > 0: - print("GPU found.") - device = 'cuda' - else: - print("GPU not found. Using CPU instead.") - device = 'cpu' - except: - device = 'cpu' - - # initialize ROS node - rospy.init_node('opendr_object_detection', anonymous=True) - rospy.loginfo("Object detection node started!") - - input_image_topic = rospy.get_param("~input_image_topic", "/videofile/image_raw") - - # created node object - object_detection_node = ObjectDetectionSSDNode(device=device, input_image_topic=input_image_topic) - # begin ROS communications - rospy.spin() diff --git a/projects/opendr_ws/src/perception/scripts/object_detection_2d_yolov3.py b/projects/opendr_ws/src/perception/scripts/object_detection_2d_yolov3.py deleted file mode 100755 index 93155f148b..0000000000 --- a/projects/opendr_ws/src/perception/scripts/object_detection_2d_yolov3.py +++ /dev/null @@ -1,123 +0,0 @@ -#!/usr/bin/env python -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import rospy -import mxnet as mx -import numpy as np -from vision_msgs.msg import Detection2DArray -from sensor_msgs.msg import Image as ROS_Image -from opendr_bridge import ROSBridge -from opendr.engine.data import Image -from opendr.perception.object_detection_2d import YOLOv3DetectorLearner -from opendr.perception.object_detection_2d import draw_bounding_boxes - - -class ObjectDetectionYOLONode: - def __init__(self, input_image_topic="/usb_cam/image_raw", output_image_topic="/opendr/image_boxes_annotated", - detections_topic="/opendr/objects", device="cuda", backbone="darknet53"): - """ - Creates a ROS Node for face detection - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str - :param output_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing - annotated image) - :type output_image_topic: str - :param detections_topic: Topic to which we are publishing the annotations (if None, we are not publishing - annotated pose annotations) - :type detections_topic: str - :param device: device on which we are running inference ('cpu' or 'cuda') - :type device: str - :param backbone: backbone network - :type backbone: str - """ - - # Initialize the face detector - self.object_detector = YOLOv3DetectorLearner(backbone=backbone, device=device) - self.object_detector.download(path=".", verbose=True) - self.object_detector.load("yolo_default") - self.class_names = self.object_detector.classes - - # Initialize OpenDR ROSBridge object - self.bridge = ROSBridge() - - # setup communications - if output_image_topic is not None: - self.image_publisher = rospy.Publisher(output_image_topic, ROS_Image, queue_size=10) - else: - self.image_publisher = None - - if detections_topic is not None: - self.bbox_publisher = rospy.Publisher(detections_topic, Detection2DArray, queue_size=10) - else: - self.bbox_publisher = None - - rospy.Subscriber(input_image_topic, ROS_Image, self.callback) - - def callback(self, data): - """ - Callback that process the input data and publishes to the corresponding topics - :param data: input message - :type data: sensor_msgs.msg.Image - """ - - # Convert sensor_msgs.msg.Image into OpenDR Image - image = self.bridge.from_ros_image(data, encoding='bgr8') - rospy.loginfo("image info: {}".format(image.numpy().shape)) - - # Run pose estimation - boxes = self.object_detector.infer(image, threshold=0.1, keep_size=False) - - # Get an OpenCV image back - image = np.float32(image.opencv()) - - # Convert detected boxes to ROS type and publish - ros_boxes = self.bridge.to_ros_boxes(boxes) - if self.bbox_publisher is not None: - self.bbox_publisher.publish(ros_boxes) - rospy.loginfo("Published face boxes") - - # Annotate image and publish result - # NOTE: converting back to OpenDR BoundingBoxList is unnecessary here, - # only used to test the corresponding bridge methods - odr_boxes = self.bridge.from_ros_boxes(ros_boxes) - image = draw_bounding_boxes(image, odr_boxes, class_names=self.class_names) - if self.image_publisher is not None: - message = self.bridge.to_ros_image(Image(image), encoding='bgr8') - self.image_publisher.publish(message) - rospy.loginfo("Published annotated image") - - -if __name__ == '__main__': - # Automatically run on GPU/CPU - try: - if mx.context.num_gpus() > 0: - print("GPU found.") - device = 'cuda' - else: - print("GPU not found. 
Using CPU instead.") - device = 'cpu' - except: - device = 'cpu' - - # initialize ROS node - rospy.init_node('opendr_object_detection', anonymous=True) - rospy.loginfo("Object detection node started!") - - input_image_topic = rospy.get_param("~input_image_topic", "/videofile/image_raw") - - # created node object - object_detection_node = ObjectDetectionYOLONode(device=device, input_image_topic=input_image_topic) - # begin ROS communications - rospy.spin() diff --git a/projects/opendr_ws/src/perception/scripts/object_tracking_2d_fair_mot.py b/projects/opendr_ws/src/perception/scripts/object_tracking_2d_fair_mot.py deleted file mode 100755 index 0f8d3a7373..0000000000 --- a/projects/opendr_ws/src/perception/scripts/object_tracking_2d_fair_mot.py +++ /dev/null @@ -1,192 +0,0 @@ -#!/usr/bin/env python -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import cv2 -import torch -import os -from opendr.engine.target import TrackingAnnotation -import rospy -from vision_msgs.msg import Detection2DArray -from std_msgs.msg import Int32MultiArray -from sensor_msgs.msg import Image as ROS_Image -from opendr_bridge import ROSBridge -from opendr.perception.object_tracking_2d import ( - ObjectTracking2DFairMotLearner, -) -from opendr.engine.data import Image - - -class ObjectTracking2DFairMotNode: - def __init__( - self, - input_image_topic="/usb_cam/image_raw", - output_detection_topic="/opendr/detection", - output_tracking_id_topic="/opendr/tracking_id", - output_image_topic="/opendr/image_annotated", - device="cuda:0", - model_name="fairmot_dla34", - temp_dir="temp", - ): - """ - Creates a ROS Node for 2D object tracking - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str - :param output_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing - annotated image) - :type output_image_topic: str - :param output_detection_topic: Topic to which we are publishing the detections - :type output_detection_topic: str - :param output_tracking_id_topic: Topic to which we are publishing the tracking ids - :type output_tracking_id_topic: str - :param device: device on which we are running inference ('cpu' or 'cuda') - :type device: str - :param model_name: the pretrained model to download or a saved model in temp_dir folder to use - :type model_name: str - :param temp_dir: the folder to download models - :type temp_dir: str - """ - - # # Initialize the face detector - self.learner = ObjectTracking2DFairMotLearner( - device=device, temp_path=temp_dir, - ) - if not os.path.exists(os.path.join(temp_dir, model_name)): - ObjectTracking2DFairMotLearner.download(model_name, temp_dir) - - self.learner.load(os.path.join(temp_dir, model_name), verbose=True) - - # Initialize OpenDR ROSBridge object - self.bridge = ROSBridge() - - self.detection_publisher = rospy.Publisher( - output_detection_topic, Detection2DArray, queue_size=10 - ) - self.tracking_id_publisher = rospy.Publisher( - 
output_tracking_id_topic, Int32MultiArray, queue_size=10 - ) - - if output_image_topic is not None: - self.output_image_publisher = rospy.Publisher( - output_image_topic, ROS_Image, queue_size=10 - ) - - rospy.Subscriber(input_image_topic, ROS_Image, self.callback) - - def callback(self, data): - """ - Callback that process the input data and publishes to the corresponding topics - :param data: input message - :type data: sensor_msgs.msg.Image - """ - - # Convert sensor_msgs.msg.Image into OpenDR Image - image = self.bridge.from_ros_image(data, encoding="bgr8") - tracking_boxes = self.learner.infer(image) - - if self.output_image_publisher is not None: - frame = image.opencv() - draw_predictions(frame, tracking_boxes) - message = self.bridge.to_ros_image( - Image(frame), encoding="bgr8" - ) - self.output_image_publisher.publish(message) - rospy.loginfo("Published annotated image") - - detection_boxes = tracking_boxes.bounding_box_list() - ids = [tracking_box.id for tracking_box in tracking_boxes] - - # Convert detected boxes to ROS type and publish - ros_boxes = self.bridge.to_ros_boxes(detection_boxes) - if self.detection_publisher is not None: - self.detection_publisher.publish(ros_boxes) - rospy.loginfo("Published detection boxes") - - ros_ids = Int32MultiArray() - ros_ids.data = ids - - if self.tracking_id_publisher is not None: - self.tracking_id_publisher.publish(ros_ids) - rospy.loginfo("Published tracking ids") - - -colors = [ - (255, 0, 255), - (0, 0, 255), - (0, 255, 0), - (255, 0, 0), - (35, 69, 55), - (43, 63, 54), -] - - -def draw_predictions(frame, predictions: TrackingAnnotation, is_centered=False, is_flipped_xy=True): - global colors - w, h, _ = frame.shape - - for prediction in predictions.boxes: - prediction = prediction - - if not hasattr(prediction, "id"): - prediction.id = 0 - - color = colors[int(prediction.id) * 7 % len(colors)] - - x = prediction.left - y = prediction.top - - if is_flipped_xy: - x = prediction.top - y = prediction.left - - if is_centered: - x -= prediction.width - y -= prediction.height - - cv2.rectangle( - frame, - (int(x), int(y)), - ( - int(x + prediction.width), - int(y + prediction.height), - ), - color, - 2, - ) - - -if __name__ == "__main__": - # Automatically run on GPU/CPU - device = "cuda:0" if torch.cuda.is_available() else "cpu" - - # initialize ROS node - rospy.init_node("opendr_fair_mot", anonymous=True) - rospy.loginfo("FairMOT node started") - - model_name = rospy.get_param("~model_name", "fairmot_dla34") - temp_dir = rospy.get_param("~temp_dir", "temp") - input_image_topic = rospy.get_param( - "~input_image_topic", "/opendr/dataset_image" - ) - rospy.loginfo("Using model_name: {}".format(model_name)) - - # created node object - fair_mot_node = ObjectTracking2DFairMotNode( - device=device, - model_name=model_name, - input_image_topic=input_image_topic, - temp_dir=temp_dir, - ) - # begin ROS communications - rospy.spin() diff --git a/projects/opendr_ws/src/perception/scripts/object_tracking_3d_ab3dmot.py b/projects/opendr_ws/src/perception/scripts/object_tracking_3d_ab3dmot.py deleted file mode 100644 index b9927182ce..0000000000 --- a/projects/opendr_ws/src/perception/scripts/object_tracking_3d_ab3dmot.py +++ /dev/null @@ -1,130 +0,0 @@ -#!/usr/bin/env python -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import torch -from opendr.engine.learners import Learner -import rospy -from vision_msgs.msg import Detection3DArray -from std_msgs.msg import Int32MultiArray -from sensor_msgs.msg import PointCloud as ROS_PointCloud -from opendr_bridge import ROSBridge -from opendr.perception.object_tracking_3d import ObjectTracking3DAb3dmotLearner -from opendr.perception.object_detection_3d import VoxelObjectDetection3DLearner - - -class ObjectTracking3DAb3dmotNode: - def __init__( - self, - detector: Learner, - input_point_cloud_topic="/opendr/dataset_point_cloud", - output_detection3d_topic="/opendr/detection3d", - output_tracking3d_id_topic="/opendr/tracking3d_id", - device="cuda:0", - ): - """ - Creates a ROS Node for 3D object tracking - :param detector: Learner that proides 3D object detections - :type detector: Learner - :param input_point_cloud_topic: Topic from which we are reading the input point cloud - :type input_image_topic: str - :param output_detection3d_topic: Topic to which we are publishing the annotations - :type output_detection3d_topic: str - :param output_tracking3d_id_topic: Topic to which we are publishing the tracking ids - :type output_tracking3d_id_topic: str - :param device: device on which we are running inference ('cpu' or 'cuda') - :type device: str - """ - - self.detector = detector - self.learner = ObjectTracking3DAb3dmotLearner( - device=device - ) - - # Initialize OpenDR ROSBridge object - self.bridge = ROSBridge() - - self.detection_publisher = rospy.Publisher( - output_detection3d_topic, Detection3DArray, queue_size=10 - ) - self.tracking_id_publisher = rospy.Publisher( - output_tracking3d_id_topic, Int32MultiArray, queue_size=10 - ) - - rospy.Subscriber(input_point_cloud_topic, ROS_PointCloud, self.callback) - - def callback(self, data): - """ - Callback that process the input data and publishes to the corresponding topics - :param data: input message - :type data: sensor_msgs.msg.Image - """ - - # Convert sensor_msgs.msg.Image into OpenDR Image - point_cloud = self.bridge.from_ros_point_cloud(data) - detection_boxes = self.detector.infer(point_cloud) - tracking_boxes = self.learner.infer(detection_boxes) - ids = [tracking_box.id for tracking_box in tracking_boxes] - - # Convert detected boxes to ROS type and publish - ros_boxes = self.bridge.to_ros_boxes_3d(detection_boxes, classes=["Car", "Van", "Truck", "Pedestrian", "Cyclist"]) - if self.detection_publisher is not None: - self.detection_publisher.publish(ros_boxes) - rospy.loginfo("Published detection boxes") - - ros_ids = Int32MultiArray() - ros_ids.data = ids - - if self.tracking_id_publisher is not None: - self.tracking_id_publisher.publish(ros_ids) - rospy.loginfo("Published tracking ids") - -if __name__ == "__main__": - # Automatically run on GPU/CPU - device = "cuda:0" if torch.cuda.is_available() else "cpu" - - # initialize ROS node - rospy.init_node("opendr_voxel_detection_3d", anonymous=True) - rospy.loginfo("AB3DMOT node started") - - input_point_cloud_topic = rospy.get_param( - "~input_point_cloud_topic", "/opendr/dataset_point_cloud" - ) - temp_dir = rospy.get_param("~temp_dir", 
"temp") - detector_model_name = rospy.get_param("~detector_model_name", "tanet_car_xyres_16") - detector_model_config_path = rospy.get_param( - "~detector_model_config_path", os.path.join( - "..", "..", "src", "opendr", "perception", "object_detection_3d", - "voxel_object_detection_3d", "second_detector", "configs", "tanet", - "car", "test_short.proto" - ) - ) - - detector = VoxelObjectDetection3DLearner( - device=device, temp_path=temp_dir, model_config_path=detector_model_config_path - ) - if not os.path.exists(os.path.join(temp_dir, detector_model_name)): - VoxelObjectDetection3DLearner.download(detector_model_name, temp_dir) - - detector.load(os.path.join(temp_dir, detector_model_name), verbose=True) - - # created node object - ab3dmot_node = ObjectTracking3DAb3dmotNode( - detector=detector, - device=device, - input_point_cloud_topic=input_point_cloud_topic, - ) - # begin ROS communications - rospy.spin() diff --git a/projects/opendr_ws/src/perception/scripts/pose_estimation.py b/projects/opendr_ws/src/perception/scripts/pose_estimation.py deleted file mode 100644 index 855ada40cf..0000000000 --- a/projects/opendr_ws/src/perception/scripts/pose_estimation.py +++ /dev/null @@ -1,116 +0,0 @@ -#!/usr/bin/env python -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import rospy -import torch -from vision_msgs.msg import Detection2DArray -from sensor_msgs.msg import Image as ROS_Image -from opendr_bridge import ROSBridge -from opendr.perception.pose_estimation import draw -from opendr.perception.pose_estimation import LightweightOpenPoseLearner -from opendr.engine.data import Image - - -class PoseEstimationNode: - - def __init__(self, input_image_topic="/usb_cam/image_raw", output_image_topic="/opendr/image_pose_annotated", - pose_annotations_topic="/opendr/poses", device="cuda"): - """ - Creates a ROS Node for pose detection - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str - :param output_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing - annotated image) - :type output_image_topic: str - :param pose_annotations_topic: Topic to which we are publishing the annotations (if None, we are not publishing - annotated pose annotations) - :type pose_annotations_topic: str - :param device: device on which we are running inference ('cpu' or 'cuda') - :type device: str - """ - if output_image_topic is not None: - self.image_publisher = rospy.Publisher(output_image_topic, ROS_Image, queue_size=10) - else: - self.image_publisher = None - - if pose_annotations_topic is not None: - self.pose_publisher = rospy.Publisher(pose_annotations_topic, Detection2DArray, queue_size=10) - else: - self.pose_publisher = None - - self.input_image_topic = input_image_topic - - self.bridge = ROSBridge() - - # Initialize the pose estimation - self.pose_estimator = LightweightOpenPoseLearner(device=device, num_refinement_stages=0, - mobilenet_use_stride=False, - half_precision=False) - self.pose_estimator.download(path=".", verbose=True) - self.pose_estimator.load("openpose_default") - - def listen(self): - """ - Start the node and begin processing input data - """ - rospy.init_node('opendr_pose_estimation', anonymous=True) - rospy.Subscriber(self.input_image_topic, ROS_Image, self.callback) - rospy.loginfo("Pose estimation node started!") - rospy.spin() - - def callback(self, data): - """ - Callback that process the input data and publishes to the corresponding topics - :param data: input message - :type data: sensor_msgs.msg.Image - """ - - # Convert sensor_msgs.msg.Image into OpenDR Image - image = self.bridge.from_ros_image(data, encoding='bgr8') - - # Run pose estimation - poses = self.pose_estimator.infer(image) - - # Get an OpenCV image back - image = image.opencv() - # Annotate image and publish results - for pose in poses: - if self.pose_publisher is not None: - ros_pose = self.bridge.to_ros_pose(pose) - self.pose_publisher.publish(ros_pose) - # We get can the data back using self.bridge.from_ros_pose(ros_pose) - # e.g., opendr_pose = self.bridge.from_ros_pose(ros_pose) - draw(image, pose) - - if self.image_publisher is not None: - message = self.bridge.to_ros_image(Image(image), encoding='bgr8') - self.image_publisher.publish(message) - - -if __name__ == '__main__': - # Select the device for running the - try: - if torch.cuda.is_available(): - print("GPU found.") - device = 'cuda' - else: - print("GPU not found. 
Using CPU instead.") - device = 'cpu' - except: - device = 'cpu' - - pose_estimation_node = PoseEstimationNode(device=device) - pose_estimation_node.listen() diff --git a/projects/opendr_ws/src/perception/scripts/rgbd_hand_gesture_recognition.py b/projects/opendr_ws/src/perception/scripts/rgbd_hand_gesture_recognition.py deleted file mode 100755 index 69150856ad..0000000000 --- a/projects/opendr_ws/src/perception/scripts/rgbd_hand_gesture_recognition.py +++ /dev/null @@ -1,131 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import rospy -import torch -import numpy as np -from sensor_msgs.msg import Image as ROS_Image -from opendr_bridge import ROSBridge -import os -from opendr.perception.multimodal_human_centric import RgbdHandGestureLearner -from opendr.engine.data import Image -from vision_msgs.msg import Classification2D -import message_filters -import cv2 - - -class RgbdHandGestureNode: - - def __init__(self, input_image_topic="/usb_cam/image_raw", input_depth_image_topic="/usb_cam/image_raw", - gesture_annotations_topic="/opendr/gestures", device="cuda"): - """ - Creates a ROS Node for gesture recognition from RGBD - :param input_image_topic: Topic from which we are reading the input image - :type input_image_topic: str - :param input_depth_image_topic: Topic from which we are reading the input depth image - :type input_depth_image_topic: str - :param gesture_annotations_topic: Topic to which we are publishing the predicted gesture class - :type gesture_annotations_topic: str - :param device: device on which we are running inference ('cpu' or 'cuda') - :type device: str - """ - - self.gesture_publisher = rospy.Publisher(gesture_annotations_topic, Classification2D, queue_size=10) - - image_sub = message_filters.Subscriber(input_image_topic, ROS_Image) - depth_sub = message_filters.Subscriber(input_depth_image_topic, ROS_Image) - # synchronize image and depth data topics - ts = message_filters.TimeSynchronizer([image_sub, depth_sub], 10) - ts.registerCallback(self.callback) - - self.bridge = ROSBridge() - - # Initialize the gesture recognition - self.gesture_learner = RgbdHandGestureLearner(n_class=16, architecture="mobilenet_v2", device=device) - model_path = './mobilenet_v2' - if not os.path.exists(model_path): - self.gesture_learner.download(path=model_path) - self.gesture_learner.load(path=model_path) - - # mean and std for preprocessing, based on HANDS dataset - self.mean = np.asarray([0.485, 0.456, 0.406, 0.0303]).reshape(1, 1, 4) - self.std = np.asarray([0.229, 0.224, 0.225, 0.0353]).reshape(1, 1, 4) - - def listen(self): - """ - Start the node and begin processing input data - """ - rospy.init_node('opendr_gesture_recognition', anonymous=True) - rospy.loginfo("RGBD gesture recognition node started!") - rospy.spin() - - def callback(self, image_data, depth_data): - """ - Callback that process the input data and publishes to the corresponding topics - :param image_data: input 
image message - :type image_data: sensor_msgs.msg.Image - :param depth_data: input depth image message - :type depth_data: sensor_msgs.msg.Image - """ - - # Convert sensor_msgs.msg.Image into OpenDR Image and preprocess - image = self.bridge.from_ros_image(image_data, encoding='bgr8') - depth_data.encoding = 'mono16' - depth_image = self.bridge.from_ros_image_to_depth(depth_data, encoding='mono16') - img = self.preprocess(image, depth_image) - - # Run gesture recognition - gesture_class = self.gesture_learner.infer(img) - - # Publish results - ros_gesture = self.bridge.from_category_to_rosclass(gesture_class) - self.gesture_publisher.publish(ros_gesture) - - def preprocess(self, image, depth_img): - ''' - Preprocess image, depth_image and concatenate them - :param image_data: input image - :type image_data: engine.data.Image - :param depth_data: input depth image - :type depth_data: engine.data.Image - ''' - image = image.convert(format='channels_last') / (2**8 - 1) - depth_img = depth_img.convert(format='channels_last') / (2**16 - 1) - - # resize the images to 224x224 - image = cv2.resize(image, (224, 224)) - depth_img = cv2.resize(depth_img, (224, 224)) - - # concatenate and standardize - img = np.concatenate([image, np.expand_dims(depth_img, axis=-1)], axis=-1) - img = (img - self.mean) / self.std - img = Image(img, dtype=np.float32) - return img - -if __name__ == '__main__': - # Select the device for running - try: - device = 'cuda' if torch.cuda.is_available() else 'cpu' - except: - device = 'cpu' - - # default topics are according to kinectv2 drivers at https://github.com/OpenKinect/libfreenect2 - # and https://github.com/code-iai-iai_kinect2 - depth_topic = "/kinect2/qhd/image_depth_rect" - image_topic = "/kinect2/qhd/image_color_rect" - gesture_node = RgbdHandGestureNode(input_image_topic=image_topic, input_depth_image_topic=depth_topic, device=device) - gesture_node.listen() diff --git a/projects/opendr_ws/src/perception/scripts/semantic_segmentation_bisenet.py b/projects/opendr_ws/src/perception/scripts/semantic_segmentation_bisenet.py deleted file mode 100644 index 32390c9157..0000000000 --- a/projects/opendr_ws/src/perception/scripts/semantic_segmentation_bisenet.py +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env python -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import torch -import rospy -from sensor_msgs.msg import Image as ROS_Image -from opendr_bridge import ROSBridge -from opendr.engine.data import Image -from opendr.perception.semantic_segmentation import BisenetLearner -import numpy as np -import cv2 - - -class BisenetNode: - def __init__(self, - input_image_topic, - output_heatmap_topic=None, - device="cuda" - ): - """ - Initialize the Bisenet ROS node and create an instance of the respective learner class. 
- :param input_image_topic: ROS topic for the input image - :type input_image_topic: str - :param output_heatmap_topic: ROS topic for the predicted heatmap - :type output_heatmap_topic: str - :param device: device on which we are running inference ('cpu' or 'cuda') - :type device: str - """ - self.input_image_topic = input_image_topic - self.output_heatmap_topic = output_heatmap_topic - - if self.output_heatmap_topic is not None: - self._heatmap_publisher = rospy.Publisher(f'{self.output_heatmap_topic}/semantic', ROS_Image, queue_size=10) - else: - self._heatmap_publisher = None - - rospy.Subscriber(self.input_image_topic, ROS_Image, self.callback) - - # Initialize OpenDR ROSBridge object - self._bridge = ROSBridge() - - # Initialize the semantic segmentation model - self._learner = BisenetLearner(device=device) - self._learner.download(path="bisenet_camvid") - self._learner.load("bisenet_camvid") - - self._colors = np.random.randint(0, 256, (256, 3), dtype=np.uint8) - - def listen(self): - """ - Start the node and begin processing input data - """ - rospy.init_node('bisenet', anonymous=True) - rospy.loginfo("Bisenet node started!") - rospy.spin() - - def callback(self, data: ROS_Image): - """ - Predict the heatmap from the input image and publish the results. - :param data: Input image message - :type data: sensor_msgs.msg.Image - """ - # Convert sensor_msgs.msg.Image to OpenDR Image - image = self._bridge.from_ros_image(data) - - try: - # Retrieve the OpenDR heatmap - prediction = self._learner.infer(image) - - if self._heatmap_publisher is not None and self._heatmap_publisher.get_num_connections() > 0: - heatmap_np = prediction.numpy() - heatmap_o = self._colors[heatmap_np] - heatmap_o = cv2.resize(np.uint8(heatmap_o), (960, 720)) - self._heatmap_publisher.publish(self._bridge.to_ros_image(Image(heatmap_o), encoding='bgr8')) - - except Exception: - rospy.logwarn('Failed to generate prediction.') - - -if __name__ == '__main__': - # Select the device for running the - try: - if torch.cuda.is_available(): - print("GPU found.") - device = "cuda" - else: - print("GPU not found. Using CPU instead.") - device = "cpu" - except: - device = "cpu" - - parser = argparse.ArgumentParser() - parser.add_argument('image_topic', type=str, help='listen to images on this topic') - parser.add_argument('--heatmap_topic', type=str, help='publish the heatmap on this topic') - args = parser.parse_args() - - bisenet_node = BisenetNode(device=device, input_image_topic=args.image_topic, output_heatmap_topic=args.heatmap_topic) - bisenet_node.listen() diff --git a/projects/opendr_ws_2/README.md b/projects/opendr_ws_2/README.md new file mode 100755 index 0000000000..4379f69595 --- /dev/null +++ b/projects/opendr_ws_2/README.md @@ -0,0 +1,88 @@ +# opendr_ws_2 + +## Description +This ROS2 workspace contains ROS2 nodes and tools developed by OpenDR project. Currently, ROS2 nodes are compatible with ROS2 Foxy. +This workspace contains the `opendr_ros2_bridge` package, which contains the `ROS2Bridge` class that provides an interface to convert OpenDR data types and targets into ROS-compatible +ones similar to CvBridge. The workspace also contains the `opendr_ros2_interfaces` which provides message and service definitions for ROS-compatible OpenDR data types. You can find more information in the corresponding [opendr_ros2_bridge documentation](../../docs/reference/ros2bridge.md) and [opendr_ros2_interfaces documentation](). + +## First time setup + +For the initial setup you can follow the instructions below: + +0. 
Make sure that [ROS2-foxy is installed.](https://docs.ros.org/en/foxy/Installation/Ubuntu-Install-Debians.html)
+
+1. Source the necessary distribution tools:
+   ```shell
+   source /opt/ros/foxy/setup.bash
+   ```
+   _For convenience, you can add this line to your `.bashrc` so you don't have to source the tools each time you open a terminal window._
+
+
+
+2. Navigate to your OpenDR home directory (`~/opendr`) and activate the OpenDR environment using:
+   ```shell
+   source bin/activate.sh
+   ```
+   You need to do this step every time before running an OpenDR node.
+
+3. Navigate into the OpenDR ROS2 workspace:
+   ```shell
+   cd projects/opendr_ws_2
+   ```
+
+4. Build the packages inside the workspace:
+   ```shell
+   colcon build
+   ```
+
+5. Source the workspace:
+   ```shell
+   . install/setup.bash
+   ```
+   You are now ready to run an OpenDR ROS2 node.
+
+#### After first time setup
+For running OpenDR nodes after you have completed the initial setup, you can skip step 0 from the list above.
+You can also skip building the workspace (step 4), provided it has already been built and no changes have since been made to the code inside the workspace, e.g. you have not modified the source code of a node.
+
+#### More information
+After completing the setup, you can find more information in the [opendr perception package README](src/opendr_perception/README.md), including a concise list of prerequisites and helpful notes on viewing the output of the nodes and optimizing their performance.
+
+#### Node documentation
+You can also take a look at the list of tools [below](#structure) and click on the links to navigate directly to the documentation of specific nodes, with instructions on how to run and modify them.
+
+**For first-time users, we suggest reading the introductory sections (prerequisites and notes) first.**
+
+## Structure
+
+Currently, apart from tools, opendr_ws_2 contains the following ROS2 nodes (categorized according to the input they receive):
+
+### [Perception](src/opendr_perception/README.md)
+## RGB input
+1. [Pose Estimation](src/opendr_perception/README.md#pose-estimation-ros2-node)
+2. [High Resolution Pose Estimation](src/opendr_perception/README.md#high-resolution-pose-estimation-ros2-node)
+3. [Fall Detection](src/opendr_perception/README.md#fall-detection-ros2-node)
+4. [Face Detection](src/opendr_perception/README.md#face-detection-ros2-node)
+5. [Face Recognition](src/opendr_perception/README.md#face-recognition-ros2-node)
+6. [2D Object Detection](src/opendr_perception/README.md#2d-object-detection-ros2-nodes)
+7. [2D Single Object Tracking](src/opendr_perception/README.md#2d-single-object-tracking-ros2-node)
+8. [2D Object Tracking](src/opendr_perception/README.md#2d-object-tracking-ros2-nodes)
+9. [Panoptic Segmentation](src/opendr_perception/README.md#panoptic-segmentation-ros2-node)
+10. [Semantic Segmentation](src/opendr_perception/README.md#semantic-segmentation-ros2-node)
+11. [Image-based Facial Emotion Estimation](src/opendr_perception/README.md#image-based-facial-emotion-estimation-ros2-node)
+12. [Landmark-based Facial Expression Recognition](src/opendr_perception/README.md#landmark-based-facial-expression-recognition-ros2-node)
+13. [Skeleton-based Human Action Recognition](src/opendr_perception/README.md#skeleton-based-human-action-recognition-ros2-node)
+14. [Video Human Activity Recognition](src/opendr_perception/README.md#video-human-activity-recognition-ros2-node)
+## RGB + Infrared input
+1. 
[End-to-End Multi-Modal Object Detection (GEM)](src/opendr_perception/README.md#2d-object-detection-gem-ros2-node) +## RGBD input +1. [RGBD Hand Gesture Recognition](src/opendr_perception/README.md#rgbd-hand-gesture-recognition-ros2-node) +## RGB + Audio input +1. [Audiovisual Emotion Recognition](src/opendr_perception/README.md#audiovisual-emotion-recognition-ros2-node) +## Audio input +1. [Speech Command Recognition](src/opendr_perception/README.md#speech-command-recognition-ros2-node) +## Point cloud input +1. [3D Object Detection Voxel](src/opendr_perception/README.md#3d-object-detection-voxel-ros2-node) +2. [3D Object Tracking AB3DMOT](src/opendr_perception/README.md#3d-object-tracking-ab3dmot-ros2-node) +## Biosignal input +1. [Heart Anomaly Detection](src/opendr_perception/README.md#heart-anomaly-detection-ros2-node) diff --git a/projects/opendr_ws_2/images/opendr_node_diagram.png b/projects/opendr_ws_2/images/opendr_node_diagram.png new file mode 100644 index 0000000000..70b202ad3c Binary files /dev/null and b/projects/opendr_ws_2/images/opendr_node_diagram.png differ diff --git a/projects/opendr_ws_2/src/opendr_bridge/opendr_bridge/__init__.py b/projects/opendr_ws_2/src/opendr_bridge/opendr_bridge/__init__.py new file mode 100644 index 0000000000..06c41996d7 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_bridge/opendr_bridge/__init__.py @@ -0,0 +1,3 @@ +from opendr_bridge.bridge import ROS2Bridge + +__all__ = ['ROS2Bridge', ] diff --git a/projects/opendr_ws_2/src/opendr_bridge/opendr_bridge/bridge.py b/projects/opendr_ws_2/src/opendr_bridge/opendr_bridge/bridge.py new file mode 100644 index 0000000000..3deb3f8207 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_bridge/opendr_bridge/bridge.py @@ -0,0 +1,629 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numpy as np +from opendr.engine.data import Image, PointCloud, Timeseries +from opendr.engine.target import ( + Pose, BoundingBox, BoundingBoxList, Category, + BoundingBox3D, BoundingBox3DList, TrackingAnnotation +) +from cv_bridge import CvBridge +from std_msgs.msg import String, ColorRGBA, Header +from sensor_msgs.msg import Image as ImageMsg, PointCloud as PointCloudMsg, ChannelFloat32 as ChannelFloat32Msg +from vision_msgs.msg import ( + Detection2DArray, Detection2D, BoundingBox2D, ObjectHypothesisWithPose, + Detection3D, Detection3DArray, BoundingBox3D as BoundingBox3DMsg, + Classification2D, ObjectHypothesis +) +from shape_msgs.msg import Mesh, MeshTriangle +from geometry_msgs.msg import ( + Pose2D, Point32 as Point32Msg, + Quaternion as QuaternionMsg, Pose as Pose3D, + Point +) +from opendr_interface.msg import OpenDRPose2D, OpenDRPose2DKeypoint, OpenDRPose3D, OpenDRPose3DKeypoint + + +class ROS2Bridge: + """ + This class provides an interface to convert OpenDR data types and targets into ROS2-compatible ones similar + to CvBridge. 
+ For each data type X two methods are provided: + from_ros_X: which converts the ROS2 equivalent of X into OpenDR data type + to_ros_X: which converts the OpenDR data type into the ROS2 equivalent of X + """ + + def __init__(self): + self._cv_bridge = CvBridge() + + def to_ros_image(self, image: Image, encoding: str='passthrough') -> ImageMsg: + """ + Converts an OpenDR image into a ROS2 image message + :param image: OpenDR image to be converted + :type image: engine.data.Image + :param encoding: encoding to be used for the conversion (inherited from CvBridge) + :type encoding: str + :return: ROS2 image + :rtype: sensor_msgs.msg.Image + """ + # Convert from the OpenDR standard (CHW/RGB) to OpenCV standard (HWC/BGR) + message = self._cv_bridge.cv2_to_imgmsg(image.opencv(), encoding=encoding) + return message + + def from_ros_image(self, message: ImageMsg, encoding: str='passthrough') -> Image: + """ + Converts a ROS2 image message into an OpenDR image + :param message: ROS2 image to be converted + :type message: sensor_msgs.msg.Image + :param encoding: encoding to be used for the conversion (inherited from CvBridge) + :type encoding: str + :return: OpenDR image (RGB) + :rtype: engine.data.Image + """ + cv_image = self._cv_bridge.imgmsg_to_cv2(message, desired_encoding=encoding) + image = Image(np.asarray(cv_image, dtype=np.uint8)) + return image + + def to_ros_pose(self, pose: Pose): + """ + Converts an OpenDR Pose into a OpenDRPose2D msg that can carry the same information, i.e. a list of keypoints, + the pose detection confidence and the pose id. + Each keypoint is represented as an OpenDRPose2DKeypoint with x, y pixel position on input image with (0, 0) + being the top-left corner. + :param pose: OpenDR Pose to be converted to OpenDRPose2D + :type pose: engine.target.Pose + :return: ROS message with the pose + :rtype: opendr_interface.msg.OpenDRPose2D + """ + data = pose.data + # Setup ros pose + ros_pose = OpenDRPose2D() + ros_pose.pose_id = int(pose.id) + if pose.confidence: + ros_pose.conf = pose.confidence + + # Add keypoints to pose + for i in range(data.shape[0]): + ros_keypoint = OpenDRPose2DKeypoint() + ros_keypoint.kpt_name = pose.kpt_names[i] + ros_keypoint.x = int(data[i][0]) + ros_keypoint.y = int(data[i][1]) + # Add keypoint to pose + ros_pose.keypoint_list.append(ros_keypoint) + return ros_pose + + def from_ros_pose(self, ros_pose: OpenDRPose2D): + """ + Converts an OpenDRPose2D message into an OpenDR Pose. + :param ros_pose: the ROS pose to be converted + :type ros_pose: opendr_interface.msg.OpenDRPose2D + :return: an OpenDR Pose + :rtype: engine.target.Pose + """ + ros_keypoints = ros_pose.keypoint_list + keypoints = [] + pose_id, confidence = ros_pose.pose_id, ros_pose.conf + + for ros_keypoint in ros_keypoints: + keypoints.append(int(ros_keypoint.x)) + keypoints.append(int(ros_keypoint.y)) + data = np.asarray(keypoints).reshape((-1, 2)) + + pose = Pose(data, confidence) + pose.id = pose_id + return pose + + def to_ros_boxes(self, box_list): + """ + Converts an OpenDR BoundingBoxList into a Detection2DArray msg that can carry the same information. + Each bounding box is represented by its center coordinates as well as its width/height dimensions. 
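+        A typical use inside a detector node might look like the following (a sketch only; `self.bridge`
+        is assumed to be a ROS2Bridge instance, `boxes` a BoundingBoxList returned by a detector's
+        infer() call, and `self.detection_publisher` a publisher of vision_msgs.msg.Detection2DArray):
+            ros_boxes = self.bridge.to_ros_boxes(boxes)
+            self.detection_publisher.publish(ros_boxes)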
+ :param box_list: OpenDR bounding boxes to be converted + :type box_list: engine.target.BoundingBoxList + :return: ROS2 message with the bounding boxes + :rtype: vision_msgs.msg.Detection2DArray + """ + boxes = box_list.data + ros_boxes = Detection2DArray() + for idx, box in enumerate(boxes): + ros_box = Detection2D() + ros_box.bbox = BoundingBox2D() + ros_box.results.append(ObjectHypothesisWithPose()) + ros_box.bbox.center = Pose2D() + ros_box.bbox.center.x = box.left + box.width / 2. + ros_box.bbox.center.y = box.top + box.height / 2. + ros_box.bbox.size_x = float(box.width) + ros_box.bbox.size_y = float(box.height) + ros_box.results[0].id = str(box.name) + if box.confidence: + ros_box.results[0].score = float(box.confidence) + ros_boxes.detections.append(ros_box) + return ros_boxes + + def from_ros_boxes(self, ros_detections): + """ + Converts a ROS2 message with bounding boxes into an OpenDR BoundingBoxList + :param ros_detections: the boxes to be converted (represented as vision_msgs.msg.Detection2DArray) + :type ros_detections: vision_msgs.msg.Detection2DArray + :return: an OpenDR BoundingBoxList + :rtype: engine.target.BoundingBoxList + """ + ros_boxes = ros_detections.detections + bboxes = BoundingBoxList(boxes=[]) + + for idx, box in enumerate(ros_boxes): + width = box.bbox.size_x + height = box.bbox.size_y + left = box.bbox.center.x - width / 2. + top = box.bbox.center.y - height / 2. + _id = int(float(box.results[0].id.strip('][').split(', ')[0])) + bbox = BoundingBox(top=top, left=left, width=width, height=height, name=_id) + bboxes.data.append(bbox) + return bboxes + + def to_ros_bounding_box_list(self, bounding_box_list): + """ + Converts an OpenDR bounding_box_list into a Detection2DArray msg that can carry the same information + The object class is also embedded on each bounding box (stored in ObjectHypothesisWithPose). 
+ :param bounding_box_list: OpenDR bounding_box_list to be converted + :type bounding_box_list: engine.target.BoundingBoxList + :return: ROS2 message with the bounding box list + :rtype: vision_msgs.msg.Detection2DArray + """ + detections = Detection2DArray() + for bounding_box in bounding_box_list: + detection = Detection2D() + detection.bbox = BoundingBox2D() + detection.results.append(ObjectHypothesisWithPose()) + detection.bbox.center = Pose2D() + detection.bbox.center.x = bounding_box.left + bounding_box.width / 2.0 + detection.bbox.center.y = bounding_box.top + bounding_box.height / 2.0 + detection.bbox.size_x = float(bounding_box.width) + detection.bbox.size_y = float(bounding_box.height) + detection.results[0].id = str(bounding_box.name) + detection.results[0].score = float(bounding_box.confidence) + detections.detections.append(detection) + return detections + + def from_ros_bounding_box_list(self, ros_detection_2d_array): + """ + Converts a ROS2 message with bounding box list payload into an OpenDR pose + :param ros_detection_2d_array: the bounding boxes to be converted (represented as + vision_msgs.msg.Detection2DArray) + :type ros_detection_2d_array: vision_msgs.msg.Detection2DArray + :return: an OpenDR bounding box list + :rtype: engine.target.BoundingBoxList + """ + detections = ros_detection_2d_array.detections + boxes = [] + + for detection in detections: + width = detection.bbox.size_x + height = detection.bbox.size_y + left = detection.bbox.center.x - width / 2.0 + top = detection.bbox.center.y - height / 2.0 + name = detection.results[0].id + score = detection.results[0].confidence + boxes.append(BoundingBox(name, left, top, width, height, score)) + bounding_box_list = BoundingBoxList(boxes) + return bounding_box_list + + def from_ros_single_tracking_annotation(self, ros_detection_box): + """ + Converts a pair of ROS messages with bounding boxes and tracking ids into an OpenDR TrackingAnnotationList + :param ros_detection_box: The boxes to be converted. + :type ros_detection_box: vision_msgs.msg.Detection2D + :return: An OpenDR TrackingAnnotationList + :rtype: engine.target.TrackingAnnotationList + """ + width = ros_detection_box.bbox.size_x + height = ros_detection_box.bbox.size_y + left = ros_detection_box.bbox.center.x - width / 2. + top = ros_detection_box.bbox.center.y - height / 2. + id = 0 + bbox = TrackingAnnotation( + name=id, + left=left, + top=top, + width=width, + height=height, + id=0, + frame=-1 + ) + return bbox + + def to_ros_single_tracking_annotation(self, tracking_annotation): + """ + Converts a pair of ROS messages with bounding boxes and tracking ids into an OpenDR TrackingAnnotationList + :param tracking_annotation: The box to be converted. 
+ :type tracking_annotation: engine.target.TrackingAnnotation + :return: A ROS vision_msgs.msg.Detection2D + :rtype: vision_msgs.msg.Detection2D + """ + ros_box = Detection2D() + ros_box.bbox = BoundingBox2D() + ros_box.results.append(ObjectHypothesisWithPose()) + ros_box.bbox.center = Pose2D() + ros_box.bbox.center.x = tracking_annotation.left + tracking_annotation.width / 2.0 + ros_box.bbox.center.y = tracking_annotation.top + tracking_annotation.height / 2.0 + ros_box.bbox.size_x = float(tracking_annotation.width) + ros_box.bbox.size_y = float(tracking_annotation.height) + ros_box.results[0].id = str(tracking_annotation.name) + ros_box.results[0].score = float(-1) + return ros_box + + def to_ros_face(self, category): + """ + Converts an OpenDR category into a ObjectHypothesis msg that can carry the Category.data and + Category.confidence. + :param category: OpenDR category to be converted + :type category: engine.target.Category + :return: ROS2 message with the category.data and category.confidence + :rtype: vision_msgs.msg.ObjectHypothesis + """ + result = ObjectHypothesisWithPose() + result.id = str(category.data) + result.score = category.confidence + return result + + def from_ros_face(self, ros_hypothesis): + """ + Converts a ROS2 message with category payload into an OpenDR category + :param ros_hypothesis: the object hypothesis to be converted + :type ros_hypothesis: vision_msgs.msg.ObjectHypothesis + :return: an OpenDR category + :rtype: engine.target.Category + """ + return Category(prediction=ros_hypothesis.id, description=None, + confidence=ros_hypothesis.score) + + def to_ros_face_id(self, category): + """ + Converts an OpenDR category into a string msg that can carry the Category.description. + :param category: OpenDR category to be converted + :type category: engine.target.Category + :return: ROS2 message with the category.description + :rtype: std_msgs.msg.String + """ + result = String() + result.data = category.description + return result + + def from_ros_point_cloud(self, point_cloud: PointCloudMsg): + """ + Converts a ROS PointCloud message into an OpenDR PointCloud + :param point_cloud: ROS PointCloud to be converted + :type point_cloud: sensor_msgs.msg.PointCloud + :return: OpenDR PointCloud + :rtype: engine.data.PointCloud + """ + + points = np.empty([len(point_cloud.points), 3 + len(point_cloud.channels)], dtype=np.float32) + + for i in range(len(point_cloud.points)): + point = point_cloud.points[i] + x, y, z = point.x, point.y, point.z + + points[i, 0] = x + points[i, 1] = y + points[i, 2] = z + + for q in range(len(point_cloud.channels)): + points[i, 3 + q] = point_cloud.channels[q].values[i] + + result = PointCloud(points) + + return result + + def to_ros_point_cloud(self, point_cloud, time_stamp): + """ + Converts an OpenDR PointCloud message into a ROS2 PointCloud + :param point_cloud: OpenDR PointCloud + :type point_cloud: engine.data.PointCloud + :param time_stamp: Time stamp + :type time_stamp: ROS Time + :return: ROS PointCloud + :rtype: sensor_msgs.msg.PointCloud + """ + + ros_point_cloud = PointCloudMsg() + + header = Header() + + header.stamp = time_stamp + ros_point_cloud.header = header + + channels_count = point_cloud.data.shape[-1] - 3 + + channels = [ChannelFloat32Msg(name="channel_" + str(i), values=[]) for i in range(channels_count)] + points = [] + + for point in point_cloud.data: + point_msg = Point32Msg() + point_msg.x = float(point[0]) + point_msg.y = float(point[1]) + point_msg.z = float(point[2]) + points.append(point_msg) + for i in 
range(channels_count): + channels[i].values.append(float(point[3 + i])) + + ros_point_cloud.points = points + ros_point_cloud.channels = channels + + return ros_point_cloud + + def from_ros_boxes_3d(self, ros_boxes_3d): + """ + Converts a ROS2 Detection3DArray message into an OpenDR BoundingBox3D object. + :param ros_boxes_3d: The ROS boxes to be converted. + :type ros_boxes_3d: vision_msgs.msg.Detection3DArray + :return: An OpenDR BoundingBox3DList object. + :rtype: engine.target.BoundingBox3DList + """ + boxes = [] + + for ros_box in ros_boxes_3d: + + box = BoundingBox3D( + name=ros_box.results[0].id, + truncated=0, + occluded=0, + bbox2d=None, + dimensions=np.array([ + ros_box.bbox.size.position.x, + ros_box.bbox.size.position.y, + ros_box.bbox.size.position.z, + ]), + location=np.array([ + ros_box.bbox.center.position.x, + ros_box.bbox.center.position.y, + ros_box.bbox.center.position.z, + ]), + rotation_y=ros_box.bbox.center.rotation.y, + score=ros_box.results[0].score, + ) + boxes.append(box) + + result = BoundingBox3DList(boxes) + return result + + def to_ros_boxes_3d(self, boxes_3d): + """ + Converts an OpenDR BoundingBox3DList object into a ROS2 Detection3DArray message. + :param boxes_3d: The OpenDR boxes to be converted. + :type boxes_3d: engine.target.BoundingBox3DList + :return: ROS message with the boxes + :rtype: vision_msgs.msg.Detection3DArray + """ + ros_boxes_3d = Detection3DArray() + for i in range(len(boxes_3d)): + box = Detection3D() + box.bbox = BoundingBox3DMsg() + box.results.append(ObjectHypothesisWithPose()) + box.bbox.center = Pose3D() + box.bbox.center.position.x = float(boxes_3d[i].location[0]) + box.bbox.center.position.y = float(boxes_3d[i].location[1]) + box.bbox.center.position.z = float(boxes_3d[i].location[2]) + box.bbox.center.orientation = QuaternionMsg(x=0.0, y=float(boxes_3d[i].rotation_y), z=0.0, w=0.0) + box.bbox.size.x = float(boxes_3d[i].dimensions[0]) + box.bbox.size.y = float(boxes_3d[i].dimensions[1]) + box.bbox.size.z = float(boxes_3d[i].dimensions[2]) + box.results[0].id = boxes_3d[i].name + box.results[0].score = float(boxes_3d[i].confidence) + ros_boxes_3d.detections.append(box) + return ros_boxes_3d + + def from_ros_mesh(self, mesh_ROS): + """ + Converts a ROS mesh into arrays of vertices and faces of a mesh + :param mesh_ROS: the ROS mesh to be converted + :type mesh_ROS: shape_msgs.msg.Mesh + :return vertices: Numpy array Nx3 representing vertices of the 3D model respectively + :rtype vertices: np.array + :return faces: Numpy array Nx3 representing the IDs of the vertices of each face of the 3D model + :rtype faces: numpy array (Nx3) + """ + vertices = np.zeros([len(mesh_ROS.vertices), 3]) + faces = np.zeros([len(mesh_ROS.triangles), 3]).astype(int) + for i in range(len(mesh_ROS.vertices)): + vertices[i] = np.array([mesh_ROS.vertices[i].x, mesh_ROS.vertices[i].y, mesh_ROS.vertices[i].z]) + for i in range(len(mesh_ROS.triangles)): + faces[i] = np.array([int(mesh_ROS.triangles[i].vertex_indices[0]), int(mesh_ROS.triangles[i].vertex_indices[1]), + int(mesh_ROS.triangles[i].vertex_indices[2])]).astype(int) + return vertices, faces + + def to_ros_mesh(self, vertices, faces): + """ + Converts a mesh into a ROS Mesh + :param vertices: the vertices of the 3D model + :type vertices: numpy array (Nx3) + :param faces: the faces of the 3D model + :type faces: numpy array (Nx3) + :return mesh_ROS: a ROS mesh + :rtype mesh_ROS: shape_msgs.msg.Mesh + """ + mesh_ROS = Mesh() + for i in range(vertices.shape[0]): + point = Point() + point.x = vertices[i, 
0] + point.y = vertices[i, 1] + point.z = vertices[i, 2] + mesh_ROS.vertices.append(point) + for i in range(faces.shape[0]): + mesh_triangle = MeshTriangle() + mesh_triangle.vertex_indices[0] = int(faces[i][0]) + mesh_triangle.vertex_indices[1] = int(faces[i][1]) + mesh_triangle.vertex_indices[2] = int(faces[i][2]) + mesh_ROS.triangles.append(mesh_triangle) + return mesh_ROS + + def from_ros_colors(self, ros_colors): + """ + Converts a list of ROS colors into a list of colors + :param ros_colors: a list of the colors of the vertices + :type ros_colors: std_msgs.msg.ColorRGBA[] + :return colors: the colors of the vertices of the 3D model + :rtype colors: numpy array (Nx3) + """ + colors = np.zeros([len(ros_colors), 3]) + for i in range(len(ros_colors)): + colors[i] = np.array([ros_colors[i].r, ros_colors[i].g, ros_colors[i].b]) + return colors + + def to_ros_colors(self, colors): + """ + Converts an array of vertex_colors to a list of ROS colors + :param colors: a numpy array of RGB colors + :type colors: numpy array (Nx3) + :return ros_colors: a list of the colors of the vertices + :rtype ros_colors: std_msgs.msg.ColorRGBA[] + """ + ros_colors = [] + for i in range(colors.shape[0]): + color = ColorRGBA() + color.r = colors[i, 0] + color.g = colors[i, 1] + color.b = colors[i, 2] + color.a = 0.0 + ros_colors.append(color) + return ros_colors + + def from_ros_pose_3D(self, ros_pose): + """ + Converts a ROS message with pose payload into an OpenDR pose + :param ros_pose: the pose to be converted (represented as opendr_interface.msg.OpenDRPose3D) + :type ros_pose: opendr_interface.msg.OpenDRPose3D + :return: an OpenDR pose + :rtype: engine.target.Pose + """ + keypoints = ros_pose.keypoint_list + data = [] + for i, keypoint in enumerate(keypoints): + data.append([keypoint.x, keypoint.y, keypoint.z]) + pose = Pose(data, 1.0) + pose.id = 0 + return pose + + def to_ros_pose_3D(self, pose): + """ + Converts an OpenDR pose into a OpenDRPose3D msg that can carry the same information + Each keypoint is represented as an OpenDRPose3DKeypoint with x, y, z coordinates. + :param pose: OpenDR pose to be converted + :type pose: engine.target.Pose + :return: ROS message with the pose + :rtype: opendr_interface.msg.OpenDRPose3D + """ + data = pose.data + ros_pose = OpenDRPose3D() + ros_pose.pose_id = 0 + if pose.id is not None: + ros_pose.pose_id = int(pose.id) + ros_pose.conf = 1.0 + for i in range(len(data)): + keypoint = OpenDRPose3DKeypoint() + keypoint.kpt_name = '' + keypoint.x = float(data[i][0]) + keypoint.y = float(data[i][1]) + keypoint.z = float(data[i][2]) + ros_pose.keypoint_list.append(keypoint) + return ros_pose + + def to_ros_category(self, category): + """ + Converts an OpenDR category into a ObjectHypothesis msg that can carry the Category.data and Category.confidence. 
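+        For example (a sketch; `category`, `self.bridge` and `self.hypothesis_publisher` are assumed
+        names for a Category result, a ROS2Bridge instance and an ObjectHypothesis publisher):
+            hypothesis = self.bridge.to_ros_category(category)  # vision_msgs.msg.ObjectHypothesis
+            self.hypothesis_publisher.publish(hypothesis)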
+ :param category: OpenDR category to be converted + :type category: engine.target.Category + :return: ROS message with the category.data and category.confidence + :rtype: vision_msgs.msg.ObjectHypothesis + """ + result = ObjectHypothesis() + result.id = str(category.data) + result.score = float(category.confidence) + return result + + def from_ros_category(self, ros_hypothesis): + """ + Converts a ROS message with category payload into an OpenDR category + :param ros_hypothesis: the object hypothesis to be converted + :type ros_hypothesis: vision_msgs.msg.ObjectHypothesis + :return: an OpenDR category + :rtype: engine.target.Category + """ + category = Category(prediction=ros_hypothesis.id, description=None, + confidence=ros_hypothesis.score) + return category + + def to_ros_category_description(self, category): + """ + Converts an OpenDR category into a string msg that can carry the Category.description. + :param category: OpenDR category to be converted + :type category: engine.target.Category + :return: ROS message with the category.description + :rtype: std_msgs.msg.String + """ + result = String() + result.data = category.description + return result + + def from_rosarray_to_timeseries(self, ros_array, dim1, dim2): + """ + Converts ROS2 array into OpenDR Timeseries object + :param ros_array: data to be converted + :type ros_array: std_msgs.msg.Float32MultiArray + :param dim1: 1st dimension + :type dim1: int + :param dim2: 2nd dimension + :type dim2: int + :rtype: engine.data.Timeseries + """ + data = np.reshape(ros_array.data, (dim1, dim2)) + data = Timeseries(data) + return data + + def from_ros_image_to_depth(self, message, encoding='mono16'): + """ + Converts a ROS2 image message into an OpenDR grayscale depth image + :param message: ROS2 image to be converted + :type message: sensor_msgs.msg.Image + :param encoding: encoding to be used for the conversion + :type encoding: str + :return: OpenDR image + :rtype: engine.data.Image + """ + cv_image = self._cv_bridge.imgmsg_to_cv2(message, desired_encoding=encoding) + cv_image = np.expand_dims(cv_image, axis=-1) + image = Image(np.asarray(cv_image, dtype=np.uint8)) + return image + + def from_category_to_rosclass(self, prediction, timestamp, source_data=None): + """ + Converts OpenDR Category into Classification2D message with class label, confidence, timestamp and corresponding input + :param prediction: classification prediction + :type prediction: engine.target.Category + :param timestamp: time stamp for header message + :type timestamp: str + :param source_data: corresponding input or None + :return classification + :rtype: vision_msgs.msg.Classification2D + """ + classification = Classification2D() + classification.header = Header() + classification.header.stamp = timestamp + + result = ObjectHypothesis() + result.id = str(prediction.data) + result.score = prediction.confidence + classification.results.append(result) + if source_data is not None: + classification.source_img = source_data + return classification diff --git a/projects/opendr_ws_2/src/opendr_bridge/package.xml b/projects/opendr_ws_2/src/opendr_bridge/package.xml new file mode 100644 index 0000000000..290546ab5d --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_bridge/package.xml @@ -0,0 +1,21 @@ + + + + opendr_bridge + 2.0.0 + OpenDR ROS2 bridge package. This package provides a way to translate ROS2 messages into OpenDR data types + and vice versa. 
+ OpenDR Project Coordinator + Apache License v2.0 + + rclpy + + ament_copyright + ament_flake8 + ament_pep257 + python3-pytest + + + ament_python + + diff --git a/projects/control/eagerx/demos/__init__.py b/projects/opendr_ws_2/src/opendr_bridge/resource/opendr_bridge similarity index 100% rename from projects/control/eagerx/demos/__init__.py rename to projects/opendr_ws_2/src/opendr_bridge/resource/opendr_bridge diff --git a/projects/opendr_ws_2/src/opendr_bridge/setup.cfg b/projects/opendr_ws_2/src/opendr_bridge/setup.cfg new file mode 100644 index 0000000000..9d9e5c012f --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_bridge/setup.cfg @@ -0,0 +1,4 @@ +[develop] +script_dir=$base/lib/opendr_bridge +[install] +install_scripts=$base/lib/opendr_bridge diff --git a/projects/opendr_ws_2/src/opendr_bridge/setup.py b/projects/opendr_ws_2/src/opendr_bridge/setup.py new file mode 100644 index 0000000000..df933edd8b --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_bridge/setup.py @@ -0,0 +1,26 @@ +from setuptools import setup + +package_name = 'opendr_bridge' + +setup( + name=package_name, + version='2.0.0', + packages=[package_name], + data_files=[ + ('share/ament_index/resource_index/packages', + ['resource/' + package_name]), + ('share/' + package_name, ['package.xml']), + ], + install_requires=['setuptools'], + zip_safe=True, + maintainer='OpenDR Project Coordinator', + maintainer_email='tefas@csd.auth.gr', + description='OpenDR ROS2 bridge package. This package provides a way to translate ROS2 messages into OpenDR' + + 'data types and vice versa.', + license='Apache License v2.0', + tests_require=['pytest'], + entry_points={ + 'console_scripts': [ + ], + }, +) diff --git a/projects/opendr_ws_2/src/opendr_bridge/test/test_copyright.py b/projects/opendr_ws_2/src/opendr_bridge/test/test_copyright.py new file mode 100644 index 0000000000..cc8ff03f79 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_bridge/test/test_copyright.py @@ -0,0 +1,23 @@ +# Copyright 2015 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_copyright.main import main +import pytest + + +@pytest.mark.copyright +@pytest.mark.linter +def test_copyright(): + rc = main(argv=['.', 'test']) + assert rc == 0, 'Found errors' diff --git a/projects/opendr_ws_2/src/opendr_bridge/test/test_flake8.py b/projects/opendr_ws_2/src/opendr_bridge/test/test_flake8.py new file mode 100644 index 0000000000..27ee1078ff --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_bridge/test/test_flake8.py @@ -0,0 +1,25 @@ +# Copyright 2017 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_flake8.main import main_with_errors +import pytest + + +@pytest.mark.flake8 +@pytest.mark.linter +def test_flake8(): + rc, errors = main_with_errors(argv=[]) + assert rc == 0, \ + 'Found %d code style errors / warnings:\n' % len(errors) + \ + '\n'.join(errors) diff --git a/projects/opendr_ws_2/src/opendr_bridge/test/test_pep257.py b/projects/opendr_ws_2/src/opendr_bridge/test/test_pep257.py new file mode 100644 index 0000000000..b234a3840f --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_bridge/test/test_pep257.py @@ -0,0 +1,23 @@ +# Copyright 2015 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_pep257.main import main +import pytest + + +@pytest.mark.linter +@pytest.mark.pep257 +def test_pep257(): + rc = main(argv=['.', 'test']) + assert rc == 0, 'Found code style errors / warnings' diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/__init__.py b/projects/opendr_ws_2/src/opendr_data_generation/opendr_data_generation/__init__.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/__init__.py rename to projects/opendr_ws_2/src/opendr_data_generation/opendr_data_generation/__init__.py diff --git a/projects/opendr_ws_2/src/opendr_data_generation/opendr_data_generation/synthetic_facial_generation_node.py b/projects/opendr_ws_2/src/opendr_data_generation/opendr_data_generation/synthetic_facial_generation_node.py new file mode 100644 index 0000000000..094a20cfd8 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_data_generation/opendr_data_generation/synthetic_facial_generation_node.py @@ -0,0 +1,173 @@ +#!/usr/bin/env python3.6 +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
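+
+# Example invocation (a sketch; assumes the opendr_ws_2 workspace has been built with colcon and
+# sourced, and uses the console script name declared in this package's setup.py):
+#   ros2 run opendr_data_generation synthetic_facial_generation \
+#       -i /image_raw -o /opendr/synthetic_facial_images --device cuda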
+ +import cv2 +import os +import argparse +import numpy as np + +import rclpy +from rclpy.node import Node + +from sensor_msgs.msg import Image as ROS_Image +from cv_bridge import CvBridge + +from opendr.projects.python.simulation.synthetic_multi_view_facial_image_generation.algorithm.DDFA.utils.ddfa \ + import str2bool +from opendr.src.opendr.engine.data import Image +from opendr.projects.python.simulation.synthetic_multi_view_facial_image_generation.SyntheticDataGeneration \ + import MultiviewDataGeneration + + +class SyntheticDataGeneratorNode(Node): + + def __init__(self, args, input_rgb_image_topic="/image_raw", + output_rgb_image_topic="/opendr/synthetic_facial_images"): + """ + Creates a ROS Node for SyntheticDataGeneration + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the synthetic facial image (if None, no image + is published) + :type output_rgb_image_topic: str + """ + super().__init__('synthetic_facial_image_generation_node') + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 10) + self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + self._cv_bridge = CvBridge() + self.ID = 0 + self.args = args + self.path_in = args.path_in + self.key = str(args.path_3ddfa + "/example/Images/") + self.key1 = str(args.path_3ddfa + "/example/") + self.key2 = str(args.path_3ddfa + "/results/") + self.save_path = args.save_path + self.val_yaw = args.val_yaw + self.val_pitch = args.val_pitch + self.device = args.device + + # Initialize the SyntheticDataGeneration + self.synthetic = MultiviewDataGeneration(self.args) + + def callback(self, data): + """ + Callback that process the input data and publishes to the corresponding topics + :param data: input message + :type data: sensor_msgs.msg.Image + """ + + # Convert sensor_msgs.msg.Image into OpenDR Image + + cv_image = self._cv_bridge.imgmsg_to_cv2(data, desired_encoding="rgb8") + image = Image(np.asarray(cv_image, dtype=np.uint8)) + self.ID = self.ID + 1 + # Get an OpenCV image back + image = cv2.cvtColor(image.opencv(), cv2.COLOR_RGBA2BGR) + name = str(f"{self.ID:02d}" + "_single.jpg") + cv2.imwrite(os.path.join(self.path_in, name), image) + + if self.ID == 10: + # Run SyntheticDataGeneration + self.synthetic.eval() + self.ID = 0 + # Annotate image and publish results + current_directory_path = os.path.join(self.save_path, str("/Documents_orig/")) + for file in os.listdir(current_directory_path): + name, ext = os.path.splitext(file) + if ext == ".jpg": + image_file_savepath = os.path.join(current_directory_path, file) + cv_image = cv2.imread(image_file_savepath) + cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB) + if self.image_publisher is not None: + image = Image(np.array(cv_image, dtype=np.uint8)) + message = self.bridge.to_ros_image(image, encoding="rgb8") + self.image_publisher.publish(message) + for f in os.listdir(self.path_in): + os.remove(os.path.join(self.path_in, f)) + + +def main(args=None): + rclpy.init(args=args) + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=str, default="/opendr/synthetic_facial_images") + parser.add_argument("--path_in", default=os.path.join("opendr", "projects", + "data_generation", + 
"synthetic_multi_view_facial_image_generation", + "demos", "imgs_input"), + type=str, help='Give the path of image folder') + parser.add_argument('--path_3ddfa', default=os.path.join("opendr", "projects", + "data_generation", + "synthetic_multi_view_facial_image_generation", + "algorithm", "DDFA"), + type=str, help='Give the path of DDFA folder') + parser.add_argument('--save_path', default=os.path.join("opendr", "projects", + "data_generation", + "synthetic_multi_view_facial_image_generation", + "results"), + type=str, help='Give the path of results folder') + parser.add_argument('--val_yaw', default="10 20", nargs='+', type=str, help='yaw poses list between [-90,90]') + parser.add_argument('--val_pitch', default="30 40", nargs='+', type=str, help='pitch poses list between [-90,90]') + parser.add_argument("--device", default="cuda", type=str, help="choose between cuda or cpu ") + parser.add_argument('-f', '--files', nargs='+', + help='image files paths fed into network, single or multiple images') + parser.add_argument('--show_flg', default='false', type=str2bool, help='whether show the visualization result') + parser.add_argument('--dump_res', default='true', type=str2bool, + help='whether write out the visualization image') + parser.add_argument('--dump_vertex', default='false', type=str2bool, + help='whether write out the dense face vertices to mat') + parser.add_argument('--dump_ply', default='true', type=str2bool) + parser.add_argument('--dump_pts', default='true', type=str2bool) + parser.add_argument('--dump_roi_box', default='false', type=str2bool) + parser.add_argument('--dump_pose', default='true', type=str2bool) + parser.add_argument('--dump_depth', default='true', type=str2bool) + parser.add_argument('--dump_pncc', default='true', type=str2bool) + parser.add_argument('--dump_paf', default='true', type=str2bool) + parser.add_argument('--paf_size', default=3, type=int, help='PAF feature kernel size') + parser.add_argument('--dump_obj', default='true', type=str2bool) + parser.add_argument('--dlib_bbox', default='true', type=str2bool, help='whether use dlib to predict bbox') + parser.add_argument('--dlib_landmark', default='true', type=str2bool, + help='whether use dlib landmark to crop image') + parser.add_argument('-m', '--mode', default='gpu', type=str, help='gpu or cpu mode') + parser.add_argument('--bbox_init', default='two', type=str, + help='one|two: one-step bbox initialization or two-step') + parser.add_argument('--dump_2d_img', default='true', type=str2bool, help='whether to save 3d rendered image') + parser.add_argument('--dump_param', default='true', type=str2bool, help='whether to save param') + parser.add_argument('--dump_lmk', default='true', type=str2bool, help='whether to save landmarks') + parser.add_argument('--save_dir', default='./algorithm/DDFA/results', type=str, help='dir to save result') + parser.add_argument('--save_lmk_dir', default='./example', type=str, help='dir to save landmark result') + parser.add_argument('--img_list', default='./txt_name_batch.txt', type=str, help='test image list file') + parser.add_argument('--rank', default=0, type=int, help='used when parallel run') + parser.add_argument('--world_size', default=1, type=int, help='used when parallel run') + parser.add_argument('--resume_idx', default=0, type=int) + args = parser.parse_args() + + synthetic_data_generation_node = SyntheticDataGeneratorNode(args=args, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic) + + 
+    rclpy.spin(synthetic_data_generation_node)
+
+    # Destroy the node explicitly
+    # (optional - otherwise it will be done automatically
+    # when the garbage collector destroys the node object)
+    synthetic_data_generation_node.destroy_node()
+    rclpy.shutdown()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/projects/opendr_ws_2/src/opendr_data_generation/package.xml b/projects/opendr_ws_2/src/opendr_data_generation/package.xml
new file mode 100644
index 0000000000..e6f73e51d2
--- /dev/null
+++ b/projects/opendr_ws_2/src/opendr_data_generation/package.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
+<package format="3">
+  <name>opendr_data_generation</name>
+  <version>2.0.0</version>
+  <description>OpenDR's ROS2 nodes for data generation package</description>
+  <maintainer email="tefas@csd.auth.gr">tefas</maintainer>
+  <license>Apache License v2.0</license>
+
+  <depend>sensor_msgs</depend>
+
+  <exec_depend>rclpy</exec_depend>
+  <exec_depend>opendr_bridge</exec_depend>
+
+  <buildtool_depend>ament_cmake</buildtool_depend>
+
+  <test_depend>ament_copyright</test_depend>
+  <test_depend>ament_flake8</test_depend>
+  <test_depend>ament_pep257</test_depend>
+  <test_depend>python3-pytest</test_depend>
+
+  <export>
+    <build_type>ament_python</build_type>
+  </export>
+</package>
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/__init__.py b/projects/opendr_ws_2/src/opendr_data_generation/resource/opendr_data_generation
similarity index 100%
rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/__init__.py
rename to projects/opendr_ws_2/src/opendr_data_generation/resource/opendr_data_generation
diff --git a/projects/opendr_ws_2/src/opendr_data_generation/setup.cfg b/projects/opendr_ws_2/src/opendr_data_generation/setup.cfg
new file mode 100644
index 0000000000..893b4dda07
--- /dev/null
+++ b/projects/opendr_ws_2/src/opendr_data_generation/setup.cfg
@@ -0,0 +1,4 @@
+[develop]
+script_dir=$base/lib/opendr_data_generation
+[install]
+install_scripts=$base/lib/opendr_data_generation
diff --git a/projects/opendr_ws_2/src/opendr_data_generation/setup.py b/projects/opendr_ws_2/src/opendr_data_generation/setup.py
new file mode 100644
index 0000000000..0735f378c8
--- /dev/null
+++ b/projects/opendr_ws_2/src/opendr_data_generation/setup.py
@@ -0,0 +1,40 @@
+# Copyright 2020-2022 OpenDR European Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +from setuptools import setup + +package_name = 'opendr_data_generation' + +setup( + name=package_name, + version='2.0.0', + packages=[package_name], + data_files=[ + ('share/ament_index/resource_index/packages', + ['resource/' + package_name]), + ('share/' + package_name, ['package.xml']), + ], + install_requires=['setuptools'], + zip_safe=True, + maintainer='OpenDR Project Coordinator', + maintainer_email='tefas@csd.auth.gr', + description='OpenDR\'s ROS2 nodes for data generation package', + license='Apache License v2.0', + tests_require=['pytest'], + entry_points={ + 'console_scripts': [ + 'synthetic_facial_generation = opendr_data_generation.synthetic_facial_generation_node:main' + ], + }, +) diff --git a/projects/opendr_ws_2/src/opendr_data_generation/test/test_copyright.py b/projects/opendr_ws_2/src/opendr_data_generation/test/test_copyright.py new file mode 100644 index 0000000000..cc8ff03f79 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_data_generation/test/test_copyright.py @@ -0,0 +1,23 @@ +# Copyright 2015 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_copyright.main import main +import pytest + + +@pytest.mark.copyright +@pytest.mark.linter +def test_copyright(): + rc = main(argv=['.', 'test']) + assert rc == 0, 'Found errors' diff --git a/projects/opendr_ws_2/src/opendr_data_generation/test/test_flake8.py b/projects/opendr_ws_2/src/opendr_data_generation/test/test_flake8.py new file mode 100644 index 0000000000..18bd9331ea --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_data_generation/test/test_flake8.py @@ -0,0 +1,25 @@ +# Copyright 2015 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_flake8.main import main_with_errors +import pytest + + +@pytest.mark.flake8 +@pytest.mark.linter +def test_flake8(): + rc, errors = main_with_errors(argv=[]) + assert rc == 0, \ + 'Found %d code style errors / warnings:\n' % len(errors) + \ + '\n'.join(errors) diff --git a/projects/opendr_ws_2/src/opendr_data_generation/test/test_pep257.py b/projects/opendr_ws_2/src/opendr_data_generation/test/test_pep257.py new file mode 100644 index 0000000000..b234a3840f --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_data_generation/test/test_pep257.py @@ -0,0 +1,23 @@ +# Copyright 2015 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_pep257.main import main +import pytest + + +@pytest.mark.linter +@pytest.mark.pep257 +def test_pep257(): + rc = main(argv=['.', 'test']) + assert rc == 0, 'Found code style errors / warnings' diff --git a/projects/opendr_ws_2/src/opendr_interface/CMakeLists.txt b/projects/opendr_ws_2/src/opendr_interface/CMakeLists.txt new file mode 100644 index 0000000000..9c158812e5 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_interface/CMakeLists.txt @@ -0,0 +1,50 @@ +cmake_minimum_required(VERSION 3.5) +project(opendr_interface) + +# Default to C99 +if(NOT CMAKE_C_STANDARD) + set(CMAKE_C_STANDARD 99) +endif() + +# Default to C++14 +if(NOT CMAKE_CXX_STANDARD) + set(CMAKE_CXX_STANDARD 14) +endif() + +if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") + add_compile_options(-Wall -Wextra -Wpedantic) +endif() + +# find dependencies +find_package(ament_cmake REQUIRED) +# uncomment the following section in order to fill in +# further dependencies manually. +# find_package( REQUIRED) +find_package(std_msgs REQUIRED) +find_package(shape_msgs REQUIRED) +find_package(sensor_msgs REQUIRED) +find_package(vision_msgs REQUIRED) +find_package(rosidl_default_generators REQUIRED) + +rosidl_generate_interfaces(${PROJECT_NAME} + "msg/OpenDRPose2D.msg" + "msg/OpenDRPose2DKeypoint.msg" + "msg/OpenDRPose3D.msg" + "msg/OpenDRPose3DKeypoint.msg" + "srv/OpenDRSingleObjectTracking.srv" + "srv/ImgToMesh.srv" + DEPENDENCIES std_msgs shape_msgs sensor_msgs vision_msgs +) + +if(BUILD_TESTING) + find_package(ament_lint_auto REQUIRED) + # the following line skips the linter which checks for copyrights + # uncomment the line when a copyright and license is not present in all source files + #set(ament_cmake_copyright_FOUND TRUE) + # the following line skips cpplint (only works in a git repo) + # uncomment the line when this package is not in a git repo + #set(ament_cmake_cpplint_FOUND TRUE) + ament_lint_auto_find_test_dependencies() +endif() + +ament_package() diff --git a/projects/perception/lightweight_open_pose/jetbot/results/.keep b/projects/opendr_ws_2/src/opendr_interface/include/opendr_interface/.keep similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/results/.keep rename to projects/opendr_ws_2/src/opendr_interface/include/opendr_interface/.keep diff --git a/projects/opendr_ws_2/src/opendr_interface/msg/OpenDRPose2D.msg b/projects/opendr_ws_2/src/opendr_interface/msg/OpenDRPose2D.msg new file mode 100644 index 0000000000..184f3fd11b --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_interface/msg/OpenDRPose2D.msg @@ -0,0 +1,26 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# This message represents a full OpenDR human pose 2D as a list of keypoints + +std_msgs/Header header + +# The id of the pose +int32 pose_id + +# The pose detection confidence of the model +float32 conf + +# A list of a human 2D pose keypoints +OpenDRPose2DKeypoint[] keypoint_list \ No newline at end of file diff --git a/projects/opendr_ws_2/src/opendr_interface/msg/OpenDRPose2DKeypoint.msg b/projects/opendr_ws_2/src/opendr_interface/msg/OpenDRPose2DKeypoint.msg new file mode 100644 index 0000000000..72d14a19f2 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_interface/msg/OpenDRPose2DKeypoint.msg @@ -0,0 +1,22 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This message contains all relevant information for an OpenDR human pose 2D keypoint + +# The kpt_name according to https://github.com/opendr-eu/opendr/blob/master/docs/reference/lightweight-open-pose.md#notes +string kpt_name + +# x and y pixel position on the input image, (0, 0) is top-left corner of image +int32 x +int32 y diff --git a/projects/opendr_ws_2/src/opendr_interface/msg/OpenDRPose3D.msg b/projects/opendr_ws_2/src/opendr_interface/msg/OpenDRPose3D.msg new file mode 100644 index 0000000000..a180eed5b0 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_interface/msg/OpenDRPose3D.msg @@ -0,0 +1,26 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This message represents a full OpenDR human pose 3D as a list of keypoints + +std_msgs/Header header + +# The id of the pose +int32 pose_id + +# The pose detection confidence of the model +float32 conf + +# A list of a human 3D pose keypoints +OpenDRPose3DKeypoint[] keypoint_list diff --git a/projects/opendr_ws_2/src/opendr_interface/msg/OpenDRPose3DKeypoint.msg b/projects/opendr_ws_2/src/opendr_interface/msg/OpenDRPose3DKeypoint.msg new file mode 100644 index 0000000000..179aa9e348 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_interface/msg/OpenDRPose3DKeypoint.msg @@ -0,0 +1,22 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This message contains all relevant information for an OpenDR human pose 3D keypoint
+
+# The kpt_name according to https://github.com/opendr-eu/opendr/blob/master/docs/reference/lightweight-open-pose.md#notes
+string kpt_name
+
+float32 x
+float32 y
+float32 z
diff --git a/projects/opendr_ws_2/src/opendr_interface/package.xml b/projects/opendr_ws_2/src/opendr_interface/package.xml
new file mode 100644
index 0000000000..fdbd9c351e
--- /dev/null
+++ b/projects/opendr_ws_2/src/opendr_interface/package.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0"?>
+<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
+<package format="3">
+  <name>opendr_interface</name>
+  <version>2.0.0</version>
+  <description>OpenDR ROS2 custom interface package. This package includes all custom OpenDR ROS2 messages and services.</description>
+  <maintainer email="tefas@csd.auth.gr">OpenDR Project Coordinator</maintainer>
+  <license>Apache License v2.0</license>
+
+  <buildtool_depend>ament_cmake</buildtool_depend>
+
+  <depend>std_msgs</depend>
+  <build_depend>rosidl_default_generators</build_depend>
+
+  <exec_depend>rosidl_default_runtime</exec_depend>
+  <member_of_group>rosidl_interface_packages</member_of_group>
+
+  <test_depend>ament_lint_auto</test_depend>
+  <test_depend>ament_lint_common</test_depend>
+
+  <export>
+    <build_type>ament_cmake</build_type>
+  </export>
+</package>
diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/__init__.py b/projects/opendr_ws_2/src/opendr_interface/src/.keep
similarity index 100%
rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/__init__.py
rename to projects/opendr_ws_2/src/opendr_interface/src/.keep
diff --git a/projects/opendr_ws_2/src/opendr_interface/srv/ImgToMesh.srv b/projects/opendr_ws_2/src/opendr_interface/srv/ImgToMesh.srv
new file mode 100644
index 0000000000..3d6d15717a
--- /dev/null
+++ b/projects/opendr_ws_2/src/opendr_interface/srv/ImgToMesh.srv
@@ -0,0 +1,21 @@
+# Copyright 2020-2022 OpenDR European Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+sensor_msgs/Image img_rgb
+sensor_msgs/Image img_msk
+std_msgs/Bool extract_pose
+---
+shape_msgs/Mesh mesh
+std_msgs/ColorRGBA[] vertex_colors
+OpenDRPose3D pose
diff --git a/projects/opendr_ws_2/src/opendr_interface/srv/OpenDRSingleObjectTracking.srv b/projects/opendr_ws_2/src/opendr_interface/srv/OpenDRSingleObjectTracking.srv
new file mode 100644
index 0000000000..e7b3c29517
--- /dev/null
+++ b/projects/opendr_ws_2/src/opendr_interface/srv/OpenDRSingleObjectTracking.srv
@@ -0,0 +1,17 @@
+# Copyright 2020-2022 OpenDR European Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +vision_msgs/Detection2D init_box +--- +bool success diff --git a/projects/opendr_ws_2/src/opendr_perception/README.md b/projects/opendr_ws_2/src/opendr_perception/README.md new file mode 100755 index 0000000000..1fce5f935d --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/README.md @@ -0,0 +1,885 @@ +# OpenDR Perception Package + +This package contains ROS2 nodes related to the perception package of OpenDR. + +--- + +## Prerequisites + +Before you can run any of the toolkit's ROS2 nodes, some prerequisites need to be fulfilled: +1. First of all, you need to [set up the required packages and build your workspace.](../../README.md#first-time-setup) +2. _(Optional for nodes with [RGB input](#rgb-input-nodes))_ + + For basic usage and testing, all the toolkit's ROS2 nodes that use RGB images are set up to expect input from a basic webcam using the default package `usb_cam` which is installed with OpenDR. You can run the webcam node in a new terminal: + ```shell + ros2 run usb_cam usb_cam_node_exe + ``` + By default, the USB cam node publishes images on `/image_raw` and the RGB input nodes subscribe to this topic if not provided with an input topic argument. + As explained for each node below, you can modify the topics via arguments, so if you use any other node responsible for publishing images, **make sure to change the input topic accordingly.** + +3. _(Optional for nodes with [audio input](#audio-input) or [audiovisual input](#rgb--audio-input))_ + + For basic usage and testing, the toolkit's ROS2 nodes that use audio as input are set up to expect input from a basic audio device using the default package `audio_common` which is installed with OpenDR. You can run the audio node in a new terminal: + ```shell + ros2 run audio_capture audio_capture_node + ``` + By default, the audio capture node publishes audio data on `/audio` and the audio input nodes subscribe to this topic if not provided with an input topic argument. + As explained for each node below, you can modify the topics via arguments, so if you use any other node responsible for publishing audio, **make sure to change the input topic accordingly.** + +--- + +## Notes + +- ### Display output images with rqt_image_view + For any node that outputs images, `rqt_image_view` can be used to display them by running the following command: + ```shell + ros2 run rqt_image_view rqt_image_view & + ``` + A window will appear, where the topic that you want to view can be selected from the drop-down menu on the top-left area of the window. + Refer to each node's documentation below to find out the default output image topic, where applicable, and select it on the drop-down menu of rqt_image_view. + +- ### Echo node output + All OpenDR nodes publish some kind of detection message, which can be echoed by running the following command: + ```shell + ros2 topic echo /opendr/topic_name + ``` + You can find out the default topic name for each node, in its documentation below. 
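+
+  As an alternative to echoing, you can also consume a node's output programmatically by writing a small subscriber against the corresponding message type.
+  The following is only a minimal sketch (it is not a script shipped with the toolkit) and assumes that the pose estimation node described below is running and publishing [OpenDR 2D pose messages](../opendr_interface/msg/OpenDRPose2D.msg) on its default `/opendr/poses` topic:
+  ```python
+  import rclpy
+  from rclpy.node import Node
+
+  from opendr_interface.msg import OpenDRPose2D
+
+
+  class PoseListener(Node):
+      def __init__(self):
+          super().__init__('pose_listener')
+          # Subscribe to the default detection topic of the pose estimation node
+          self.create_subscription(OpenDRPose2D, '/opendr/poses', self.callback, 10)
+
+      def callback(self, pose):
+          # Each message carries one detected pose: an id, a confidence and a list of named keypoints
+          self.get_logger().info(f'Pose {pose.pose_id} ({pose.conf:.2f}) with {len(pose.keypoint_list)} keypoints')
+
+
+  def main():
+      rclpy.init()
+      rclpy.spin(PoseListener())
+
+
+  if __name__ == '__main__':
+      main()
+  ```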
+ +- ### Increase performance by disabling output + Optionally, nodes can be modified via command line arguments, which are presented for each node separately below. + Generally, arguments give the option to change the input and output topics, the device the node runs on (CPU or GPU), etc. + When a node publishes on several topics, where applicable, a user can opt to disable one or more of the outputs by providing `None` in the corresponding output topic. + This disables publishing on that topic, forgoing some operations in the node, which might increase its performance. + + _An example would be to disable the output annotated image topic in a node when visualization is not needed and only use the detection message in another node, thus eliminating the OpenCV operations._ + +- ### An example diagram of OpenDR nodes running + ![Face Detection ROS2 node running diagram](../../images/opendr_node_diagram.png) + - On the left, the `usb_cam` node can be seen, which is using a system camera to publish images on the `/image_raw` topic. + - In the middle, OpenDR's face detection node is running taking as input the published image. By default, the node has its input topic set to `/image_raw`. + - To the right the two output topics of the face detection node can be seen. + The bottom topic `/opendr/image_faces_annotated` is the annotated image which can be easily viewed with `rqt_image_view` as explained earlier. + The other topic `/opendr/faces` is the detection message which contains the detected faces' detailed information. + This message can be easily viewed by running `ros2 topic echo /opendr/faces` in a terminal. + + + +---- + +## RGB input nodes + +### Pose Estimation ROS2 Node + +You can find the pose estimation ROS2 node python script [here](./opendr_perception/pose_estimation_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [pose estimation tool](../../../../src/opendr/perception/pose_estimation/lightweight_open_pose/lightweight_open_pose_learner.py) whose documentation can be found [here](../../../../docs/reference/lightweight-open-pose.md). +The node publishes the detected poses in [OpenDR's 2D pose message format](../opendr_interface/msg/OpenDRPose2D.msg), which saves a list of [OpenDR's keypoint message format](../opendr_interface/msg/OpenDRPose2DKeypoint.msg). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. You are then ready to start the pose detection node: + ```shell + ros2 run opendr_perception pose_estimation + ``` + The following optional arguments are available: + - `-h, --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/image_raw`) + - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_pose_annotated`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/poses`) + - `--device DEVICE`: Device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `--accelerate`: Acceleration flag that causes pose estimation to run faster but with less accuracy + +3. 
Default output topics: + - Output images: `/opendr/image_pose_annotated` + - Detection messages: `/opendr/poses` + + For viewing the output, refer to the [notes above.](#notes) + +### High Resolution Pose Estimation ROS2 Node + +You can find the high resolution pose estimation ROS2 node python script [here](./opendr_perception/hr_pose_estimation_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [high resolution pose estimation tool](../../../../src/opendr/perception/pose_estimation/hr_pose_estimation/high_resolution_learner.py) whose documentation can be found [here](../../../../docs/reference/high-resolution-pose-estimation.md). +The node publishes the detected poses in [OpenDR's 2D pose message format](../opendr_interface/msg/OpenDRPose2D.msg), which saves a list of [OpenDR's keypoint message format](../opendr_interface/msg/OpenDRPose2DKeypoint.msg). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. You are then ready to start the high resolution pose detection node: + ```shell + ros2 run opendr_perception hr_pose_estimation + ``` + The following optional arguments are available: + - `-h, --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/image_raw`) + - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_pose_annotated`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/poses`) + - `--device DEVICE`: Device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `--accelerate`: Acceleration flag that causes pose estimation to run faster but with less accuracy + +3. Default output topics: + - Output images: `/opendr/image_pose_annotated` + - Detection messages: `/opendr/poses` + + For viewing the output, refer to the [notes above.](#notes) + +### Fall Detection ROS2 Node + +You can find the fall detection ROS2 node python script [here](./opendr_perception/fall_detection_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [fall detection tool](../../../../src/opendr/perception/fall_detection/fall_detector_learner.py) whose documentation can be found [here](../../../../docs/reference/fall-detection.md). +Fall detection uses the toolkit's pose estimation tool internally. + + + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. 
You are then ready to start the fall detection node: + + ```shell + ros2 run opendr_perception fall_detection + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/image_raw`) + - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_fallen_annotated`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/fallen`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `--accelerate`: acceleration flag that causes pose estimation that runs internally to run faster but with less accuracy + +3. Default output topics: + - Output images: `/opendr/image_fallen_annotated` + - Detection messages: `/opendr/fallen` + + For viewing the output, refer to the [notes above.](#notes) + +### Face Detection ROS2 Node + +The face detection ROS2 node supports both the ResNet and MobileNet versions, the latter of which performs masked face detection as well. + +You can find the face detection ROS2 node python script [here](./opendr_perception/face_detection_retinaface_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [face detection tool](../../../../src/opendr/perception/object_detection_2d/retinaface/retinaface_learner.py) whose documentation can be found [here](../../../../docs/reference/face-detection-2d-retinaface.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. You are then ready to start the face detection node + + ```shell + ros2 run opendr_perception face_detection_retinaface + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/image_raw`) + - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_faces_annotated`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/faces`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `--backbone BACKBONE`: retinaface backbone, options are either `mnet` or `resnet`, where `mnet` detects masked faces as well (default=`resnet`) + +3. Default output topics: + - Output images: `/opendr/image_faces_annotated` + - Detection messages: `/opendr/faces` + + For viewing the output, refer to the [notes above.](#notes) + +### Face Recognition ROS2 Node + +You can find the face recognition ROS2 node python script [here](./opendr_perception/face_recognition_node.py) to inspect the code and modify it as you wish to fit your needs. 
+The node makes use of the toolkit's [face recognition tool](../../../../src/opendr/perception/face_recognition/face_recognition_learner.py) whose documentation can be found [here](../../../../docs/reference/face-recognition.md).
+
+#### Instructions for basic usage:
+
+1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites).
+
+2. You are then ready to start the face recognition node:
+
+   ```shell
+   ros2 run opendr_perception face_recognition
+   ```
+   The following optional arguments are available:
+   - `-h or --help`: show a help message and exit
+   - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/image_raw`)
+   - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_face_reco_annotated`)
+   - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/face_recognition`)
+   - `-id or --detections_id_topic DETECTIONS_ID_TOPIC`: topic name for detection ID messages, `None` to stop the node from publishing on this topic (default=`/opendr/face_recognition_id`)
+   - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`)
+   - `--backbone BACKBONE`: backbone network (default=`mobilefacenet`)
+   - `--dataset_path DATASET_PATH`: path of the directory where the images of the faces to be recognized are stored (default=`./database`)
+
+3. Default output topics:
+   - Output images: `/opendr/image_face_reco_annotated`
+   - Detection messages: `/opendr/face_recognition` and `/opendr/face_recognition_id`
+
+   For viewing the output, refer to the [notes above.](#notes)
+
+**Notes**
+
+Reference images should be placed in a defined structure like:
+- imgs
+  - ID1
+    - image1
+    - image2
+  - ID2
+  - ID3
+  - ...
+
+The default dataset path is `./database`. Please use the `--dataset_path ./your/path/` argument to define a custom one.
+The name of the sub-folder, e.g. ID1, will be published under `/opendr/face_recognition_id`.
+
+The database entry and the returned confidence are published under the topic name `/opendr/face_recognition`, and the human-readable ID
+under `/opendr/face_recognition_id`.
+
+### 2D Object Detection ROS2 Nodes
+
+For 2D object detection, there are several ROS2 nodes implemented using various algorithms. The generic object detectors are SSD, YOLOv3, YOLOv5, CenterNet, Nanodet and DETR.
+
+You can find the 2D object detection ROS2 node python scripts here:
+[SSD node](./opendr_perception/object_detection_2d_ssd_node.py), [YOLOv3 node](./opendr_perception/object_detection_2d_yolov3_node.py), [YOLOv5 node](./opendr_perception/object_detection_2d_yolov5_node.py), [CenterNet node](./opendr_perception/object_detection_2d_centernet_node.py), [Nanodet node](./opendr_perception/object_detection_2d_nanodet_node.py) and [DETR node](./opendr_perception/object_detection_2d_detr_node.py),
+where you can inspect the code and modify it as you wish to fit your needs.
+The nodes make use of the toolkit's various 2D object detection tools:
+[SSD tool](../../../../src/opendr/perception/object_detection_2d/ssd/ssd_learner.py), [YOLOv3 tool](../../../../src/opendr/perception/object_detection_2d/yolov3/yolov3_learner.py), [YOLOv5 tool](../../../../src/opendr/perception/object_detection_2d/yolov5/yolov5_learner.py),
+[CenterNet tool](../../../../src/opendr/perception/object_detection_2d/centernet/centernet_learner.py), [Nanodet tool](../../../../src/opendr/perception/object_detection_2d/nanodet/nanodet_learner.py), [DETR tool](../../../../src/opendr/perception/object_detection_2d/detr/detr_learner.py),
+whose documentation can be found here:
+[SSD docs](../../../../docs/reference/object-detection-2d-ssd.md), [YOLOv3 docs](../../../../docs/reference/object-detection-2d-yolov3.md), [YOLOv5 docs](../../../../docs/reference/object-detection-2d-yolov5.md),
+[CenterNet docs](../../../../docs/reference/object-detection-2d-centernet.md), [Nanodet docs](../../../../docs/reference/nanodet.md), [DETR docs](../../../../docs/reference/detr.md).
+
+#### Instructions for basic usage:
+
+1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites).
+
+2. You are then ready to start a 2D object detector node:
+   1. SSD node
+      ```shell
+      ros2 run opendr_perception object_detection_2d_ssd
+      ```
+      The following optional arguments are available for the SSD node:
+      - `--backbone BACKBONE`: Backbone network (default=`vgg16_atrous`)
+      - `--nms_type NMS_TYPE`: Non-Maximum Suppression type, options are `default`, `seq2seq-nms`, `soft-nms`, `fast-nms`, `cluster-nms` (default=`default`)
+
+   2. YOLOv3 node
+      ```shell
+      ros2 run opendr_perception object_detection_2d_yolov3
+      ```
+      The following optional argument is available for the YOLOv3 node:
+      - `--backbone BACKBONE`: Backbone network (default=`darknet53`)
+
+   3. YOLOv5 node
+      ```shell
+      ros2 run opendr_perception object_detection_2d_yolov5
+      ```
+      The following optional argument is available for the YOLOv5 node:
+      - `--model_name MODEL_NAME`: Network architecture, options are `yolov5s`, `yolov5n`, `yolov5m`, `yolov5l`, `yolov5x`, `yolov5n6`, `yolov5s6`, `yolov5m6`, `yolov5l6`, `custom` (default=`yolov5s`)
+
+   4. CenterNet node
+      ```shell
+      ros2 run opendr_perception object_detection_2d_centernet
+      ```
+      The following optional argument is available for the CenterNet node:
+      - `--backbone BACKBONE`: Backbone network (default=`resnet50_v1b`)
+
+   5. Nanodet node
+      ```shell
+      ros2 run opendr_perception object_detection_2d_nanodet
+      ```
+      The following optional argument is available for the Nanodet node:
+      - `--model Model`: Model whose config file will be used (default=`plus_m_1.5x_416`)
+
+   6. DETR node
+      ```shell
+      ros2 run opendr_perception object_detection_2d_detr
+      ```
+
+   The following optional arguments are available for all nodes above:
+   - `-h or --help`: show a help message and exit
+   - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/image_raw`)
+   - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_objects_annotated`)
+   - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/objects`)
+   - `--device DEVICE`: Device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`)
+
+3. Default output topics:
+   - Output images: `/opendr/image_objects_annotated`
+   - Detection messages: `/opendr/objects`
+
+   For viewing the output, refer to the [notes above.](#notes)
+
+### 2D Single Object Tracking ROS2 Node
+
+You can find the single object tracking 2D ROS2 node python script [here](./opendr_perception/object_tracking_2d_siamrpn_node.py) to inspect the code and modify it as you wish to fit your needs.
+The node makes use of the toolkit's [single object tracking 2D SiamRPN tool](../../../../src/opendr/perception/object_tracking_2d/siamrpn/siamrpn_learner.py) whose documentation can be found [here](../../../../docs/reference/object-tracking-2d-siamrpn.md).
+
+#### Instructions for basic usage:
+
+1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites).
+
+2. You are then ready to start the single object tracking 2D node:
+
+   ```shell
+   ros2 run opendr_perception object_tracking_2d_siamrpn
+   ```
+
+   The following optional arguments are available:
+   - `-h or --help`: show a help message and exit
+   - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC` : listen to RGB images on this topic (default=`/image_raw`)
+   - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_tracking_annotated`)
+   - `-t or --tracker_topic TRACKER_TOPIC`: topic name for tracker messages, `None` to stop the node from publishing on this topic (default=`/opendr/tracked_object`)
+   - `--device DEVICE`: Device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`)
+
+3. Default output topics:
+   - Output images: `/opendr/image_tracking_annotated`
+   - Detection messages: `/opendr/tracked_object`
+
+   For viewing the output, refer to the [notes above.](#notes)
+
+**Notes**
+
+To initialize this node, it is required to provide a bounding box of the object to track.
+This is achieved by initializing one of the toolkit's 2D object detectors (YOLOv3) and running object detection once on the input.
+Afterwards, **the detected bounding box that is closest to the center of the image** is used to initialize the tracker.
+Feel free to modify the node to initialize it in a different way that matches your use case, as sketched below.
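+
+The following is only a minimal sketch of such a modification, not part of the toolkit: it picks the highest-confidence detection instead of the most central one. The `detections` list and its `confidence` attribute are placeholders for whatever the node actually receives from the detector.
+```python
+# Hypothetical helper: choose the initialization box by confidence instead of centrality.
+# `detections` stands for the list of boxes that the detector returns when the node starts up;
+# adapt the attribute names to the actual node code.
+def pick_init_box(detections):
+    if not detections:
+        return None  # nothing detected yet, try again on the next frame
+    return max(detections, key=lambda box: box.confidence)
+```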
+
+### 2D Object Tracking ROS2 Nodes
+
+For 2D object tracking, there are two ROS2 nodes provided, one using Deep Sort and one using FairMOT, which use either pretrained models or custom trained models.
+The predicted tracking annotations are split into two topics with detections and tracking IDs.
+Additionally, an annotated image is generated.
+
+You can find the 2D object tracking ROS2 node python scripts here: [Deep Sort node](./opendr_perception/object_tracking_2d_deep_sort_node.py) and [FairMOT node](./opendr_perception/object_tracking_2d_fair_mot_node.py)
+where you can inspect the code and modify it as you wish to fit your needs.
+The nodes make use of the toolkit's [object tracking 2D - Deep Sort tool](../../../../src/opendr/perception/object_tracking_2d/deep_sort/object_tracking_2d_deep_sort_learner.py)
+and [object tracking 2D - FairMOT tool](../../../../src/opendr/perception/object_tracking_2d/fair_mot/object_tracking_2d_fair_mot_learner.py)
+whose documentation can be found here: [Deep Sort docs](../../../../docs/reference/object-tracking-2d-deep-sort.md), [FairMOT docs](../../../../docs/reference/object-tracking-2d-fair-mot.md).
+
+#### Instructions for basic usage:
+
+1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites).
+
+2. You are then ready to start a 2D object tracking node:
+   1. Deep Sort node
+      ```shell
+      ros2 run opendr_perception object_tracking_2d_deep_sort
+      ```
+      The following optional argument is available for the Deep Sort node:
+      - `-n --model_name MODEL_NAME`: name of the trained model (default=`deep_sort`)
+   2. FairMOT node
+      ```shell
+      ros2 run opendr_perception object_tracking_2d_fair_mot
+      ```
+      The following optional argument is available for the FairMOT node:
+      - `-n --model_name MODEL_NAME`: name of the trained model (default=`fairmot_dla34`)
+
+   The following optional arguments are available for both nodes:
+   - `-h or --help`: show a help message and exit
+   - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/image_raw`)
+   - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_objects_annotated`)
+   - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/objects`)
+   - `-t or --tracking_id_topic TRACKING_ID_TOPIC`: topic name for tracking ID messages, `None` to stop the node from publishing on this topic (default=`/opendr/objects_tracking_id`)
+   - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`)
+   - `-td --temp_dir TEMP_DIR`: path to a temporary directory with models (default=`temp`)
+
+3. Default output topics:
+   - Output images: `/opendr/image_objects_annotated`
+   - Detection messages: `/opendr/objects`
+   - Tracking ID messages: `/opendr/objects_tracking_id`
+
+   For viewing the output, refer to the [notes above.](#notes)
+
+**Notes**
+
+An [image dataset node](#image-dataset-ros2-node) is also provided to be used along with these nodes.
+Make sure to change the default input topic of the tracking node if you are not using the USB cam node.
+
+### Panoptic Segmentation ROS2 Node
+
+You can find the panoptic segmentation ROS2 node python script [here](./opendr_perception/panoptic_segmentation_efficient_ps_node.py) to inspect the code and modify it as you wish to fit your needs.
+The node makes use of the toolkit's [panoptic segmentation tool](../../../../src/opendr/perception/panoptic_segmentation/efficient_ps/efficient_ps_learner.py) whose documentation can be found [here](../../../../docs/reference/efficient-ps.md) +and additional information about Efficient PS [here](../../../../src/opendr/perception/panoptic_segmentation/README.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. You are then ready to start the panoptic segmentation node: + + ```shell + ros2 run opendr_perception panoptic_segmentation_efficient_ps + ``` + + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC` : listen to RGB images on this topic (default=`/image_raw`) + - `-oh --output_heatmap_topic OUTPUT_HEATMAP_TOPIC`: publish the semantic and instance maps on this topic as `OUTPUT_HEATMAP_TOPIC/semantic` and `OUTPUT_HEATMAP_TOPIC/instance`, `None` to stop the node from publishing on this topic (default=`/opendr/panoptic`) + - `-ov --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: publish the panoptic segmentation map as an RGB image on this topic or a more detailed overview if using the `--detailed_visualization` flag, `None` to stop the node from publishing on this topic (default=`opendr/panoptic/rgb_visualization`) + - `--detailed_visualization`: generate a combined overview of the input RGB image and the semantic, instance, and panoptic segmentation maps and publish it on `OUTPUT_RGB_IMAGE_TOPIC` (default=deactivated) + - `--checkpoint CHECKPOINT` : download pretrained models [cityscapes, kitti] or load from the provided path (default=`cityscapes`) + +3. Default output topics: + - Output images: `/opendr/panoptic/semantic`, `/opendr/panoptic/instance`, `/opendr/panoptic/rgb_visualization` + - Detection messages: `/opendr/panoptic/semantic`, `/opendr/panoptic/instance` + + For viewing the output, refer to the [notes above.](#notes) + +### Semantic Segmentation ROS2 Node + +You can find the semantic segmentation ROS2 node python script [here](./opendr_perception/semantic_segmentation_bisenet_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [semantic segmentation tool](../../../../src/opendr/perception/semantic_segmentation/bisenet/bisenet_learner.py) whose documentation can be found [here](../../../../docs/reference/semantic-segmentation.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. 
You are then ready to start the semantic segmentation node: + + ```shell + ros2 run opendr_perception semantic_segmentation_bisenet + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/image_raw`) + - `-o or --output_heatmap_topic OUTPUT_HEATMAP_TOPIC`: topic to which we are publishing the heatmap in the form of a ROS2 image containing class IDs, `None` to stop the node from publishing on this topic (default=`/opendr/heatmap`) + - `-ov or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic to which we are publishing the heatmap image blended with the input image and a class legend for visualization purposes, `None` to stop the node from publishing on this topic (default=`/opendr/heatmap_visualization`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + +3. Default output topics: + - Output images: `/opendr/heatmap`, `/opendr/heatmap_visualization` + - Detection messages: `/opendr/heatmap` + + For viewing the output, refer to the [notes above.](#notes) + +**Notes** + +On the table below you can find the detectable classes and their corresponding IDs: + +| Class | Bicyclist | Building | Car | Column Pole | Fence | Pedestrian | Road | Sidewalk | Sign Symbol | Sky | Tree | Unknown | +|--------|-----------|----------|-----|-------------|-------|------------|------|----------|-------------|-----|------|---------| +| **ID** | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | + +### Image-based Facial Emotion Estimation ROS2 Node + +You can find the image-based facial emotion estimation ROS2 node python script [here](./opendr_perception/facial_emotion_estimation_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's image-based facial emotion estimation tool which can be found [here](../../../../src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/facial_emotion_learner.py) +whose documentation can be found [here](../../../../docs/reference/image_based_facial_emotion_estimation.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. 
You are then ready to start the image-based facial emotion estimation node:
+
+   ```shell
+   ros2 run opendr_perception facial_emotion_estimation
+   ```
+   The following optional arguments are available:
+   - `-h or --help`: show a help message and exit
+   - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/image_raw`)
+   - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_emotion_estimation_annotated`)
+   - `-e or --output_emotions_topic OUTPUT_EMOTIONS_TOPIC`: topic to which we are publishing the facial emotion results, `None` to stop the node from publishing on this topic (default=`/opendr/facial_emotion_estimation`)
+   - `-m or --output_emotions_description_topic OUTPUT_EMOTIONS_DESCRIPTION_TOPIC`: topic to which we are publishing the description of the estimated facial emotion, `None` to stop the node from publishing on this topic (default=`/opendr/facial_emotion_estimation_description`)
+   - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`)
+
+3. Default output topics:
+   - Output images: `/opendr/image_emotion_estimation_annotated`
+   - Detection messages: `/opendr/facial_emotion_estimation`, `/opendr/facial_emotion_estimation_description`
+
+   For viewing the output, refer to the [notes above.](#notes)
+
+**Notes**
+
+This node requires the detection of a face first. This is achieved by including the toolkit's face detector and running face detection on the input.
+Afterwards, the detected bounding box of the face is cropped and fed into the facial emotion estimator.
+Feel free to modify the node to detect faces in a different way that matches your use case.
+
+### Landmark-based Facial Expression Recognition ROS2 Node
+
+A ROS2 node for performing landmark-based facial expression recognition using a trained model on AFEW, CK+ or Oulu-CASIA datasets.
+OpenDR does not include a pretrained model, so one should be provided by the user.
+An alternative would be to use the [image-based facial emotion estimation node](#image-based-facial-emotion-estimation-ros2-node) provided by the toolkit.
+
+You can find the landmark-based facial expression recognition ROS2 node python script [here](./opendr_perception/landmark_based_facial_expression_recognition_node.py) to inspect the code and modify it as you wish to fit your needs.
+The node makes use of the toolkit's landmark-based facial expression recognition tool which can be found [here](../../../../src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/progressive_spatio_temporal_bln_learner.py)
+whose documentation can be found [here](../../../../docs/reference/landmark-based-facial-expression-recognition.md).
+
+#### Instructions for basic usage:
+
+1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites).
+
+2.
You are then ready to start the landmark-based facial expression recognition node: + + ```shell + ros2 run opendr_perception landmark_based_facial_expression_recognition + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/image_raw`) + - `-o or --output_category_topic OUTPUT_CATEGORY_TOPIC`: topic to which we are publishing the recognized facial expression category info, `None` to stop the node from publishing on this topic (default=`"/opendr/landmark_expression_recognition"`) + - `-d or --output_category_description_topic OUTPUT_CATEGORY_DESCRIPTION_TOPIC`: topic to which we are publishing the description of the recognized facial expression, `None` to stop the node from publishing on this topic (default=`/opendr/landmark_expression_recognition_description`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `--model`: architecture to use for facial expression recognition, options are `pstbln_ck+`, `pstbln_casia`, `pstbln_afew` (default=`pstbln_afew`) + - `-s or --shape_predictor SHAPE_PREDICTOR`: shape predictor (landmark_extractor) to use (default=`./predictor_path`) + +3. Default output topics: + - Detection messages: `/opendr/landmark_expression_recognition`, `/opendr/landmark_expression_recognition_description` + + For viewing the output, refer to the [notes above.](#notes) + +### Skeleton-based Human Action Recognition ROS2 Node + +A ROS2 node for performing skeleton-based human action recognition using either ST-GCN or PST-GCN models pretrained on NTU-RGBD-60 dataset. +The human body poses of the image are first extracted by the lightweight OpenPose method which is implemented in the toolkit, and they are passed to the skeleton-based action recognition method to be categorized. + +You can find the skeleton-based human action recognition ROS2 node python script [here](./opendr_perception/skeleton_based_action_recognition_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's skeleton-based human action recognition tool which can be found [here for ST-GCN](../../../../src/opendr/perception/skeleton_based_action_recognition/spatio_temporal_gcn_learner.py) +and [here for PST-GCN](../../../../src/opendr/perception/skeleton_based_action_recognition/progressive_spatio_temporal_gcn_learner.py) +whose documentation can be found [here](../../../../docs/reference/skeleton-based-action-recognition.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites). + +2. 
You are then ready to start the skeleton-based human action recognition node:
+
+   ```shell
+   ros2 run opendr_perception skeleton_based_action_recognition
+   ```
+   The following optional arguments are available:
+   - `-h or --help`: show a help message and exit
+   - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/image_raw`)
+   - `-o or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output pose-annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/image_pose_annotated`)
+   - `-p or --pose_annotations_topic POSE_ANNOTATIONS_TOPIC`: topic name for pose annotations, `None` to stop the node from publishing on this topic (default=`/opendr/poses`)
+   - `-c or --output_category_topic OUTPUT_CATEGORY_TOPIC`: topic name for recognized action category, `None` to stop the node from publishing on this topic (default=`/opendr/skeleton_recognized_action`)
+   - `-d or --output_category_description_topic OUTPUT_CATEGORY_DESCRIPTION_TOPIC`: topic name for description of the recognized action category, `None` to stop the node from publishing on this topic (default=`/opendr/skeleton_recognized_action_description`)
+   - `--model`: model to use, options are `stgcn` or `pstgcn` (default=`stgcn`)
+   - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`)
+
+3. Default output topics:
+   - Detection messages: `/opendr/skeleton_recognized_action`, `/opendr/skeleton_recognized_action_description`, `/opendr/poses`
+   - Output images: `/opendr/image_pose_annotated`
+
+   For viewing the output, refer to the [notes above.](#notes)
+
+### Video Human Activity Recognition ROS2 Node
+
+A ROS2 node for performing human activity recognition using either CoX3D or X3D models pretrained on Kinetics400.
+
+You can find the video human activity recognition ROS2 node python script [here](./opendr_perception/video_activity_recognition_node.py) to inspect the code and modify it as you wish to fit your needs.
+The node makes use of the toolkit's video human activity recognition tools which can be found [here for CoX3D](../../../../src/opendr/perception/activity_recognition/cox3d/cox3d_learner.py) and
+[here for X3D](../../../../src/opendr/perception/activity_recognition/x3d/x3d_learner.py) whose documentation can be found [here](../../../../docs/reference/activity-recognition.md).
+
+#### Instructions for basic usage:
+
+1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites).
+
+2.
You are then ready to start the video human activity recognition node: + + ```shell + ros2 run opendr_perception video_activity_recognition + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/image_raw`) + - `-o or --output_category_topic OUTPUT_CATEGORY_TOPIC`: topic to which we are publishing the recognized activity, `None` to stop the node from publishing on this topic (default=`"/opendr/human_activity_recognition"`) + - `-od or --output_category_description_topic OUTPUT_CATEGORY_DESCRIPTION_TOPIC`: topic to which we are publishing the ID of the recognized action, `None` to stop the node from publishing on this topic (default=`/opendr/human_activity_recognition_description`) + - `--model`: architecture to use for human activity recognition, options are `cox3d-s`, `cox3d-m`, `cox3d-l`, `x3d-xs`, `x3d-s`, `x3d-m`, or `x3d-l` (default=`cox3d-m`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + +3. Default output topics: + - Detection messages: `/opendr/human_activity_recognition`, `/opendr/human_activity_recognition_description` + + For viewing the output, refer to the [notes above.](#notes) + +**Notes** + +You can find the corresponding IDs regarding activity recognition [here](https://github.com/opendr-eu/opendr/blob/master/src/opendr/perception/activity_recognition/datasets/kinetics400_classes.csv). + +## RGB + Infrared input + +### 2D Object Detection GEM ROS2 Node + +You can find the object detection 2D GEM ROS2 node python script [here](./opendr_perception/object_detection_2d_gem_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [object detection 2D GEM tool](../../../../src/opendr/perception/object_detection_2d/gem/gem_learner.py) +whose documentation can be found [here](../../../../docs/reference/gem.md). + +#### Instructions for basic usage: + +1. First one needs to find points in the color and infrared images that correspond, in order to find the homography matrix that allows to correct for the difference in perspective between the infrared and the RGB camera. + These points can be selected using a [utility tool](../../../../src/opendr/perception/object_detection_2d/utils/get_color_infra_alignment.py) that is provided in the toolkit. + +2. Pass the points you have found as *pts_color* and *pts_infra* arguments to the [ROS2 GEM node](./opendr_perception/object_detection_2d_gem.py). + +3. Start the node responsible for publishing images. If you have a RealSense camera, then you can use the corresponding node (assuming you have installed [realsense2_camera](http://wiki.ros.org/realsense2_camera)): + + ```shell + roslaunch realsense2_camera rs_camera.launch enable_color:=true enable_infra:=true enable_depth:=false enable_sync:=true infra_width:=640 infra_height:=480 + ``` + +4. 
You are then ready to start the object detection 2D GEM node:
+
+   ```shell
+   ros2 run opendr_perception object_detection_2d_gem
+   ```
+   The following optional arguments are available:
+   - `-h or --help`: show a help message and exit
+   - `-ic or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/camera/color/image_raw`)
+   - `-ii or --input_infra_image_topic INPUT_INFRA_IMAGE_TOPIC`: topic name for input infrared image (default=`/camera/infra/image_raw`)
+   - `-oc or --output_rgb_image_topic OUTPUT_RGB_IMAGE_TOPIC`: topic name for output annotated RGB image, `None` to stop the node from publishing on this topic (default=`/opendr/rgb_image_objects_annotated`)
+   - `-oi or --output_infra_image_topic OUTPUT_INFRA_IMAGE_TOPIC`: topic name for output annotated infrared image, `None` to stop the node from publishing on this topic (default=`/opendr/infra_image_objects_annotated`)
+   - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/objects`)
+   - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`)
+
+5. Default output topics:
+   - Output RGB images: `/opendr/rgb_image_objects_annotated`
+   - Output infrared images: `/opendr/infra_image_objects_annotated`
+   - Detection messages: `/opendr/objects`
+
+   For viewing the output, refer to the [notes above.](#notes)
+
+----
+## RGBD input
+
+### RGBD Hand Gesture Recognition ROS2 Node
+A ROS2 node for performing hand gesture recognition using a MobileNetv2 model trained on the HANDS dataset.
+The node has been tested with a Kinect v2 for depth data acquisition, using the following drivers: https://github.com/OpenKinect/libfreenect2 and https://github.com/code-iai/iai_kinect2.
+
+You can find the RGBD hand gesture recognition ROS2 node python script [here](./opendr_perception/rgbd_hand_gesture_recognition_node.py) to inspect the code and modify it as you wish to fit your needs.
+The node makes use of the toolkit's [hand gesture recognition tool](../../../../src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/rgbd_hand_gesture_learner.py)
+whose documentation can be found [here](../../../../docs/reference/rgbd-hand-gesture-learner.md).
+
+#### Instructions for basic usage:
+
+1. Start the node responsible for publishing images from an RGBD camera. Remember to modify the input topics using the arguments in step 2 if needed.
+
+2. You are then ready to start the hand gesture recognition node:
+   ```shell
+   ros2 run opendr_perception rgbd_hand_gesture_recognition
+   ```
+   The following optional arguments are available:
+   - `-h or --help`: show a help message and exit
+   - `-ic or --input_rgb_image_topic INPUT_RGB_IMAGE_TOPIC`: topic name for input RGB image (default=`/kinect2/qhd/image_color_rect`)
+   - `-id or --input_depth_image_topic INPUT_DEPTH_IMAGE_TOPIC`: topic name for input depth image (default=`/kinect2/qhd/image_depth_rect`)
+   - `-o or --output_gestures_topic OUTPUT_GESTURES_TOPIC`: topic name for predicted gesture class (default=`/opendr/gestures`)
+   - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`)
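+
+   For example, to use an RGBD camera other than the Kinect v2, point the node at that camera's topics explicitly. The topic names below are only illustrative and depend on your camera driver:
+
+   ```shell
+   ros2 run opendr_perception rgbd_hand_gesture_recognition -ic /camera/color/image_raw -id /camera/aligned_depth_to_color/image_raw
+   ```
+
+3. 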
Default output topics:
+   - Detection messages: `/opendr/gestures`
+
+   For viewing the output, refer to the [notes above.](#notes)
+
+----
+## RGB + Audio input
+
+### Audiovisual Emotion Recognition ROS2 Node
+
+You can find the audiovisual emotion recognition ROS2 node python script [here](./opendr_perception/audiovisual_emotion_recognition_node.py) to inspect the code and modify it as you wish to fit your needs.
+The node makes use of the toolkit's [audiovisual emotion recognition tool](../../../../src/opendr/perception/multimodal_human_centric/audiovisual_emotion_learner/avlearner.py),
+whose documentation can be found [here](../../../../docs/reference/audiovisual-emotion-recognition-learner.md).
+
+#### Instructions for basic usage:
+
+1. Start the node responsible for publishing images. If you have a USB camera, then you can use the `usb_cam_node` as explained in the [prerequisites above](#prerequisites).
+2. Start the node responsible for publishing audio. If you have an audio capture device, then you can use the `audio_capture_node` as explained in the [prerequisites above](#prerequisites).
+3. You are then ready to start the audiovisual emotion recognition node:
+
+   ```shell
+   ros2 run opendr_perception audiovisual_emotion_recognition
+   ```
+   The following optional arguments are available:
+   - `-h or --help`: show a help message and exit
+   - `-iv or --input_video_topic INPUT_VIDEO_TOPIC`: topic name for input video, expects detected face of size 224x224 (default=`/image_raw`)
+   - `-ia or --input_audio_topic INPUT_AUDIO_TOPIC`: topic name for input audio (default=`/audio`)
+   - `-o or --output_emotions_topic OUTPUT_EMOTIONS_TOPIC`: topic to which we are publishing the predicted emotion (default=`/opendr/audiovisual_emotion`)
+   - `--buffer_size BUFFER_SIZE`: length of audio and video in seconds (default=`3.6`)
+   - `--model_path MODEL_PATH`: if given, the pretrained model will be loaded from the specified local path, otherwise it will be downloaded from an OpenDR FTP server
+
+4. Default output topics:
+   - Detection messages: `/opendr/audiovisual_emotion`
+
+   For viewing the output, refer to the [notes above.](#notes)
+
+----
+## Audio input
+
+### Speech Command Recognition ROS2 Node
+
+A ROS2 node for recognizing speech commands from an audio stream using MatchboxNet, EdgeSpeechNets or Quadratic SelfONN models, pretrained on the Google Speech Commands dataset.
+
+You can find the speech command recognition ROS2 node python script [here](./opendr_perception/speech_command_recognition_node.py) to inspect the code and modify it as you wish to fit your needs.
+The node makes use of the toolkit's speech command recognition tools:
+[EdgeSpeechNets tool](../../../../src/opendr/perception/speech_recognition/edgespeechnets/edgespeechnets_learner.py), [MatchboxNet tool](../../../../src/opendr/perception/speech_recognition/matchboxnet/matchboxnet_learner.py), [Quadratic SelfONN tool](../../../../src/opendr/perception/speech_recognition/quadraticselfonn/quadraticselfonn_learner.py)
+whose documentation can be found here:
+[EdgeSpeechNet docs](../../../../docs/reference/edgespeechnets.md), [MatchboxNet docs](../../../../docs/reference/matchboxnet.md), [Quadratic SelfONN docs](../../../../docs/reference/quadratic-selfonn.md).
+
+#### Instructions for basic usage:
+
+1. Start the node responsible for publishing audio. If you have an audio capture device, then you can use the `audio_capture_node` as explained in the [prerequisites above](#prerequisites).
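+
+   To verify that audio is actually being published before starting the recognition node, you can inspect the topic with the ROS2 CLI (the default input topic is `/audio`); this shows both the message type and the publishing rate:
+
+   ```shell
+   ros2 topic info /audio
+   ros2 topic hz /audio
+   ```
+
+2. 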
You are then ready to start the speech command recognition node + + ```shell + ros2 run opendr_perception speech_command_recognition + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_audio_topic INPUT_AUDIO_TOPIC`: topic name for input audio (default=`/audio`) + - `-o or --output_speech_command_topic OUTPUT_SPEECH_COMMAND_TOPIC`: topic name for speech command output (default=`/opendr/speech_recognition`) + - `--buffer_size BUFFER_SIZE`: set the size of the audio buffer (expected command duration) in seconds (default=`1.5`) + - `--model MODEL`: the model to use, choices are `matchboxnet`, `edgespeechnets` or `quad_selfonn` (default=`matchboxnet`) + - `--model_path MODEL_PATH`: if given, the pretrained model will be loaded from the specified local path, otherwise it will be downloaded from an OpenDR FTP server + +3. Default output topics: + - Detection messages, class id and confidence: `/opendr/speech_recognition` + + For viewing the output, refer to the [notes above.](#notes) + +**Notes** + +EdgeSpeechNets currently does not have a pretrained model available for download, only local files may be used. + +---- +## Point cloud input + +### 3D Object Detection Voxel ROS2 Node + +A ROS2 node for performing 3D object detection Voxel using PointPillars or TANet methods with either pretrained models on KITTI dataset, or custom trained models. + +You can find the 3D object detection Voxel ROS2 node python script [here](./opendr_perception/object_detection_3d_voxel_node.py) to inspect the code and modify it as you wish to fit your needs. +The node makes use of the toolkit's [3D object detection Voxel tool](../../../../src/opendr/perception/object_detection_3d/voxel_object_detection_3d/voxel_object_detection_3d_learner.py) +whose documentation can be found [here](../../../../docs/reference/voxel-object-detection-3d.md). + +#### Instructions for basic usage: + +1. Start the node responsible for publishing point clouds. OpenDR provides a [point cloud dataset node](#point-cloud-dataset-ros2-node) for convenience. + +2. You are then ready to start the 3D object detection node: + + ```shell + ros2 run opendr_perception object_detection_3d_voxel + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_point_cloud_topic INPUT_POINT_CLOUD_TOPIC`: point cloud topic provided by either a point_cloud_dataset_node or any other 3D point cloud node (default=`/opendr/dataset_point_cloud`) + - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages (default=`/opendr/objects3d`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `-n or --model_name MODEL_NAME`: name of the trained model (default=`tanet_car_xyres_16`) + - `-c or --model_config_path MODEL_CONFIG_PATH`: path to a model .proto config (default=`../../src/opendr/perception/object_detection3d/voxel_object_detection_3d/second_detector/configs/tanet/car/xyres_16.proto`) + +3. Default output topics: + - Detection messages: `/opendr/objects3d` + + For viewing the output, refer to the [notes above.](#notes) + +### 3D Object Tracking AB3DMOT ROS2 Node + +A ROS2 node for performing 3D object tracking using AB3DMOT stateless method. +This is a detection-based method, and therefore the 3D object detector is needed to provide detections, which then will be used to make associations and generate tracking ids. 
+The predicted tracking annotations are split into two topics: one with the detections and one with the tracking IDs.
+
+You can find the 3D object tracking AB3DMOT ROS2 node python script [here](./opendr_perception/object_tracking_3d_ab3dmot_node.py) to inspect the code and modify it as you wish to fit your needs.
+The node makes use of the toolkit's [3D object tracking AB3DMOT tool](../../../../src/opendr/perception/object_tracking_3d/ab3dmot/object_tracking_3d_ab3dmot_learner.py)
+whose documentation can be found [here](../../../../docs/reference/object-tracking-3d-ab3dmot.md).
+
+#### Instructions for basic usage:
+
+1. Start the node responsible for publishing point clouds. OpenDR provides a [point cloud dataset node](#point-cloud-dataset-ros2-node) for convenience.
+
+2. You are then ready to start the 3D object tracking node:
+
+   ```shell
+   ros2 run opendr_perception object_tracking_3d_ab3dmot
+   ```
+   The following optional arguments are available:
+   - `-h or --help`: show a help message and exit
+   - `-i or --input_point_cloud_topic INPUT_POINT_CLOUD_TOPIC`: point cloud topic provided by either a point_cloud_dataset_node or any other 3D point cloud node (default=`/opendr/dataset_point_cloud`)
+   - `-d or --detections_topic DETECTIONS_TOPIC`: topic name for detection messages, `None` to stop the node from publishing on this topic (default=`/opendr/objects3d`)
+   - `-t or --tracking3d_id_topic TRACKING3D_ID_TOPIC`: topic name for output tracking IDs with the same element count as in the detection topic, `None` to stop the node from publishing on this topic (default=`/opendr/objects_tracking_id`)
+   - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`)
+   - `-dn or --detector_model_name DETECTOR_MODEL_NAME`: name of the trained model (default=`tanet_car_xyres_16`)
+   - `-dc or --detector_model_config_path DETECTOR_MODEL_CONFIG_PATH`: path to a model .proto config (default=`../../src/opendr/perception/object_detection3d/voxel_object_detection_3d/second_detector/configs/tanet/car/xyres_16.proto`)
+
+3. Default output topics:
+   - Detection messages: `/opendr/objects3d`
+   - Tracking ID messages: `/opendr/objects_tracking_id`
+
+   For viewing the output, refer to the [notes above.](#notes)
+
+----
+## Biosignal input
+
+### Heart Anomaly Detection ROS2 Node
+
+A ROS2 node for performing heart anomaly (atrial fibrillation) detection from ECG data using GRU or ANBOF models trained on the AF dataset.
+
+You can find the heart anomaly detection ROS2 node python script [here](./opendr_perception/heart_anomaly_detection_node.py) to inspect the code and modify it as you wish to fit your needs.
+The node makes use of the toolkit's heart anomaly detection tools: [ANBOF tool](../../../../src/opendr/perception/heart_anomaly_detection/attention_neural_bag_of_feature/attention_neural_bag_of_feature_learner.py) and
+[GRU tool](../../../../src/opendr/perception/heart_anomaly_detection/gated_recurrent_unit/gated_recurrent_unit_learner.py), whose documentation can be found here:
+[ANBOF docs](../../../../docs/reference/attention-neural-bag-of-feature-learner.md) and [GRU docs](../../../../docs/reference/gated-recurrent-unit-learner.md).
+
+#### Instructions for basic usage:
+
+1. Start the node responsible for publishing ECG data.
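+
+   OpenDR does not currently provide a dataset node for ECG data, so you need your own publisher. The node subscribes to `std_msgs/msg/Float32MultiArray` messages on `/ecg/ecg` by default and expects a window of 9000 samples per message (AF dataset format). Purely to illustrate the message interface, a dummy message can be published from the command line; a real publisher must fill `data` with the full 9000-sample window:
+
+   ```shell
+   ros2 topic pub --once /ecg/ecg std_msgs/msg/Float32MultiArray "{data: [0.0, 0.0, 0.0]}"
+   ```
+
+2. 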
You are then ready to start the heart anomaly detection node: + + ```shell + ros2 run opendr_perception heart_anomaly_detection + ``` + The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-i or --input_ecg_topic INPUT_ECG_TOPIC`: topic name for input ECG data (default=`/ecg/ecg`) + - `-o or --output_heart_anomaly_topic OUTPUT_HEART_ANOMALY_TOPIC`: topic name for heart anomaly detection (default=`/opendr/heart_anomaly`) + - `--device DEVICE`: device to use, either `cpu` or `cuda`, falls back to `cpu` if GPU or CUDA is not found (default=`cuda`) + - `--model MODEL`: the model to use, choices are `anbof` or `gru` (default=`anbof`) + +3. Default output topics: + - Detection messages: `/opendr/heart_anomaly` + + For viewing the output, refer to the [notes above.](#notes) + +---- +## Dataset ROS2 Nodes + +The dataset nodes can be used to publish data from the disk, which is useful to test the functionality without the use of a sensor. +Dataset nodes use a provided `DatasetIterator` object that returns a `(Data, Target)` pair. +If the type of the `Data` object is correct, the node will transform it into a corresponding ROS2 message object and publish it to a desired topic. +The OpenDR toolkit currently provides two such nodes, an image dataset node and a point cloud dataset node. + +### Image Dataset ROS2 Node + +The image dataset node downloads a `nano_MOT20` dataset from OpenDR's FTP server and uses it to publish data to the ROS2 topic, +which is intended to be used with the [2D object tracking nodes](#2d-object-tracking-ros2-nodes). + +You can create an instance of this node with any `DatasetIterator` object that returns `(Image, Target)` as elements, +to use alongside other nodes and datasets. +You can inspect [the node](./opendr_perception/image_dataset_node.py) and modify it to your needs for other image datasets. + +To get an image from a dataset on the disk, you can start a `image_dataset.py` node as: +```shell +ros2 run opendr_perception image_dataset +``` +The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-o or --output_rgb_image_topic`: topic name to publish the data (default=`/opendr/dataset_image`) + - `-f or --fps FPS`: data fps (default=`10`) + - `-d or --dataset_path DATASET_PATH`: path to a dataset (default=`/MOT`) + - `-ks or --mot20_subsets_path MOT20_SUBSETS_PATH`: path to MOT20 subsets (default=`../../src/opendr/perception/object_tracking_2d/datasets/splits/nano_mot20.train`) + +### Point Cloud Dataset ROS2 Node + +The point cloud dataset node downloads a `nano_KITTI` dataset from OpenDR's FTP server and uses it to publish data to the ROS2 topic, +which is intended to be used with the [3D object detection node](#3d-object-detection-voxel-ros2-node), +as well as the [3D object tracking node](#3d-object-tracking-ab3dmot-ros2-node). + +You can create an instance of this node with any `DatasetIterator` object that returns `(PointCloud, Target)` as elements, +to use alongside other nodes and datasets. +You can inspect [the node](./opendr_perception/point_cloud_dataset_node.py) and modify it to your needs for other point cloud datasets. 
+ +To get a point cloud from a dataset on the disk, you can start a `point_cloud_dataset.py` node as: +```shell +ros2 run opendr_perception point_cloud_dataset +``` +The following optional arguments are available: + - `-h or --help`: show a help message and exit + - `-o or --output_point_cloud_topic`: topic name to publish the data (default=`/opendr/dataset_point_cloud`) + - `-f or --fps FPS`: data fps (default=`10`) + - `-d or --dataset_path DATASET_PATH`: path to a dataset, if it does not exist, nano KITTI dataset will be downloaded there (default=`/KITTI/opendr_nano_kitti`) + - `-ks or --kitti_subsets_path KITTI_SUBSETS_PATH`: path to KITTI subsets, used only if a KITTI dataset is downloaded (default=`../../src/opendr/perception/object_detection_3d/datasets/nano_kitti_subsets`) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/__init__.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/__init__.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/__init__.py rename to projects/opendr_ws_2/src/opendr_perception/opendr_perception/__init__.py diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/audiovisual_emotion_recognition_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/audiovisual_emotion_recognition_node.py new file mode 100644 index 0000000000..008b51d7b7 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/audiovisual_emotion_recognition_node.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import argparse +import numpy as np +import torch +import librosa +import cv2 + +import rclpy +from rclpy.node import Node +import message_filters +from sensor_msgs.msg import Image as ROS_Image +from audio_common_msgs.msg import AudioData +from vision_msgs.msg import Classification2D + +from opendr_bridge import ROS2Bridge +from opendr.perception.multimodal_human_centric import AudiovisualEmotionLearner +from opendr.perception.multimodal_human_centric import spatial_transforms as transforms +from opendr.engine.data import Video, Timeseries + + +class AudiovisualEmotionNode(Node): + + def __init__(self, input_video_topic="/image_raw", input_audio_topic="/audio", + output_emotions_topic="/opendr/audiovisual_emotion", buffer_size=3.6, device="cuda", + delay=0.1): + """ + Creates a ROS2 Node for audiovisual emotion recognition + :param input_video_topic: Topic from which we are reading the input video. 
Expects detected face of size 224x224 + :type input_video_topic: str + :param input_audio_topic: Topic from which we are reading the input audio + :type input_audio_topic: str + :param output_emotions_topic: Topic to which we are publishing the predicted class + :type output_emotions_topic: str + :param buffer_size: length of audio and video in sec + :type buffer_size: float + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param delay: Define the delay (in seconds) with which rgb message and depth message can be synchronized + :type delay: float + """ + super().__init__("opendr_audiovisual_emotion_recognition_node") + + self.publisher = self.create_publisher(Classification2D, output_emotions_topic, 1) + + video_sub = message_filters.Subscriber(self, ROS_Image, input_video_topic, qos_profile=1) + audio_sub = message_filters.Subscriber(self, AudioData, input_audio_topic, qos_profile=1) + # synchronize video and audio data topics + ts = message_filters.ApproximateTimeSynchronizer([video_sub, audio_sub], queue_size=10, slop=delay, + allow_headerless=True) + ts.registerCallback(self.callback) + + self.bridge = ROS2Bridge() + + self.avlearner = AudiovisualEmotionLearner(device=device, fusion='ia', mod_drop='zerodrop') + if not os.path.exists('model'): + self.avlearner.download('model') + self.avlearner.load('model') + + self.buffer_size = buffer_size + self.data_buffer = np.zeros((1)) + self.video_buffer = np.zeros((1, 224, 224, 3)) + + self.video_transform = transforms.Compose([ + transforms.ToTensor(255)]) + + self.get_logger().info("Audiovisual emotion recognition node started!") + + def callback(self, image_data, audio_data): + """ + Callback that process the input data and publishes to the corresponding topics + :param image_data: input image message, face image + :type image_data: sensor_msgs.msg.Image + :param audio_data: input audio message, speech + :type audio_data: audio_common_msgs.msg.AudioData + """ + audio_data = np.reshape(np.frombuffer(audio_data.data, dtype=np.int16)/32768.0, (1, -1)) + self.data_buffer = np.append(self.data_buffer, audio_data) + + image_data = self.bridge.from_ros_image(image_data, encoding='bgr8').convert(format='channels_last') + image_data = cv2.resize(image_data, (224, 224)) + + self.video_buffer = np.append(self.video_buffer, np.expand_dims(image_data.data, 0), axis=0) + + if self.data_buffer.shape[0] > 16000*self.buffer_size: + audio = librosa.feature.mfcc(self.data_buffer[1:], sr=16000, n_mfcc=10) + audio = Timeseries(audio) + + to_select = select_distributed(15, len(self.video_buffer)-1) + video = self.video_buffer[1:][to_select] + + video = [self.video_transform(img) for img in video] + video = Video(torch.stack(video, 0).permute(1, 0, 2, 3)) + + class_pred = self.avlearner.infer(audio, video) + + # Publish output + ros_class = self.bridge.from_category_to_rosclass(class_pred, self.get_clock().now().to_msg()) + self.publisher.publish(ros_class) + + self.data_buffer = np.zeros((1)) + self.video_buffer = np.zeros((1, 224, 224, 3)) + + +def select_distributed(m, n): return [i*n//m + n//(2*m) for i in range(m)] + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-iv", "--input_video_topic", type=str, default="/image_raw", + help="Listen to video input data on this topic") + parser.add_argument("-ia", "--input_audio_topic", type=str, default="/audio", + help="Listen to audio input data on this topic") + parser.add_argument("-o", 
"--output_emotions_topic", type=str, default="/opendr/audiovisual_emotion", + help="Topic name for output emotions recognition") + parser.add_argument("--device", type=str, default="cuda", + help="Device to use (cpu, cuda)", choices=["cuda", "cpu"]) + parser.add_argument("--buffer_size", type=float, default=3.6, + help="Size of the audio buffer in seconds") + parser.add_argument("--delay", help="The delay (in seconds) with which RGB message and" + "depth message can be synchronized", type=float, default=0.1) + + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU") + device = "cpu" + except: + print("Using CPU") + device = "cpu" + + emotion_node = AudiovisualEmotionNode(input_video_topic=args.input_video_topic, + input_audio_topic=args.input_audio_topic, + output_emotions_topic=args.output_emotions_topic, + buffer_size=args.buffer_size, device=device, delay=args.delay) + + rclpy.spin(emotion_node) + + emotion_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/face_detection_retinaface_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/face_detection_retinaface_node.py new file mode 100644 index 0000000000..b4c20114c8 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/face_detection_retinaface_node.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import mxnet as mx + +import rclpy +from rclpy.node import Node + +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import Detection2DArray +from opendr_bridge import ROS2Bridge + +from opendr.perception.object_detection_2d import RetinaFaceLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes +from opendr.engine.data import Image + + +class FaceDetectionNode(Node): + + def __init__(self, input_rgb_image_topic="image_raw", output_rgb_image_topic="/opendr/image_faces_annotated", + detections_topic="/opendr/faces", device="cuda", backbone="resnet"): + """ + Creates a ROS2 Node for face detection with Retinaface. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no face detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param backbone: retinaface backbone, options are either 'mnet' or 'resnet', + where 'mnet' detects masked faces as well + :type backbone: str + """ + super().__init__('opendr_face_detection_node') + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.face_publisher = self.create_publisher(Detection2DArray, detections_topic, 1) + else: + self.face_publisher = None + + self.bridge = ROS2Bridge() + + self.face_detector = RetinaFaceLearner(backbone=backbone, device=device) + self.face_detector.download(path=".", verbose=True) + self.face_detector.load("retinaface_{}".format(backbone)) + self.class_names = ["face", "masked_face"] + + self.get_logger().info("Face detection retinaface node initialized.") + + def callback(self, data): + """ + Callback that process the input data and publishes to the corresponding topics. + :param data: Input image message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run face detection + boxes = self.face_detector.infer(image) + + if self.face_publisher is not None: + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_boxes(boxes) # Convert to ROS boxes + self.face_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with face detection boxes + image = draw_bounding_boxes(image, boxes, class_names=self.class_names) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_faces_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/faces") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--backbone", + help="Retinaface backbone, options are either 'mnet' or 'resnet', where 'mnet' detects " + "masked faces as well", + type=str, default="resnet", choices=["resnet", "mnet"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and mx.context.num_gpus() > 0: + device = 
"cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + face_detection_node = FaceDetectionNode(device=device, backbone=args.backbone, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic) + + rclpy.spin(face_detection_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + face_detection_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/face_recognition_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/face_recognition_node.py new file mode 100644 index 0000000000..b774ea0eb9 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/face_recognition_node.py @@ -0,0 +1,193 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import cv2 +import argparse +import torch + +import rclpy +from rclpy.node import Node + +from std_msgs.msg import String +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import ObjectHypothesisWithPose +from opendr_bridge import ROS2Bridge + +from opendr.engine.data import Image +from opendr.perception.face_recognition import FaceRecognitionLearner +from opendr.perception.object_detection_2d import RetinaFaceLearner +from opendr.perception.object_detection_2d.datasets.transforms import BoundingBoxListToNumpyArray + + +class FaceRecognitionNode(Node): + + def __init__(self, input_rgb_image_topic="image_raw", output_rgb_image_topic="/opendr/image_face_reco_annotated", + detections_topic="/opendr/face_recognition", detections_id_topic="/opendr/face_recognition_id", + database_path="./database", device="cuda", backbone="mobilefacenet"): + """ + Creates a ROS2 Node for face recognition. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the recognized face information (if None, + no face recognition message is published) + :type detections_topic: str + :param detections_id_topic: Topic to which we are publishing the ID of the recognized person (if None, + no ID message is published) + :type detections_id_topic: str + :param device: Device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param backbone: Backbone network + :type backbone: str + :param database_path: Path of the directory where the images of the faces to be recognized are stored + :type database_path: str + """ + super().__init__('opendr_face_recognition_node') + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.face_publisher = self.create_publisher(ObjectHypothesisWithPose, detections_topic, 1) + else: + self.face_publisher = None + + if detections_id_topic is not None: + + self.face_id_publisher = self.create_publisher(String, detections_id_topic, 1) + else: + self.face_id_publisher = None + + self.bridge = ROS2Bridge() + + # Initialize the face recognizer + self.recognizer = FaceRecognitionLearner(device=device, mode='backbone_only', backbone=backbone) + self.recognizer.download(path=".") + self.recognizer.load(".") + self.recognizer.fit_reference(database_path, save_path=".", create_new=True) + + # Initialize the face detector + self.face_detector = RetinaFaceLearner(backbone='mnet', device=device) + self.face_detector.download(path=".", verbose=True) + self.face_detector.load("retinaface_{}".format('mnet')) + self.class_names = ["face", "masked_face"] + + self.get_logger().info("Face recognition node initialized.") + + def callback(self, data): + """ + Callback that process the input data and publishes to the corresponding topics. 
+ :param data: Input image message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + # Get an OpenCV image back + image = image.opencv() + + # Run face detection and recognition + if image is not None: + bounding_boxes = self.face_detector.infer(image) + if bounding_boxes: + bounding_boxes = BoundingBoxListToNumpyArray()(bounding_boxes) + boxes = bounding_boxes[:, :4] + for idx, box in enumerate(boxes): + (startX, startY, endX, endY) = int(box[0]), int(box[1]), int(box[2]), int(box[3]) + frame = image[startY:endY, startX:endX] + result = self.recognizer.infer(frame) + + # Publish face information and ID + if self.face_publisher is not None: + self.face_publisher.publish(self.bridge.to_ros_face(result)) + + if self.face_id_publisher is not None: + self.face_id_publisher.publish(self.bridge.to_ros_face_id(result)) + + if self.image_publisher is not None: + if result.description != 'Not found': + color = (0, 255, 0) + else: + color = (0, 0, 255) + # Annotate image with face detection/recognition boxes + cv2.rectangle(image, (startX, startY), (endX, endY), color, thickness=2) + cv2.putText(image, result.description, (startX, endY - 10), cv2.FONT_HERSHEY_SIMPLEX, + 1, color, 2, cv2.LINE_AA) + + if self.image_publisher is not None: + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_face_reco_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/face_recognition") + parser.add_argument("-id", "--detections_id_topic", help="Topic name for detection ID messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/face_recognition_id") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--backbone", help="Backbone network, defaults to mobilefacenet", + type=str, default="mobilefacenet", choices=["mobilefacenet"]) + parser.add_argument("--dataset_path", + help="Path of the directory where the images of the faces to be recognized are stored, " + "defaults to \"./database\"", + type=str, default="./database") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + face_recognition_node = FaceRecognitionNode(device=device, backbone=args.backbone, database_path=args.dataset_path, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic, + detections_id_topic=args.detections_id_topic) + + rclpy.spin(face_recognition_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + face_recognition_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/facial_emotion_estimation_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/facial_emotion_estimation_node.py new file mode 100644 index 0000000000..56b22309c0 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/facial_emotion_estimation_node.py @@ -0,0 +1,218 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import torch +import numpy as np +import cv2 +from torchvision import transforms +import PIL + +import rclpy +from rclpy.node import Node +from std_msgs.msg import String +from vision_msgs.msg import ObjectHypothesis +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROS2Bridge + +from opendr.engine.data import Image +from opendr.perception.facial_expression_recognition import FacialEmotionLearner +from opendr.perception.facial_expression_recognition import image_processing +from opendr.perception.object_detection_2d import RetinaFaceLearner +from opendr.perception.object_detection_2d.datasets.transforms import BoundingBoxListToNumpyArray + +INPUT_IMAGE_SIZE = (96, 96) +INPUT_IMAGE_NORMALIZATION_MEAN = [0.0, 0.0, 0.0] +INPUT_IMAGE_NORMALIZATION_STD = [1.0, 1.0, 1.0] + + +class FacialEmotionEstimationNode(Node): + def __init__(self, + face_detector_learner, + input_rgb_image_topic="/image_raw", + output_rgb_image_topic="/opendr/image_emotion_estimation_annotated", + output_emotions_topic="/opendr/facial_emotion_estimation", + output_emotions_description_topic="/opendr/facial_emotion_estimation_description", + device="cuda"): + """ + Creates a ROS Node for facial emotion estimation. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param output_emotions_topic: Topic to which we are publishing the facial emotion results + (if None, we are not publishing the info) + :type output_emotions_topic: str + :param output_emotions_description_topic: Topic to which we are publishing the description of the estimated + facial emotion (if None, we are not publishing the description) + :type output_emotions_description_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + """ + super().__init__('opendr_facial_emotion_estimation_node') + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + self.bridge = ROS2Bridge() + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if output_emotions_topic is not None: + self.hypothesis_publisher = self.create_publisher(ObjectHypothesis, output_emotions_topic, 1) + else: + self.hypothesis_publisher = None + + if output_emotions_description_topic is not None: + self.string_publisher = self.create_publisher(String, output_emotions_description_topic, 1) + else: + self.string_publisher = None + + # Initialize the face detector + self.face_detector = face_detector_learner + + # Initialize the facial emotion estimator + self.facial_emotion_estimator = FacialEmotionLearner(device=device, batch_size=2, + ensemble_size=9, + name_experiment='esr_9') + self.facial_emotion_estimator.init_model(num_branches=9) + model_saved_path = self.facial_emotion_estimator.download(path=None, mode="pretrained") + self.facial_emotion_estimator.load(ensemble_size=9, path_to_saved_network=model_saved_path) + + self.get_logger().info("Facial emotion estimation node started.") + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: input message + :type data: sensor_msgs.msg.Image + """ + + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8').opencv() + emotion = None + # Run face detection and emotion estimation + + if image is not None: + bounding_boxes = self.face_detector.infer(image) + if bounding_boxes: + bounding_boxes = BoundingBoxListToNumpyArray()(bounding_boxes) + boxes = bounding_boxes[:, :4] + for idx, box in enumerate(boxes): + (startX, startY, endX, endY) = int(box[0]), int(box[1]), int(box[2]), int(box[3]) + face_crop = image[startY:endY, startX:endX] + + # Preprocess detected face + input_face = _pre_process_input_image(face_crop) + + # Recognize facial expression + emotion, affect = self.facial_emotion_estimator.infer(input_face) + + # Converts from Tensor to ndarray + affect = np.array([a.cpu().detach().numpy() for a in affect]) + affect = affect[0] # a numpy array of valence and arousal values + emotion = emotion[0] # the emotion class with confidence tensor + + cv2.rectangle(image, (startX, startY), (endX, endY), (0, 255, 255), thickness=2) + cv2.putText(image, "Valence: %.2f" % affect[0], (startX, endY - 30), cv2.FONT_HERSHEY_SIMPLEX, + 0.5, (0, 255, 255), 1, cv2.LINE_AA) + cv2.putText(image, "Arousal: %.2f" % affect[1], (startX, endY - 15), cv2.FONT_HERSHEY_SIMPLEX, + 0.5, (0, 255, 255), 1, cv2.LINE_AA) + cv2.putText(image, emotion.description, (startX, endY), cv2.FONT_HERSHEY_SIMPLEX, + 0.5, (0, 255, 255), 1, cv2.LINE_AA) + + if self.hypothesis_publisher is not None and emotion: + self.hypothesis_publisher.publish(self.bridge.to_ros_category(emotion)) + + if self.string_publisher is not None and emotion: + self.string_publisher.publish(self.bridge.to_ros_category_description(emotion)) + + if self.image_publisher is not None: + # Convert the annotated OpenDR image to ROS image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def _pre_process_input_image(image): + """ + Pre-processes an image for ESR-9. 
+ + :param image: (ndarray) + :return: (ndarray) image + """ + + image = image_processing.resize(image, INPUT_IMAGE_SIZE) + image = PIL.Image.fromarray(image) + image = transforms.Normalize(mean=INPUT_IMAGE_NORMALIZATION_MEAN, + std=INPUT_IMAGE_NORMALIZATION_STD)(transforms.ToTensor()(image)).unsqueeze(0) + + return image + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument('-i', '--input_rgb_image_topic', type=str, help='Topic name for input rgb image', + default='/image_raw') + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_emotion_estimation_annotated") + parser.add_argument("-e", "--output_emotions_topic", help="Topic name for output emotion", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/facial_emotion_estimation") + parser.add_argument('-m', '--output_emotions_description_topic', + help='Topic to which we are publishing the description of the estimated facial emotion', + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/facial_emotion_estimation_description") + parser.add_argument('-d', '--device', help='Device to use, either cpu or cuda', + type=str, default="cuda", choices=["cuda", "cpu"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + print("GPU found.") + device = 'cuda' + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU") + device = 'cpu' + except: + print("Using CPU") + device = 'cpu' + + # Initialize the face detector + face_detector = RetinaFaceLearner(backbone="resnet", device=device) + face_detector.download(path=".", verbose=True) + face_detector.load("retinaface_{}".format("resnet")) + + facial_emotion_estimation_node = FacialEmotionEstimationNode( + face_detector, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + output_emotions_topic=args.output_emotions_topic, + output_emotions_description_topic=args.output_emotions_description_topic, + device=device) + + rclpy.spin(facial_emotion_estimation_node) + facial_emotion_estimation_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/fall_detection_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/fall_detection_node.py new file mode 100644 index 0000000000..3057bc7f83 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/fall_detection_node.py @@ -0,0 +1,189 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import cv2 +import argparse +import torch + +import rclpy +from rclpy.node import Node + +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import Detection2DArray +from opendr_bridge import ROS2Bridge + +from opendr.engine.data import Image +from opendr.engine.target import BoundingBox, BoundingBoxList +from opendr.perception.pose_estimation import get_bbox +from opendr.perception.pose_estimation import LightweightOpenPoseLearner +from opendr.perception.fall_detection import FallDetectorLearner + + +class FallDetectionNode(Node): + + def __init__(self, input_rgb_image_topic="image_raw", output_rgb_image_topic="/opendr/image_fallen_annotated", + detections_topic="/opendr/fallen", device="cuda", + num_refinement_stages=2, use_stride=False, half_precision=False): + """ + Creates a ROS2 Node for rule-based fall detection based on Lightweight OpenPose. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no pose detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param num_refinement_stages: Specifies the number of pose estimation refinement stages are added on the + model's head, including the initial stage. Can be 0, 1 or 2, with more stages meaning slower and more accurate + inference + :type num_refinement_stages: int + :param use_stride: Whether to add a stride value in the model, which reduces accuracy but increases + inference speed + :type use_stride: bool + :param half_precision: Enables inference using half (fp16) precision instead of single (fp32) precision. + Valid only for GPU-based inference + :type half_precision: bool + """ + super().__init__('opendr_fall_detection_node') + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.fall_publisher = self.create_publisher(Detection2DArray, detections_topic, 1) + else: + self.fall_publisher = None + + self.bridge = ROS2Bridge() + + # Initialize the pose estimation learner + self.pose_estimator = LightweightOpenPoseLearner(device=device, num_refinement_stages=num_refinement_stages, + mobilenet_use_stride=use_stride, + half_precision=half_precision) + self.pose_estimator.download(path=".", verbose=True) + self.pose_estimator.load("openpose_default") + + # Initialize the fall detection learner + self.fall_detector = FallDetectorLearner(self.pose_estimator) + + self.get_logger().info("Fall detection node initialized.") + + def callback(self, data): + """ + Callback that process the input data and publishes to the corresponding topics. 
+ :param data: Input image message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run fall detection + detections = self.fall_detector.infer(image) + + # Get an OpenCV image back + image = image.opencv() + + bboxes = BoundingBoxList([]) + fallen_pose_id = 0 + for detection in detections: + fallen = detection[0].data + pose = detection[2] + x, y, w, h = get_bbox(pose) + + if fallen == 1: + if self.image_publisher is not None: + # Paint person bounding box inferred from pose + color = (0, 0, 255) + cv2.rectangle(image, (x, y), (x + w, y + h), color, 2) + cv2.putText(image, "Fallen person", (x, y + h - 10), cv2.FONT_HERSHEY_SIMPLEX, + 1, color, 2, cv2.LINE_AA) + + if self.fall_publisher is not None: + # Convert detected boxes to ROS type and add to list + bboxes.data.append(BoundingBox(left=x, top=y, width=w, height=h, name=fallen_pose_id)) + fallen_pose_id += 1 + + if self.fall_publisher is not None: + if len(bboxes) > 0: + self.fall_publisher.publish(self.bridge.to_ros_boxes(bboxes)) + + if self.image_publisher is not None: + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_fallen_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/fallen") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--accelerate", help="Enables acceleration flags (e.g., stride)", default=False, + action="store_true") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + if args.accelerate: + stride = True + stages = 0 + half_prec = True + else: + stride = False + stages = 2 + half_prec = False + + fall_detection_node = FallDetectionNode(device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic, + num_refinement_stages=stages, use_stride=stride, half_precision=half_prec) + + rclpy.spin(fall_detection_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + fall_detection_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/heart_anomaly_detection_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/heart_anomaly_detection_node.py new file mode 100644 index 0000000000..7934c8ac19 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/heart_anomaly_detection_node.py @@ -0,0 +1,123 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*-_ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse +import torch + +import rclpy +from rclpy.node import Node +from vision_msgs.msg import Classification2D +from std_msgs.msg import Float32MultiArray + +from opendr_bridge import ROS2Bridge +from opendr.perception.heart_anomaly_detection import GatedRecurrentUnitLearner, AttentionNeuralBagOfFeatureLearner + + +class HeartAnomalyNode(Node): + + def __init__(self, input_ecg_topic="/ecg/ecg", output_heart_anomaly_topic="/opendr/heart_anomaly", + device="cuda", model="anbof"): + """ + Creates a ROS2 Node for heart anomaly (atrial fibrillation) detection from ecg data + :param input_ecg_topic: Topic from which we are reading the input array data + :type input_ecg_topic: str + :param output_heart_anomaly_topic: Topic to which we are publishing the predicted class + :type output_heart_anomaly_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param model: model to use: anbof or gru + :type model: str + """ + super().__init__("opendr_heart_anomaly_detection_node") + + self.publisher = self.create_publisher(Classification2D, output_heart_anomaly_topic, 1) + + self.subscriber = self.create_subscription(Float32MultiArray, input_ecg_topic, self.callback, 1) + + self.bridge = ROS2Bridge() + + # AF dataset + self.channels = 1 + self.series_length = 9000 + + if model == 'gru': + self.learner = GatedRecurrentUnitLearner(in_channels=self.channels, series_length=self.series_length, + n_class=4, device=device) + elif model == 'anbof': + self.learner = AttentionNeuralBagOfFeatureLearner(in_channels=self.channels, series_length=self.series_length, + n_class=4, device=device, attention_type='temporal') + + self.learner.download(path='.', fold_idx=0) + self.learner.load(path='.') + + self.get_logger().info("Heart anomaly detection node initialized.") + + def callback(self, msg_data): + """ + Callback that process the input data and publishes to the corresponding topics + :param msg_data: input message + :type msg_data: std_msgs.msg.Float32MultiArray + """ + # Convert Float32MultiArray to OpenDR Timeseries + data = self.bridge.from_rosarray_to_timeseries(msg_data, self.channels, self.series_length) + + # Run ecg classification + class_pred = self.learner.infer(data) + + # Publish results + ros_class = self.bridge.from_category_to_rosclass(class_pred, self.get_clock().now().to_msg()) + self.publisher.publish(ros_class) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_ecg_topic", type=str, default="/ecg/ecg", + help="listen to input ECG data on this topic") + parser.add_argument("-o", "--output_heart_anomaly_topic", type=str, default="/opendr/heart_anomaly", + help="Topic name for heart anomaly detection topic") + parser.add_argument("--model", type=str, default="anbof", help="model to be used for prediction: anbof or gru", + choices=["anbof", "gru"]) + parser.add_argument("--device", type=str, default="cuda", help="Device to use (cpu, cuda)", + choices=["cuda", "cpu"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU") + device = "cpu" + except: + print("Using CPU") + device = "cpu" + + heart_anomaly_detection_node = HeartAnomalyNode(input_ecg_topic=args.input_ecg_topic, + output_heart_anomaly_topic=args.output_heart_anomaly_topic, + model=args.model, device=device) + + rclpy.spin(heart_anomaly_detection_node) + + heart_anomaly_detection_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/hr_pose_estimation_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/hr_pose_estimation_node.py new file mode 100644 index 0000000000..f8c6a1e30e --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/hr_pose_estimation_node.py @@ -0,0 +1,173 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import torch + +import rclpy +from rclpy.node import Node + +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROS2Bridge +from opendr_interface.msg import OpenDRPose2D + +from opendr.engine.data import Image +from opendr.perception.pose_estimation import draw +from opendr.perception.pose_estimation import HighResolutionPoseEstimationLearner + + +class PoseEstimationNode(Node): + + def __init__(self, input_rgb_image_topic="image_raw", output_rgb_image_topic="/opendr/image_pose_annotated", + detections_topic="/opendr/poses", device="cuda", + num_refinement_stages=2, use_stride=False, half_precision=False): + """ + Creates a ROS2 Node for pose estimation with Lightweight OpenPose. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no pose detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param num_refinement_stages: Specifies the number of pose estimation refinement stages are added on the + model's head, including the initial stage. Can be 0, 1 or 2, with more stages meaning slower and more accurate + inference + :type num_refinement_stages: int + :param use_stride: Whether to add a stride value in the model, which reduces accuracy but increases + inference speed + :type use_stride: bool + :param half_precision: Enables inference using half (fp16) precision instead of single (fp32) precision. 
+ Valid only for GPU-based inference + :type half_precision: bool + """ + super().__init__('opendr_pose_estimation_node') + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.pose_publisher = self.create_publisher(OpenDRPose2D, detections_topic, 1) + else: + self.pose_publisher = None + + self.bridge = ROS2Bridge() + + # Initialize the high resolution pose estimation learner + self.pose_estimator = HighResolutionPoseEstimationLearner(device=device, num_refinement_stages=num_refinement_stages, + mobilenet_use_stride=use_stride, + half_precision=half_precision) + self.pose_estimator.download(path=".", verbose=True) + self.pose_estimator.load("openpose_default") + + self.get_logger().info("Pose estimation node initialized.") + + def callback(self, data): + """ + Callback that process the input data and publishes to the corresponding topics. + :param data: Input image message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run pose estimation + poses = self.pose_estimator.infer(image) + + # Publish detections in ROS message + if self.pose_publisher is not None: + for pose in poses: + if pose.id is None: # Temporary fix for pose not having id + pose.id = -1 + if self.pose_publisher is not None: + # Convert OpenDR pose to ROS2 pose message using bridge and publish it + self.pose_publisher.publish(self.bridge.to_ros_pose(pose)) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with poses + for pose in poses: + draw(image, pose) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image, if \"None\" " + "no output image is published", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_pose_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages, if \"None\" " + "no detection message is published", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/poses") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--accelerate", help="Enables acceleration flags (e.g., stride)", default=False, + action="store_true") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + if args.accelerate: + stride = True + stages = 0 + half_prec = True + else: + stride = False + stages = 2 + half_prec = False + + pose_estimator_node = PoseEstimationNode(device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic, + num_refinement_stages=stages, use_stride=stride, half_precision=half_prec) + + rclpy.spin(pose_estimator_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + pose_estimator_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/image_dataset_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/image_dataset_node.py new file mode 100644 index 0000000000..3587e37aef --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/image_dataset_node.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse +import os +import rclpy +from rclpy.node import Node +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROS2Bridge +from opendr.engine.datasets import DatasetIterator +from opendr.perception.object_tracking_2d import MotDataset, RawMotDatasetIterator + + +class ImageDatasetNode(Node): + def __init__( + self, + dataset: DatasetIterator, + output_rgb_image_topic="/opendr/dataset_image", + data_fps=10, + ): + """ + Creates a ROS2 Node for publishing dataset images + """ + + super().__init__('opendr_image_dataset_node') + + self.dataset = dataset + self.bridge = ROS2Bridge() + self.timer = self.create_timer(1.0 / data_fps, self.timer_callback) + self.sample_index = 0 + + self.output_image_publisher = self.create_publisher( + ROS_Image, output_rgb_image_topic, 1 + ) + self.get_logger().info("Publishing images.") + + def timer_callback(self): + image = self.dataset[self.sample_index % len(self.dataset)][0] + # Dataset should have an (Image, Target) pair as elements + + message = self.bridge.to_ros_image( + image, encoding="bgr8" + ) + self.output_image_publisher.publish(message) + + self.sample_index += 1 + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-d", "--dataset_path", help="Path to a dataset", + type=str, default="MOT") + parser.add_argument( + "-ks", "--mot20_subsets_path", help="Path to mot20 subsets", + type=str, default=os.path.join( + "..", "..", "src", "opendr", "perception", "object_tracking_2d", + "datasets", "splits", "nano_mot20.train" + ) + ) + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name to publish the data", + type=str, default="/opendr/dataset_image") + parser.add_argument("-f", "--fps", help="Data FPS", + type=float, default=30) + args = parser.parse_args() + + dataset_path = args.dataset_path + mot20_subsets_path = args.mot20_subsets_path + output_rgb_image_topic = args.output_rgb_image_topic + data_fps = args.fps + + if not os.path.exists(dataset_path): + dataset_path = MotDataset.download_nano_mot20( + "MOT", True + ).path + + dataset = RawMotDatasetIterator( + dataset_path, + { + "mot20": mot20_subsets_path + }, + scan_labels=False + ) + dataset_node = ImageDatasetNode( + dataset, + output_rgb_image_topic=output_rgb_image_topic, + data_fps=data_fps, + ) + + rclpy.spin(dataset_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + dataset_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/landmark_based_facial_expression_recognition_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/landmark_based_facial_expression_recognition_node.py new file mode 100644 index 0000000000..cb43293f19 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/landmark_based_facial_expression_recognition_node.py @@ -0,0 +1,184 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import torch +import numpy as np + +import rclpy +from rclpy.node import Node +from std_msgs.msg import String +from vision_msgs.msg import ObjectHypothesis +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROS2Bridge + +from opendr.perception.facial_expression_recognition import ProgressiveSpatioTemporalBLNLearner +from opendr.perception.facial_expression_recognition import landmark_extractor +from opendr.perception.facial_expression_recognition import gen_muscle_data +from opendr.perception.facial_expression_recognition import data_normalization + + +class LandmarkFacialExpressionRecognitionNode(Node): + + def __init__(self, input_rgb_image_topic="image_raw", + output_category_topic="/opendr/landmark_expression_recognition", + output_category_description_topic="/opendr/landmark_expression_recognition_description", + device="cpu", model='pstbln_afew', shape_predictor='./predictor_path'): + """ + Creates a ROS2 Node for landmark-based facial expression recognition. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_category_topic: Topic to which we are publishing the recognized facial expression category info + (if None, we are not publishing the info) + :type output_category_topic: str + :param output_category_description_topic: Topic to which we are publishing the description of the recognized + facial expression (if None, we are not publishing the description) + :type output_category_description_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param model: model to use for landmark-based facial expression recognition. 
+ (Options: 'pstbln_ck+', 'pstbln_casia', 'pstbln_afew') + :type model: str + :param shape_predictor: pretrained model to use for landmark extraction from a facial image + :type shape_predictor: str + """ + super().__init__('opendr_landmark_based_facial_expression_recognition_node') + # Set up ROS topics and bridge + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_category_topic is not None: + self.hypothesis_publisher = self.create_publisher(ObjectHypothesis, output_category_topic, 1) + else: + self.hypothesis_publisher = None + + if output_category_description_topic is not None: + self.string_publisher = self.create_publisher(String, output_category_description_topic, 1) + else: + self.string_publisher = None + + self.bridge = ROS2Bridge() + + # Initialize the landmark-based facial expression recognition + if model == 'pstbln_ck+': + num_point = 303 + num_class = 7 + elif model == 'pstbln_casia': + num_point = 309 + num_class = 6 + elif model == 'pstbln_afew': + num_point = 312 + num_class = 7 + self.model_name, self.dataset_name = model.split("_") + self.expression_classifier = ProgressiveSpatioTemporalBLNLearner(device=device, dataset_name=self.dataset_name, + num_class=num_class, num_point=num_point, + num_person=1, in_channels=2, + blocksize=5, topology=[15, 10, 15, 5, 5, 10]) + model_saved_path = "./pretrained_models/" + model + self.expression_classifier.load(model_saved_path, model) + self.shape_predictor = shape_predictor + + self.get_logger().info("Landmark-based facial expression recognition node initialized.") + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics + :param data: input message + :type data: sensor_msgs.msg.Image + """ + + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + landmarks = landmark_extractor(image, './landmarks.npy', self.shape_predictor) + + # Generate sequence numpy data from the extracted landmarks and normalize it: + + numpy_data = _landmark2numpy(landmarks) + norm_data = data_normalization(numpy_data) + muscle_data = gen_muscle_data(norm_data, './muscle_data') + + # Run expression recognition + category = self.expression_classifier.infer(muscle_data) + + if self.hypothesis_publisher is not None: + self.hypothesis_publisher.publish(self.bridge.to_ros_category(category)) + + if self.string_publisher is not None: + self.string_publisher.publish(self.bridge.to_ros_category_description(category)) + + +def _landmark2numpy(landmarks): + num_landmarks = 68 + num_dim = 2 # feature dimension for each facial landmark + num_faces = 1 # number of faces in each frame + num_frames = 15 + numpy_data = np.zeros((1, num_dim, num_frames, num_landmarks, num_faces)) + for t in range(num_frames): + numpy_data[0, 0:num_dim, t, :, 0] = landmarks + return numpy_data + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input image", + type=str, default="image_raw") + parser.add_argument("-o", "--output_category_topic", help="Topic name for output recognized category", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/landmark_expression_recognition") + parser.add_argument("-d", "--output_category_description_topic", help="Topic name for category description", + type=lambda value: value if value.lower() != "none" else None, +
default="/opendr/landmark_expression_recognition_description") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--model", help="Model to use, either 'pstbln_ck+', 'pstbln_casia', 'pstbln_afew'", + type=str, default="pstbln_afew", choices=['pstbln_ck+', 'pstbln_casia', 'pstbln_afew']) + parser.add_argument("-s", "--shape_predictor", help="Shape predictor (landmark_extractor) to use", + type=str, default='./predictor_path') + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + landmark_expression_estimation_node = \ + LandmarkFacialExpressionRecognitionNode( + input_rgb_image_topic=args.input_rgb_image_topic, + output_category_topic=args.output_category_topic, + output_category_description_topic=args.output_category_description_topic, + device=device, model=args.model, + shape_predictor=args.shape_predictor) + + rclpy.spin(landmark_expression_estimation_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + landmark_expression_estimation_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_centernet_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_centernet_node.py new file mode 100644 index 0000000000..e0ba51c629 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_centernet_node.py @@ -0,0 +1,143 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import mxnet as mx + +import rclpy +from rclpy.node import Node + +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import Detection2DArray +from opendr_bridge import ROS2Bridge + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import CenterNetDetectorLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +class ObjectDetectionCenterNetNode(Node): + + def __init__(self, input_rgb_image_topic="image_raw", output_rgb_image_topic="/opendr/image_objects_annotated", + detections_topic="/opendr/objects", device="cuda", backbone="resnet50_v1b"): + """ + Creates a ROS2 Node for object detection with Centernet. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no object detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param backbone: backbone network + :type backbone: str + """ + super().__init__('opendr_object_detection_2d_centernet_node') + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.object_publisher = self.create_publisher(Detection2DArray, detections_topic, 1) + else: + self.object_publisher = None + + self.bridge = ROS2Bridge() + + self.object_detector = CenterNetDetectorLearner(backbone=backbone, device=device) + self.object_detector.download(path=".", verbose=True) + self.object_detector.load("centernet_default") + + self.get_logger().info("Object Detection 2D Centernet node initialized.") + + def callback(self, data): + """ + Callback that process the input data and publishes to the corresponding topics. + :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run object detection + boxes = self.object_detector.infer(image, threshold=0.45, keep_size=False) + + if self.object_publisher is not None: + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_boxes(boxes) # Convert to ROS boxes + self.object_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with object detection boxes + image = draw_bounding_boxes(image, boxes, class_names=self.object_detector.classes) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--backbone", help="Backbone network, defaults to \"resnet50_v1b\"", + type=str, default="resnet50_v1b", choices=["resnet50_v1b"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and mx.context.num_gpus() > 0: + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detection_centernet_node = ObjectDetectionCenterNetNode(device=device, backbone=args.backbone, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic) + + rclpy.spin(object_detection_centernet_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + object_detection_centernet_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_detr_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_detr_node.py new file mode 100644 index 0000000000..154dabf79f --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_detr_node.py @@ -0,0 +1,242 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import torch +import numpy as np + +import rclpy +from rclpy.node import Node + +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import Detection2DArray +from opendr_bridge import ROS2Bridge + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import DetrLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +class ObjectDetectionDetrNode(Node): + def __init__( + self, + input_rgb_image_topic="image_raw", + output_rgb_image_topic="/opendr/image_objects_annotated", + detections_topic="/opendr/objects", + device="cuda", + ): + """ + Creates a ROS2 Node for object detection with DETR. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no object detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + """ + super().__init__("opendr_object_detection_2d_detr_node") + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.detection_publisher = self.create_publisher(Detection2DArray, detections_topic, 1) + else: + self.detection_publisher = None + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + self.bridge = ROS2Bridge() + + self.class_names = [ + "N/A", + "person", + "bicycle", + "car", + "motorcycle", + "airplane", + "bus", + "train", + "truck", + "boat", + "traffic light", + "fire hydrant", + "N/A", + "stop sign", + "parking meter", + "bench", + "bird", + "cat", + "dog", + "horse", + "sheep", + "cow", + "elephant", + "bear", + "zebra", + "giraffe", + "N/A", + "backpack", + "umbrella", + "N/A", + "N/A", + "handbag", + "tie", + "suitcase", + "frisbee", + "skis", + "snowboard", + "sports ball", + "kite", + "baseball bat", + "baseball glove", + "skateboard", + "surfboard", + "tennis racket", + "bottle", + "N/A", + "wine glass", + "cup", + "fork", + "knife", + "spoon", + "bowl", + "banana", + "apple", + "sandwich", + "orange", + "broccoli", + "carrot", + "hot dog", + "pizza", + "donut", + "cake", + "chair", + "couch", + "potted plant", + "bed", + "N/A", + "dining table", + "N/A", + "N/A", + "toilet", + "N/A", + "tv", + "laptop", + "mouse", + "remote", + "keyboard", + "cell phone", + "microwave", + "oven", + "toaster", + "sink", + "refrigerator", + "N/A", + "book", + "clock", + "vase", + "scissors", + "teddy bear", + "hair drier", + "toothbrush", + ] + + # Initialize the detection estimation + self.object_detector = DetrLearner(device=device) + self.object_detector.download(path=".", verbose=True) + + self.get_logger().info("Object Detection 2D DETR node initialized.") + + def callback(self, data): + """ + Callback that process the input data and publishes to the corresponding topics + :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding="bgr8") + + # Run detection estimation + boxes = self.object_detector.infer(image) + + # Annotate image and publish results: + if self.detection_publisher is not None: + ros_detection = self.bridge.to_ros_bounding_box_list(boxes) + self.detection_publisher.publish(ros_detection) + # We get can the data back using self.bridge.from_ros_bounding_box_list(ros_detection) + # e.g., opendr_detection = self.bridge.from_ros_bounding_box_list(ros_detection) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = np.float32(image.opencv()) + image = draw_bounding_boxes(image, boxes, class_names=self.class_names) + message = self.bridge.to_ros_image(Image(image), encoding="bgr8") + self.image_publisher.publish(message) + + +def main(args=None): + rclpy.init(args=args) + + parser = 
argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detection_detr_node = ObjectDetectionDetrNode( + device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic, + ) + + rclpy.spin(object_detection_detr_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + object_detection_detr_node.destroy_node() + rclpy.shutdown() + + +if __name__ == "__main__": + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_gem_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_gem_node.py new file mode 100644 index 0000000000..9f0b3b9760 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_gem_node.py @@ -0,0 +1,282 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
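+# Usage sketch (the topic names below are this script's own defaults; the invocation itself is only an
+# assumed example): GEM fuses a time-synchronized RGB / infrared image pair, so both input topics must
+# be publishing before the callback fires, e.g.:
+#
+#   python3 object_detection_2d_gem_node.py -ic /camera/color/image_raw -ii /camera/infra/image_raw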
+ + +import argparse +import cv2 +import message_filters +import numpy as np +import rclpy +import torch +from rclpy.node import Node +from opendr_bridge import ROS2Bridge +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import Detection2DArray + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import GemLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +class ObjectDetectionGemNode(Node): + def __init__( + self, + input_rgb_image_topic="/camera/color/image_raw", + input_infra_image_topic="/camera/infra/image_raw", + output_rgb_image_topic="/opendr/rgb_image_objects_annotated", + output_infra_image_topic="/opendr/infra_image_objects_annotated", + detections_topic="/opendr/objects", + device="cuda", + pts_rgb=None, + pts_infra=None, + ): + """ + Creates a ROS2 Node for object detection with GEM + :param input_rgb_image_topic: Topic from which we are reading the input rgb image + :type input_rgb_image_topic: str + :param input_infra_image_topic: Topic from which we are reading the input infrared image + :type: input_infra_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated rgb image (if None, we are not + publishing annotated image) + :type output_rgb_image_topic: str + :param output_infra_image_topic: Topic to which we are publishing the annotated infrared image (if None, we are not + publishing annotated image) + :type output_infra_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, we are + not publishing annotations) + :type detections_topic: str + :param device: Device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param pts_rgb: Point on the rgb image that define alignment with the infrared image. These are camera + specific and can be obtained using get_color_infra_alignment.py which is located in the + opendr/perception/object_detection2d/utils module. + :type pts_rgb: {list, numpy.ndarray} + :param pts_infra: Points on the infrared image that define alignment with rgb image. These are camera specific + and can be obtained using get_color_infra_alignment.py which is located in the + opendr/perception/object_detection2d/utils module. + :type pts_infra: {list, numpy.ndarray} + """ + super().__init__("opendr_object_detection_2d_gem_node") + + if output_rgb_image_topic is not None: + self.rgb_publisher = self.create_publisher(msg_type=ROS_Image, topic=output_rgb_image_topic, qos_profile=10) + else: + self.rgb_publisher = None + if output_infra_image_topic is not None: + self.ir_publisher = self.create_publisher(msg_type=ROS_Image, topic=output_infra_image_topic, qos_profile=10) + else: + self.ir_publisher = None + + if detections_topic is not None: + self.detection_publisher = self.create_publisher(msg_type=Detection2DArray, topic=detections_topic, qos_profile=10) + else: + self.detection_publisher = None + if pts_infra is None: + pts_infra = np.array( + [ + [478, 248], + [465, 338], + [458, 325], + [468, 256], + [341, 240], + [335, 310], + [324, 321], + [311, 383], + [434, 365], + [135, 384], + [67, 257], + [167, 206], + [124, 131], + [364, 276], + [424, 269], + [277, 131], + [41, 310], + [202, 320], + [188, 318], + [188, 308], + [196, 241], + [499, 317], + [311, 164], + [220, 216], + [435, 352], + [213, 363], + [390, 364], + [212, 368], + [390, 370], + [467, 324], + [415, 364], + ] + ) + self.get_logger().warn( + "\nUsing default calibration values for pts_infra!" 
+ + "\nThese are probably incorrect." + + "\nThe correct values for pts_infra can be found by running get_rgb_infra_alignment.py." + + "\nThis file is located in the opendr/perception/object_detection2d/utils module." + ) + if pts_rgb is None: + pts_rgb = np.array( + [ + [910, 397], + [889, 572], + [874, 552], + [891, 411], + [635, 385], + [619, 525], + [603, 544], + [576, 682], + [810, 619], + [216, 688], + [90, 423], + [281, 310], + [193, 163], + [684, 449], + [806, 431], + [504, 170], + [24, 538], + [353, 552], + [323, 550], + [323, 529], + [344, 387], + [961, 533], + [570, 233], + [392, 336], + [831, 610], + [378, 638], + [742, 630], + [378, 648], + [742, 640], + [895, 550], + [787, 630], + ] + ) + self.get_logger().warn( + "\nUsing default calibration values for pts_rgb!" + + "\nThese are probably incorrect." + + "\nThe correct values for pts_rgb can be found by running get_color_infra_alignment.py." + + "\nThis file is located in the opendr/perception/object_detection2d/utils module." + ) + # Object classes + self.classes = ["N/A", "chair", "cycle", "bin", "laptop", "drill", "rocker"] + + # Estimating Homography matrix for aligning infra with rgb + self.h, status = cv2.findHomography(pts_infra, pts_rgb) + + self.bridge = ROS2Bridge() + + # Initialize the detection estimation + model_backbone = "resnet50" + + self.gem_learner = GemLearner( + backbone=model_backbone, + num_classes=7, + device=device, + ) + self.gem_learner.fusion_method = "sc_avg" + self.gem_learner.download(path=".", verbose=True) + + # Subscribers + msg_rgb = message_filters.Subscriber(self, ROS_Image, input_rgb_image_topic, 1) + msg_ir = message_filters.Subscriber(self, ROS_Image, input_infra_image_topic, 1) + + sync = message_filters.TimeSynchronizer([msg_rgb, msg_ir], 1) + sync.registerCallback(self.callback) + + def callback(self, msg_rgb, msg_ir): + """ + Callback that process the input data and publishes to the corresponding topics + :param msg_rgb: input rgb image message + :type msg_rgb: sensor_msgs.msg.Image + :param msg_ir: input infrared image message + :type msg_ir: sensor_msgs.msg.Image + """ + # Convert images to OpenDR standard + image_rgb = self.bridge.from_ros_image(msg_rgb).opencv() + image_ir_raw = self.bridge.from_ros_image(msg_ir, "bgr8").opencv() + image_ir = cv2.warpPerspective(image_ir_raw, self.h, (image_rgb.shape[1], image_rgb.shape[0])) + + # Perform inference on images + boxes, w_sensor1, _ = self.gem_learner.infer(image_rgb, image_ir) + + # Annotate image and publish results: + if self.detection_publisher is not None: + ros_detection = self.bridge.to_ros_bounding_box_list(boxes) + self.detection_publisher.publish(ros_detection) + # We can get the data back using self.bridge.from_ros_bounding_box_list(ros_detection) + # e.g., opendr_detection = self.bridge.from_ros_bounding_box_list(ros_detection) + + if self.rgb_publisher is not None: + plot_rgb = draw_bounding_boxes(image_rgb, boxes, class_names=self.classes) + message = self.bridge.to_ros_image(Image(np.uint8(plot_rgb))) + self.rgb_publisher.publish(message) + if self.ir_publisher is not None: + plot_ir = draw_bounding_boxes(image_ir, boxes, class_names=self.classes) + message = self.bridge.to_ros_image(Image(np.uint8(plot_ir))) + self.ir_publisher.publish(message) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-ic", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/camera/color/image_raw") + parser.add_argument("-ii", 
"--input_infra_image_topic", help="Topic name for input infrared image", + type=str, default="/camera/infra/image_raw") + parser.add_argument("-oc", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/rgb_image_objects_annotated") + parser.add_argument("-oi", "--output_infra_image_topic", help="Topic name for output annotated infrared image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/infra_image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("--device", help='Device to use, either "cpu" or "cuda", defaults to "cuda"', + type=str, default="cuda", choices=["cuda", "cpu"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + gem_node = ObjectDetectionGemNode( + device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + input_infra_image_topic=args.input_infra_image_topic, + output_infra_image_topic=args.output_infra_image_topic, + detections_topic=args.detections_topic, + ) + + rclpy.spin(gem_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + gem_node.destroy_node() + rclpy.shutdown() + + +if __name__ == "__main__": + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_nanodet_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_nanodet_node.py new file mode 100755 index 0000000000..31902c032e --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_nanodet_node.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import torch + +import rclpy +from rclpy.node import Node + +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import Detection2DArray +from opendr_bridge import ROS2Bridge + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import NanodetLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +class ObjectDetectionNanodetNode(Node): + + def __init__(self, input_rgb_image_topic="image_raw", output_rgb_image_topic="/opendr/image_objects_annotated", + detections_topic="/opendr/objects", device="cuda", model="plus_m_1.5x_416"): + """ + Creates a ROS2 Node for object detection with Nanodet. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no object detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param model: the name of the model of which we want to load the config file + :type model: str + """ + super().__init__('object_detection_2d_nanodet_node') + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.object_publisher = self.create_publisher(Detection2DArray, detections_topic, 1) + else: + self.object_publisher = None + + self.bridge = ROS2Bridge() + + # Initialize the object detector + self.object_detector = NanodetLearner(model_to_use=model, device=device) + self.object_detector.download(path=".", mode="pretrained", verbose=True) + self.object_detector.load("./nanodet_{}".format(model)) + + self.get_logger().info("Object Detection 2D Nanodet node initialized.") + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. + :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run object detection + boxes = self.object_detector.infer(image, threshold=0.35) + + # Get an OpenCV image back + image = image.opencv() + + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_boxes(boxes) # Convert to ROS boxes + if self.object_publisher is not None: + self.object_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Annotate image with object detection boxes + image = draw_bounding_boxes(image, boxes, class_names=self.object_detector.classes) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--model", help="Model that config file will be used", type=str, default="plus_m_1.5x_416") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detection_nanodet_node = ObjectDetectionNanodetNode(device=device, model=args.model, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic) + + rclpy.spin(object_detection_nanodet_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + object_detection_nanodet_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_ssd_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_ssd_node.py new file mode 100644 index 0000000000..acf4bce4a4 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_ssd_node.py @@ -0,0 +1,170 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import mxnet as mx + +import rclpy +from rclpy.node import Node + +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import Detection2DArray +from opendr_bridge import ROS2Bridge + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import SingleShotDetectorLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes +from opendr.perception.object_detection_2d import Seq2SeqNMSLearner, SoftNMS, FastNMS, ClusterNMS + + +class ObjectDetectionSSDNode(Node): + + def __init__(self, input_rgb_image_topic="image_raw", output_rgb_image_topic="/opendr/image_objects_annotated", + detections_topic="/opendr/objects", device="cuda", backbone="vgg16_atrous", nms_type='default'): + """ + Creates a ROS2 Node for object detection with SSD. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing + annotated image) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no object detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param backbone: backbone network + :type backbone: str + :param nms_type: type of NMS method, can be one + of 'default', 'seq2seq-nms', 'soft-nms', 'fast-nms', 'cluster-nms' + :type nms_type: str + """ + super().__init__('opendr_object_detection_2d_ssd_node') + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.object_publisher = self.create_publisher(Detection2DArray, detections_topic, 1) + else: + self.object_publisher = None + + self.bridge = ROS2Bridge() + + self.object_detector = SingleShotDetectorLearner(backbone=backbone, device=device) + self.object_detector.download(path=".", verbose=True) + self.object_detector.load("ssd_default_person") + self.custom_nms = None + + # Initialize NMS if selected + if nms_type == 'seq2seq-nms': + self.custom_nms = Seq2SeqNMSLearner(fmod_map_type='EDGEMAP', iou_filtering=0.8, + app_feats='fmod', device=device) + self.custom_nms.download(model_name='seq2seq_pets_jpd_fmod', path='.') + self.custom_nms.load('./seq2seq_pets_jpd_fmod/', verbose=True) + self.get_logger().info("Object Detection 2D SSD node seq2seq-nms initialized.") + elif nms_type == 'soft-nms': + self.custom_nms = SoftNMS(nms_thres=0.45, device=device) + self.get_logger().info("Object Detection 2D SSD node soft-nms initialized.") + elif nms_type == 'fast-nms': + self.custom_nms = FastNMS(device=device) + self.get_logger().info("Object Detection 2D SSD node fast-nms initialized.") + elif nms_type == 'cluster-nms': + self.custom_nms = ClusterNMS(device=device) + self.get_logger().info("Object Detection 2D SSD node cluster-nms initialized.") + else: + self.get_logger().info("Object Detection 2D SSD node using default NMS.") + + self.get_logger().info("Object Detection 2D SSD node initialized.") + + def callback(self, data): + """ + Callback that process the input data and publishes to the corresponding topics. 
+ :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run object detection + boxes = self.object_detector.infer(image, threshold=0.45, keep_size=False, custom_nms=self.custom_nms) + + if self.object_publisher is not None: + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_boxes(boxes) # Convert to ROS boxes + self.object_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with object detection boxes + image = draw_bounding_boxes(image, boxes, class_names=self.object_detector.classes) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, default="/opendr/objects") + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--backbone", help="Backbone network, defaults to vgg16_atrous", + type=str, default="vgg16_atrous", choices=["vgg16_atrous"]) + parser.add_argument("--nms_type", help="Non-Maximum Suppression type, defaults to \"default\", options are " + "\"seq2seq-nms\", \"soft-nms\", \"fast-nms\", \"cluster-nms\"", + type=str, default="default", + choices=["default", "seq2seq-nms", "soft-nms", "fast-nms", "cluster-nms"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and mx.context.num_gpus() > 0: + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detection_ssd_node = ObjectDetectionSSDNode(device=device, backbone=args.backbone, nms_type=args.nms_type, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic) + + rclpy.spin(object_detection_ssd_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + object_detection_ssd_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_yolov3_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_yolov3_node.py new file mode 100644 index 0000000000..43bd7aab03 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_yolov3_node.py @@ -0,0 +1,143 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import mxnet as mx + +import rclpy +from rclpy.node import Node + +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import Detection2DArray +from opendr_bridge import ROS2Bridge + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import YOLOv3DetectorLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +class ObjectDetectionYOLOV3Node(Node): + + def __init__(self, input_rgb_image_topic="image_raw", output_rgb_image_topic="/opendr/image_objects_annotated", + detections_topic="/opendr/objects", device="cuda", backbone="darknet53"): + """ + Creates a ROS2 Node for object detection with YOLOV3 + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no object detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param backbone: backbone network + :type backbone: str + """ + super().__init__('object_detection_2d_yolov3_node') + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.object_publisher = self.create_publisher(Detection2DArray, detections_topic, 1) + else: + self.object_publisher = None + + self.bridge = ROS2Bridge() + + self.object_detector = YOLOv3DetectorLearner(backbone=backbone, device=device) + self.object_detector.download(path=".", verbose=True) + self.object_detector.load("yolo_default") + + self.get_logger().info("Object Detection 2D YOLOV3 node initialized.") + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run object detection + boxes = self.object_detector.infer(image, threshold=0.1, keep_size=False) + + if self.object_publisher is not None: + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_bounding_box_list(boxes) # Convert to ROS bounding_box_list + self.object_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with object detection boxes + image = draw_bounding_boxes(image, boxes, class_names=self.object_detector.classes) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, default="/opendr/objects") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--backbone", help="Backbone network, defaults to \"darknet53\"", + type=str, default="darknet53", choices=["darknet53"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and mx.context.num_gpus() > 0: + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detection_yolov3_node = ObjectDetectionYOLOV3Node(device=device, backbone=args.backbone, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic) + + rclpy.spin(object_detection_yolov3_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + object_detection_yolov3_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_yolov5_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_yolov5_node.py new file mode 100644 index 0000000000..e80d0e34a4 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_2d_yolov5_node.py @@ -0,0 +1,142 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import torch + +import rclpy +from rclpy.node import Node + +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import Detection2DArray +from opendr_bridge import ROS2Bridge + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import YOLOv5DetectorLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +class ObjectDetectionYOLOV5Node(Node): + + def __init__(self, input_rgb_image_topic="image_raw", output_rgb_image_topic="/opendr/image_objects_annotated", + detections_topic="/opendr/objects", device="cuda", model="yolov5s"): + """ + Creates a ROS2 Node for object detection with YOLOV5. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no object detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param model: model to use + :type model: str + """ + super().__init__('object_detection_2d_yolov5_node') + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.object_publisher = self.create_publisher(Detection2DArray, detections_topic, 1) + else: + self.object_publisher = None + + self.bridge = ROS2Bridge() + + self.object_detector = YOLOv5DetectorLearner(model_name=model, device=device) + + self.get_logger().info("Object Detection 2D YOLOV5 node initialized.") + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run object detection + boxes = self.object_detector.infer(image) + + if self.object_publisher is not None: + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_bounding_box_list(boxes) # Convert to ROS bounding_box_list + self.object_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with object detection boxes + image = draw_bounding_boxes(image, boxes, class_names=self.object_detector.classes, line_thickness=3) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages", + type=lambda value: value if value.lower() != "none" else None, default="/opendr/objects") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--model", help="Model to use, defaults to \"yolov5s\"", type=str, default="yolov5s", + choices=['yolov5s', 'yolov5n', 'yolov5m', 'yolov5l', 'yolov5x', + 'yolov5n6', 'yolov5s6', 'yolov5m6', 'yolov5l6', 'custom']) + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detection_yolov5_node = ObjectDetectionYOLOV5Node(device=device, model=args.model, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic) + + rclpy.spin(object_detection_yolov5_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + object_detection_yolov5_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_3d_voxel_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_3d_voxel_node.py new file mode 100644 index 0000000000..4c3b883905 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_detection_3d_voxel_node.py @@ -0,0 +1,151 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import torch +import argparse +import os +import rclpy +from rclpy.node import Node +from vision_msgs.msg import Detection3DArray +from sensor_msgs.msg import PointCloud as ROS_PointCloud +from opendr_bridge import ROS2Bridge +from opendr.perception.object_detection_3d import VoxelObjectDetection3DLearner + + +class ObjectDetection3DVoxelNode(Node): + def __init__( + self, + input_point_cloud_topic="/opendr/dataset_point_cloud", + detections_topic="/opendr/objects3d", + device="cuda:0", + model_name="tanet_car_xyres_16", + model_config_path=os.path.join( + "$OPENDR_HOME", "src", "opendr", "perception", "object_detection_3d", + "voxel_object_detection_3d", "second_detector", "configs", "tanet", + "ped_cycle", "test_short.proto" + ), + temp_dir="temp", + ): + """ + Creates a ROS2 Node for 3D object detection + :param input_point_cloud_topic: Topic from which we are reading the input point cloud + :type input_point_cloud_topic: str + :param detections_topic: Topic to which we are publishing the annotations + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param model_name: the pretrained model to download or a trained model in temp_dir + :type model_name: str + :param temp_dir: where to store models + :type temp_dir: str + """ + + super().__init__('opendr_object_detection_3d_voxel_node') + + self.get_logger().info("Using model_name: {}".format(model_name)) + + self.learner = VoxelObjectDetection3DLearner( + device=device, temp_path=temp_dir, model_config_path=model_config_path + ) + if not os.path.exists(os.path.join(temp_dir, model_name)): + VoxelObjectDetection3DLearner.download(model_name, temp_dir) + + self.learner.load(os.path.join(temp_dir, model_name), verbose=True) + + # Initialize OpenDR ROSBridge object + self.bridge = ROS2Bridge() + + self.detection_publisher = self.create_publisher( + Detection3DArray, detections_topic, 1 + ) + + self.create_subscription(ROS_PointCloud, input_point_cloud_topic, self.callback, 1) + + self.get_logger().info("Object Detection 3D Voxel Node initialized.") + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+        :param data: input point cloud message
+        :type data: sensor_msgs.msg.PointCloud
+        """
+
+        # Convert sensor_msgs.msg.PointCloud into OpenDR PointCloud
+        point_cloud = self.bridge.from_ros_point_cloud(data)
+        detection_boxes = self.learner.infer(point_cloud)
+
+        # Convert detected boxes to ROS type and publish
+        ros_boxes = self.bridge.to_ros_boxes_3d(detection_boxes)
+        self.detection_publisher.publish(ros_boxes)
+
+
+def main(args=None):
+    rclpy.init(args=args)
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument("-i", "--input_point_cloud_topic",
+                        help="Point Cloud topic provided by either a point_cloud_dataset_node or any other 3D Point Cloud Node",
+                        type=str, default="/opendr/dataset_point_cloud")
+    parser.add_argument("-d", "--detections_topic",
+                        help="Output detections topic",
+                        type=str, default="/opendr/objects3d")
+    parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"",
+                        type=str, default="cuda", choices=["cuda", "cpu"])
+    parser.add_argument("-n", "--model_name", help="Name of the trained model",
+                        type=str, default="tanet_car_xyres_16")
+    parser.add_argument(
+        "-c", "--model_config_path", help="Path to a model .proto config",
+        type=str, default=os.path.join(
+            "$OPENDR_HOME", "src", "opendr", "perception", "object_detection_3d",
+            "voxel_object_detection_3d", "second_detector", "configs", "tanet",
+            "car", "xyres_16.proto"
+        )
+    )
+    parser.add_argument("-t", "--temp_dir", help="Path to a temp dir with models",
+                        type=str, default="temp")
+    args = parser.parse_args()
+
+    try:
+        if args.device == "cuda" and torch.cuda.is_available():
+            device = "cuda"
+        elif args.device == "cuda":
+            print("GPU not found. Using CPU instead.")
+            device = "cpu"
+        else:
+            print("Using CPU.")
+            device = "cpu"
+    except:
+        print("Using CPU.")
+        device = "cpu"
+
+    voxel_node = ObjectDetection3DVoxelNode(
+        device=device,
+        model_name=args.model_name,
+        model_config_path=args.model_config_path,
+        input_point_cloud_topic=args.input_point_cloud_topic,
+        temp_dir=args.temp_dir,
+        detections_topic=args.detections_topic,
+    )
+
+    rclpy.spin(voxel_node)
+    # Destroy the node explicitly
+    # (optional - otherwise it will be done automatically
+    # when the garbage collector destroys the node object)
+    voxel_node.destroy_node()
+    rclpy.shutdown()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_tracking_2d_deep_sort_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_tracking_2d_deep_sort_node.py
new file mode 100644
index 0000000000..30b83a8b75
--- /dev/null
+++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_tracking_2d_deep_sort_node.py
@@ -0,0 +1,247 @@
+#!/usr/bin/env python
+# Copyright 2020-2022 OpenDR European Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
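+
+# For reference, a minimal non-ROS sketch of the detect-then-track flow this node wraps
+# (FairMOT detections fed to Deep SORT for association). The model names and temp directory
+# mirror main() below; the input frame path is only an assumption.
+#
+#   import cv2
+#   from opendr.engine.data import Image, ImageWithDetections
+#   from opendr.perception.object_tracking_2d import (
+#       ObjectTracking2DDeepSortLearner, ObjectTracking2DFairMotLearner)
+#
+#   detector = ObjectTracking2DFairMotLearner(device="cpu", temp_path="temp")
+#   ObjectTracking2DFairMotLearner.download("fairmot_dla34", "temp")
+#   detector.load("temp/fairmot_dla34")
+#   tracker = ObjectTracking2DDeepSortLearner(device="cpu", temp_path="temp")
+#   ObjectTracking2DDeepSortLearner.download("deep_sort", "temp")
+#   tracker.load("temp/deep_sort")
+#
+#   frame = Image(cv2.imread("frame.jpg"))
+#   detections = detector.infer(frame)
+#   tracks = tracker.infer(ImageWithDetections(frame.numpy(), detections), swap_left_top=False)
+#   print([int(box.id) for box in tracks])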
+ +import torch +import argparse +import cv2 +import os +from opendr.engine.target import TrackingAnnotationList +import rclpy +from rclpy.node import Node +from vision_msgs.msg import Detection2DArray +from std_msgs.msg import Int32MultiArray +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROS2Bridge +from opendr.perception.object_tracking_2d import ( + ObjectTracking2DDeepSortLearner, + ObjectTracking2DFairMotLearner +) +from opendr.engine.data import Image, ImageWithDetections + + +class ObjectTracking2DDeepSortNode(Node): + def __init__( + self, + detector=None, + input_rgb_image_topic="image_raw", + output_detection_topic="/opendr/objects", + output_tracking_id_topic="/opendr/objects_tracking_id", + output_rgb_image_topic="/opendr/image_objects_annotated", + device="cuda:0", + model_name="deep_sort", + temp_dir="temp", + ): + """ + Creates a ROS2 Node for 2D object tracking + :param detector: Learner to generate object detections + :type detector: Learner + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing + annotated image) + :type output_rgb_image_topic: str + :param output_detection_topic: Topic to which we are publishing the detections + :type output_detection_topic: str + :param output_tracking_id_topic: Topic to which we are publishing the tracking ids + :type output_tracking_id_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param model_name: the pretrained model to download or a saved model in temp_dir folder to use + :type model_name: str + :param temp_dir: the folder to download models + :type temp_dir: str + """ + + super().__init__('opendr_object_tracking_2d_deep_sort_node') + + self.get_logger().info("Using model_name: {}".format(model_name)) + + self.detector = detector + self.learner = ObjectTracking2DDeepSortLearner( + device=device, temp_path=temp_dir, + ) + if not os.path.exists(os.path.join(temp_dir, model_name)): + ObjectTracking2DDeepSortLearner.download(model_name, temp_dir) + + self.learner.load(os.path.join(temp_dir, model_name), verbose=True) + + self.bridge = ROS2Bridge() + + if output_tracking_id_topic is not None: + self.tracking_id_publisher = self.create_publisher( + Int32MultiArray, output_tracking_id_topic, 1 + ) + + if output_rgb_image_topic is not None: + self.output_image_publisher = self.create_publisher( + ROS_Image, output_rgb_image_topic, 1 + ) + + if output_detection_topic is not None: + self.detection_publisher = self.create_publisher( + Detection2DArray, output_detection_topic, 1 + ) + + self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: input message + :type data: sensor_msgs.msg.Image + """ + + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding="bgr8") + detection_boxes = self.detector.infer(image) + image_with_detections = ImageWithDetections(image.numpy(), detection_boxes) + tracking_boxes = self.learner.infer(image_with_detections, swap_left_top=False) + + if self.output_image_publisher is not None: + frame = image.opencv() + draw_predictions(frame, tracking_boxes) + message = self.bridge.to_ros_image( + Image(frame), encoding="bgr8" + ) + self.output_image_publisher.publish(message) + self.get_logger().info("Published annotated image") + + if self.detection_publisher is not None: + detection_boxes = tracking_boxes.bounding_box_list() + ros_boxes = self.bridge.to_ros_boxes(detection_boxes) + self.detection_publisher.publish(ros_boxes) + self.get_logger().info("Published " + str(len(detection_boxes)) + " detection boxes") + + if self.tracking_id_publisher is not None: + ids = [int(tracking_box.id) for tracking_box in tracking_boxes] + ros_ids = Int32MultiArray() + ros_ids.data = ids + self.tracking_id_publisher.publish(ros_ids) + self.get_logger().info("Published " + str(len(ids)) + " tracking ids") + + +colors = [ + (255, 0, 255), + (0, 0, 255), + (0, 255, 0), + (255, 0, 0), + (35, 69, 55), + (43, 63, 54), +] + + +def draw_predictions(frame, predictions: TrackingAnnotationList, is_centered=False, is_flipped_xy=True): + global colors + w, h, _ = frame.shape + + for prediction in predictions.boxes: + prediction = prediction + + if not hasattr(prediction, "id"): + prediction.id = 0 + + color = colors[int(prediction.id) * 7 % len(colors)] + + x = prediction.left + y = prediction.top + + if is_flipped_xy: + x = prediction.top + y = prediction.left + + if is_centered: + x -= prediction.width + y -= prediction.height + + cv2.rectangle( + frame, + (int(x), int(y)), + ( + int(x + prediction.width), + int(y + prediction.height), + ), + color, + 2, + ) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", + help="Input Image topic provided by either an image_dataset_node, webcam or any other image node", + type=str, default="/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", + help="Output annotated image topic with a visualization of detections and their ids", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", + help="Output detections topic", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("-t", "--tracking_id_topic", + help="Output tracking ids topic with the same element count as in output_detection_topic", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects_tracking_id") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("-n", "--model_name", help="Name of the trained model", + type=str, default="deep_sort", choices=["deep_sort"]) + parser.add_argument("-td", "--temp_dir", help="Path to a temporary directory with models", + type=str, default="temp") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not 
found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + detection_learner = ObjectTracking2DFairMotLearner( + device=device, temp_path=args.temp_dir, + ) + if not os.path.exists(os.path.join(args.temp_dir, "fairmot_dla34")): + ObjectTracking2DFairMotLearner.download("fairmot_dla34", args.temp_dir) + + detection_learner.load(os.path.join(args.temp_dir, "fairmot_dla34"), verbose=True) + + deep_sort_node = ObjectTracking2DDeepSortNode( + detector=detection_learner, + device=device, + model_name=args.model_name, + input_rgb_image_topic=args.input_rgb_image_topic, + temp_dir=args.temp_dir, + output_detection_topic=args.detections_topic, + output_tracking_id_topic=args.tracking_id_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + ) + rclpy.spin(deep_sort_node) + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + deep_sort_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_tracking_2d_fair_mot_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_tracking_2d_fair_mot_node.py new file mode 100755 index 0000000000..bcd30f68ac --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_tracking_2d_fair_mot_node.py @@ -0,0 +1,231 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import torch +import argparse +import cv2 +import os +from opendr.engine.target import TrackingAnnotationList +import rclpy +from rclpy.node import Node +from vision_msgs.msg import Detection2DArray +from std_msgs.msg import Int32MultiArray +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROS2Bridge +from opendr.perception.object_tracking_2d import ( + ObjectTracking2DFairMotLearner, +) +from opendr.engine.data import Image + + +class ObjectTracking2DFairMotNode(Node): + def __init__( + self, + input_rgb_image_topic="image_raw", + output_rgb_image_topic="/opendr/image_objects_annotated", + output_detection_topic="/opendr/objects", + output_tracking_id_topic="/opendr/objects_tracking_id", + device="cuda:0", + model_name="fairmot_dla34", + temp_dir="temp", + ): + """ + Creates a ROS2 Node for 2D object tracking + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing + annotated image) + :type output_rgb_image_topic: str + :param output_detection_topic: Topic to which we are publishing the detections + :type output_detection_topic: str + :param output_tracking_id_topic: Topic to which we are publishing the tracking ids + :type output_tracking_id_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param model_name: the pretrained model to download or a saved model in temp_dir folder to use + :type model_name: str + :param temp_dir: the folder to download models + :type temp_dir: str + """ + + super().__init__('opendr_object_tracking_2d_fair_mot_node') + + self.learner = ObjectTracking2DFairMotLearner( + device=device, temp_path=temp_dir, + ) + if not os.path.exists(os.path.join(temp_dir, model_name)): + ObjectTracking2DFairMotLearner.download(model_name, temp_dir) + + self.learner.load(os.path.join(temp_dir, model_name), verbose=True) + + # Initialize OpenDR ROSBridge object + self.bridge = ROS2Bridge() + + if output_detection_topic is not None: + self.detection_publisher = self.create_publisher( + Detection2DArray, output_detection_topic, 1 + ) + + if output_tracking_id_topic is not None: + self.tracking_id_publisher = self.create_publisher( + Int32MultiArray, output_tracking_id_topic, 1 + ) + + if output_rgb_image_topic is not None: + self.output_image_publisher = self.create_publisher( + ROS_Image, output_rgb_image_topic, 1 + ) + + self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+ :param data: input message + :type data: sensor_msgs.msg.Image + """ + + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding="bgr8") + tracking_boxes = self.learner.infer(image) + + if self.output_image_publisher is not None: + frame = image.opencv() + draw_predictions(frame, tracking_boxes) + message = self.bridge.to_ros_image( + Image(frame), encoding="bgr8" + ) + self.output_image_publisher.publish(message) + self.get_logger().info("Published annotated image") + + if self.detection_publisher is not None: + detection_boxes = tracking_boxes.bounding_box_list() + ros_boxes = self.bridge.to_ros_boxes(detection_boxes) + self.detection_publisher.publish(ros_boxes) + self.get_logger().info("Published " + str(len(detection_boxes)) + " detection boxes") + + if self.tracking_id_publisher is not None: + ids = [tracking_box.id for tracking_box in tracking_boxes] + ros_ids = Int32MultiArray() + ros_ids.data = ids + self.tracking_id_publisher.publish(ros_ids) + self.get_logger().info("Published " + str(len(ids)) + " tracking ids") + + +colors = [ + (255, 0, 255), + (0, 0, 255), + (0, 255, 0), + (255, 0, 0), + (35, 69, 55), + (43, 63, 54), +] + + +def draw_predictions(frame, predictions: TrackingAnnotationList, is_centered=False, is_flipped_xy=True): + global colors + w, h, _ = frame.shape + + for prediction in predictions.boxes: + prediction = prediction + + if not hasattr(prediction, "id"): + prediction.id = 0 + + color = colors[int(prediction.id) * 7 % len(colors)] + + x = prediction.left + y = prediction.top + + if is_flipped_xy: + x = prediction.top + y = prediction.left + + if is_centered: + x -= prediction.width + y -= prediction.height + + cv2.rectangle( + frame, + (int(x), int(y)), + ( + int(x + prediction.width), + int(y + prediction.height), + ), + color, + 2, + ) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", + help="Input Image topic provided by either an image_dataset_node, webcam or any other image node", + type=str, default="/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", + help="Output annotated image topic with a visualization of detections and their ids", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_objects_annotated") + parser.add_argument("-d", "--detections_topic", + help="Output detections topic", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects") + parser.add_argument("-t", "--tracking_id_topic", + help="Output tracking ids topic with the same element count as in output_detection_topic", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/objects_tracking_id") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("-n", "--model_name", help="Name of the trained model", + type=str, default="fairmot_dla34", choices=["fairmot_dla34"]) + parser.add_argument("-td", "--temp_dir", help="Path to a temporary directory with models", + type=str, default="temp") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + fair_mot_node = ObjectTracking2DFairMotNode( + device=device, + model_name=args.model_name, + input_rgb_image_topic=args.input_rgb_image_topic, + temp_dir=args.temp_dir, + output_detection_topic=args.detections_topic, + output_tracking_id_topic=args.tracking_id_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + ) + + rclpy.spin(fair_mot_node) + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + fair_mot_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_tracking_2d_siamrpn_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_tracking_2d_siamrpn_node.py new file mode 100644 index 0000000000..f2d49919ad --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_tracking_2d_siamrpn_node.py @@ -0,0 +1,179 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import mxnet as mx + +import cv2 +from math import dist +import rclpy +from rclpy.node import Node + +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import Detection2D +from opendr_bridge import ROS2Bridge + +from opendr.engine.data import Image +from opendr.engine.target import TrackingAnnotation, BoundingBox +from opendr.perception.object_tracking_2d import SiamRPNLearner +from opendr.perception.object_detection_2d import YOLOv3DetectorLearner + + +class ObjectTrackingSiamRPNNode(Node): + + def __init__(self, object_detector, input_rgb_image_topic="/image_raw", + output_rgb_image_topic="/opendr/image_tracking_annotated", + tracker_topic="/opendr/tracked_object", + device="cuda"): + """ + Creates a ROS2 Node for object tracking with SiamRPN. 
+ :param object_detector: An object detector learner to use for initialization + :type object_detector: opendr.engine.learners.Learner + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param tracker_topic: Topic to which we are publishing the annotation + :type tracker_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + """ + super().__init__('opendr_object_tracking_2d_siamrpn_node') + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if tracker_topic is not None: + self.object_publisher = self.create_publisher(Detection2D, tracker_topic, 1) + else: + self.object_publisher = None + + self.bridge = ROS2Bridge() + + self.object_detector = object_detector + # Initialize object tracker + self.tracker = SiamRPNLearner(device=device) + self.image = None + self.initialized = False + + self.get_logger().info("Object Tracking 2D SiamRPN node started.") + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. + :param data: input message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + self.image = image + + if not self.initialized: + # Run object detector to initialize the tracker + image = self.bridge.from_ros_image(data, encoding='bgr8') + boxes = self.object_detector.infer(image) + + img_center = [int(image.data.shape[2] // 2), int(image.data.shape[1] // 2)] # width, height + # Find the box that is closest to the center of the image + center_box = BoundingBox("", left=0, top=0, width=0, height=0) + min_distance = dist([center_box.left, center_box.top], img_center) + for box in boxes: + new_distance = dist([int(box.left + box.width // 2), int(box.top + box.height // 2)], img_center) + if new_distance < min_distance and box.width > 32 and box.height > 32: # Ignore very small boxes + center_box = box + min_distance = dist([center_box.left, center_box.top], img_center) + + # Initialize tracker with the most central box found + init_box = TrackingAnnotation(center_box.name, + center_box.left, center_box.top, center_box.width, center_box.height, + id=0, score=center_box.confidence) + + self.tracker.infer(self.image, init_box) + self.initialized = True + self.get_logger().info("Object Tracking 2D SiamRPN node initialized with the most central bounding box.") + + if self.initialized: + # Run object tracking + box = self.tracker.infer(image) + + if self.object_publisher is not None: + # Publish detections in ROS message + ros_boxes = self.bridge.to_ros_single_tracking_annotation(box) + self.object_publisher.publish(ros_boxes) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + cv2.rectangle(image, (box.left, box.top), + (box.left + box.width, box.top + box.height), + (0, 255, 255), 3) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(args=None): + 
rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_tracking_annotated") + parser.add_argument("-t", "--tracker_topic", help="Topic name for tracker messages", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/tracked_object") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and mx.context.num_gpus() > 0: + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + object_detector = YOLOv3DetectorLearner(backbone="darknet53", device=device) + object_detector.download(path=".", verbose=True) + object_detector.load("yolo_default") + + object_tracker_2d_siamrpn_node = ObjectTrackingSiamRPNNode(object_detector=object_detector, device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + tracker_topic=args.tracker_topic) + + rclpy.spin(object_tracker_2d_siamrpn_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + object_tracker_2d_siamrpn_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_tracking_3d_ab3dmot_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_tracking_3d_ab3dmot_node.py new file mode 100644 index 0000000000..c0cfb95124 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/object_tracking_3d_ab3dmot_node.py @@ -0,0 +1,177 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
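+
+# For reference, a minimal non-ROS sketch of the pipeline this node wraps: a voxel-based 3D
+# detector produces boxes from a point cloud and AB3DMOT assigns tracking ids to them. The
+# model name mirrors main() below; the config path and dataset directories are assumptions
+# (see the voxel detection and point cloud dataset nodes for the defaults used there).
+#
+#   from opendr.perception.object_detection_3d import (
+#       VoxelObjectDetection3DLearner, LabeledPointCloudsDatasetIterator)
+#   from opendr.perception.object_tracking_3d import ObjectTracking3DAb3dmotLearner
+#
+#   detector = VoxelObjectDetection3DLearner(device="cpu", temp_path="temp",
+#                                            model_config_path="configs/tanet/car/xyres_16.proto")
+#   VoxelObjectDetection3DLearner.download("tanet_car_xyres_16", "temp")
+#   detector.load("temp/tanet_car_xyres_16")
+#   tracker = ObjectTracking3DAb3dmotLearner(device="cpu")
+#
+#   dataset = LabeledPointCloudsDatasetIterator("kitti/training/velodyne_reduced",
+#                                               "kitti/training/label_2",
+#                                               "kitti/training/calib")
+#   detections = detector.infer(dataset[0][0])  # dataset elements are (PointCloud, Target) pairs
+#   tracks = tracker.infer(detections)
+#   print([box.id for box in tracks])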
+ +import torch +import argparse +import os +import rclpy +from rclpy.node import Node +from vision_msgs.msg import Detection3DArray +from std_msgs.msg import Int32MultiArray +from sensor_msgs.msg import PointCloud as ROS_PointCloud +from opendr_bridge import ROS2Bridge +from opendr.perception.object_tracking_3d import ObjectTracking3DAb3dmotLearner +from opendr.perception.object_detection_3d import VoxelObjectDetection3DLearner + + +class ObjectTracking3DAb3dmotNode(Node): + def __init__( + self, + detector=None, + input_point_cloud_topic="/opendr/dataset_point_cloud", + output_detection3d_topic="/opendr/detection3d", + output_tracking3d_id_topic="/opendr/tracking3d_id", + device="cuda:0", + ): + """ + Creates a ROS2 Node for 3D object tracking + :param detector: Learner that provides 3D object detections + :type detector: Learner + :param input_point_cloud_topic: Topic from which we are reading the input point cloud + :type input_point_cloud_topic: str + :param output_detection3d_topic: Topic to which we are publishing the annotations + :type output_detection3d_topic: str + :param output_tracking3d_id_topic: Topic to which we are publishing the tracking ids + :type output_tracking3d_id_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + """ + super().__init__('opendr_object_tracking_3d_ab3dmot_node') + + self.detector = detector + self.learner = ObjectTracking3DAb3dmotLearner( + device=device + ) + + # Initialize OpenDR ROSBridge object + self.bridge = ROS2Bridge() + + if output_detection3d_topic is not None: + self.detection_publisher = self.create_publisher( + Detection3DArray, output_detection3d_topic, 1 + ) + + if output_tracking3d_id_topic is not None: + self.tracking_id_publisher = self.create_publisher( + Int32MultiArray, output_tracking3d_id_topic, 1 + ) + + self.create_subscription(ROS_PointCloud, input_point_cloud_topic, self.callback, 1) + + self.get_logger().info("Object Tracking 3D Ab3dmot Node initialized.") + + def callback(self, data): + """ + Callback that processes the input data and publishes to the corresponding topics. 
+        :param data: input point cloud message
+        :type data: sensor_msgs.msg.PointCloud
+        """
+
+        # Convert sensor_msgs.msg.PointCloud into OpenDR PointCloud
+        point_cloud = self.bridge.from_ros_point_cloud(data)
+        detection_boxes = self.detector.infer(point_cloud)
+
+        # Convert detected boxes to ROS type and publish
+        if self.detection_publisher is not None:
+            ros_boxes = self.bridge.to_ros_boxes_3d(detection_boxes)
+            self.detection_publisher.publish(ros_boxes)
+            self.get_logger().info("Published " + str(len(detection_boxes)) + " detection boxes")
+
+        if self.tracking_id_publisher is not None:
+            tracking_boxes = self.learner.infer(detection_boxes)
+            ids = [tracking_box.id for tracking_box in tracking_boxes]
+            ros_ids = Int32MultiArray()
+            ros_ids.data = ids
+            self.tracking_id_publisher.publish(ros_ids)
+            self.get_logger().info("Published " + str(len(ids)) + " tracking ids")
+
+
+def main(args=None):
+    rclpy.init(args=args)
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument("-i", "--input_point_cloud_topic",
+                        help="Point Cloud topic provided by either a point_cloud_dataset_node or any other 3D Point Cloud Node",
+                        type=str, default="/opendr/dataset_point_cloud")
+    parser.add_argument("-d", "--detections_topic",
+                        help="Output detections topic",
+                        type=lambda value: value if value.lower() != "none" else None, default="/opendr/objects3d")
+    parser.add_argument("-t", "--tracking3d_id_topic",
+                        help="Output tracking ids topic with the same element count as in output_detection_topic",
+                        type=lambda value: value if value.lower() != "none" else None, default="/opendr/objects_tracking_id")
+    parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"",
+                        type=str, default="cuda", choices=["cuda", "cpu"])
+    parser.add_argument("-dn", "--detector_model_name", help="Name of the trained model",
+                        type=str, default="tanet_car_xyres_16", choices=["tanet_car_xyres_16"])
+    parser.add_argument(
+        "-dc", "--detector_model_config_path", help="Path to a model .proto config",
+        type=str, default=os.path.join(
+            "$OPENDR_HOME", "src", "opendr", "perception", "object_detection_3d",
+            "voxel_object_detection_3d", "second_detector", "configs", "tanet",
+            "car", "xyres_16.proto"
+        )
+    )
+    parser.add_argument("-td", "--temp_dir", help="Path to a temporary directory with models",
+                        type=str, default="temp")
+    args = parser.parse_args()
+
+    input_point_cloud_topic = args.input_point_cloud_topic
+    detector_model_name = args.detector_model_name
+    temp_dir = args.temp_dir
+    detector_model_config_path = args.detector_model_config_path
+    output_detection3d_topic = args.detections_topic
+    output_tracking3d_id_topic = args.tracking3d_id_topic
+
+    try:
+        if args.device == "cuda" and torch.cuda.is_available():
+            device = "cuda"
+        elif args.device == "cuda":
+            print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + detector = VoxelObjectDetection3DLearner( + device=device, + temp_path=temp_dir, + model_config_path=detector_model_config_path + ) + if not os.path.exists(os.path.join(temp_dir, detector_model_name)): + VoxelObjectDetection3DLearner.download(detector_model_name, temp_dir) + + detector.load(os.path.join(temp_dir, detector_model_name), verbose=True) + + ab3dmot_node = ObjectTracking3DAb3dmotNode( + detector=detector, + device=device, + input_point_cloud_topic=input_point_cloud_topic, + output_detection3d_topic=output_detection3d_topic, + output_tracking3d_id_topic=output_tracking3d_id_topic, + ) + + rclpy.spin(ab3dmot_node) + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + ab3dmot_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/panoptic_segmentation_efficient_ps_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/panoptic_segmentation_efficient_ps_node.py new file mode 100644 index 0000000000..e9459f6480 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/panoptic_segmentation_efficient_ps_node.py @@ -0,0 +1,198 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys +from pathlib import Path +import argparse +from typing import Optional + +import rclpy +from rclpy.node import Node +import matplotlib +from sensor_msgs.msg import Image as ROS_Image + +from opendr_bridge import ROS2Bridge +from opendr.perception.panoptic_segmentation import EfficientPsLearner + +# Avoid having a matplotlib GUI in a separate thread in the visualize() function +matplotlib.use('Agg') + + +class EfficientPsNode(Node): + def __init__(self, + input_rgb_image_topic: str, + checkpoint: str, + output_heatmap_topic: Optional[str] = None, + output_rgb_visualization_topic: Optional[str] = None, + detailed_visualization: bool = False + ): + """ + Initialize the EfficientPS ROS2 node and create an instance of the respective learner class. + :param checkpoint: This is either a path to a saved model or one of [Cityscapes, KITTI] to download + pre-trained model weights. 
+ :type checkpoint: str + :param input_rgb_image_topic: ROS topic for the input image stream + :type input_rgb_image_topic: str + :param output_heatmap_topic: ROS topic for the predicted semantic and instance maps + :type output_heatmap_topic: str + :param output_rgb_visualization_topic: ROS topic for the generated visualization of the panoptic map + :type output_rgb_visualization_topic: str + :param detailed_visualization: if True, generate a combined overview of the input RGB image and the + semantic, instance, and panoptic segmentation maps and publish it on output_rgb_visualization_topic + :type detailed_visualization: bool + """ + super().__init__('opendr_efficient_panoptic_segmentation_node') + + self.input_rgb_image_topic = input_rgb_image_topic + self.checkpoint = checkpoint + self.output_heatmap_topic = output_heatmap_topic + self.output_rgb_visualization_topic = output_rgb_visualization_topic + self.detailed_visualization = detailed_visualization + + # Initialize all ROS2 related things + self._bridge = ROS2Bridge() + self._instance_heatmap_publisher = None + self._semantic_heatmap_publisher = None + self._visualization_publisher = None + + # Initialize the panoptic segmentation network + config_file = Path(sys.modules[ + EfficientPsLearner.__module__].__file__).parent / 'configs' / 'singlegpu_cityscapes.py' + self._learner = EfficientPsLearner(str(config_file)) + + # Other + self._tmp_folder = Path(__file__).parent.parent / 'tmp' / 'efficientps' + self._tmp_folder.mkdir(exist_ok=True, parents=True) + + def _init_learner(self) -> bool: + """ + The model can be initialized via + 1. downloading pre-trained weights for Cityscapes or KITTI. + 2. passing a path to an existing checkpoint file. + + This has not been done in the __init__() function since logging is available only once the node is registered. + """ + if self.checkpoint in ['cityscapes', 'kitti']: + file_path = EfficientPsLearner.download(str(self._tmp_folder), + trained_on=self.checkpoint) + self.checkpoint = file_path + + if self._learner.load(self.checkpoint): + self.get_logger().info('Successfully loaded the checkpoint.') + return True + else: + self.get_logger().error('Failed to load the checkpoint.') + return False + + def _init_subscriber(self): + """ + Subscribe to all relevant topics. + """ + self.image_subscriber = self.create_subscription(ROS_Image, self.input_rgb_image_topic, + self.callback, 1) + + def _init_publisher(self): + """ + Set up the publishers as requested by the user. + """ + if self.output_heatmap_topic is not None: + self._instance_heatmap_publisher = self.create_publisher(ROS_Image, + f'{self.output_heatmap_topic}/instance', + 10) + self._semantic_heatmap_publisher = self.create_publisher(ROS_Image, + f'{self.output_heatmap_topic}/semantic', + 10) + if self.output_rgb_visualization_topic is not None: + self._visualization_publisher = self.create_publisher(ROS_Image, + self.output_rgb_visualization_topic, + 10) + + def listen(self): + """ + Start the node and begin processing input data. The order of the function calls ensures that the node does not + try to process input images without being in a trained state. 
+ """ + if self._init_learner(): + self._init_publisher() + self._init_subscriber() + self.get_logger().info('EfficientPS node started!') + rclpy.spin(self) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + self.destroy_node() + rclpy.shutdown() + + def callback(self, data: ROS_Image): + """ + Predict the panoptic segmentation map from the input image and publish the results. + :param data: Input image message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image to OpenDR Image + image = self._bridge.from_ros_image(data) + + try: + # Retrieve a list of two OpenDR heatmaps: [instance map, semantic map] + prediction = self._learner.infer(image) + + # The output topics are only published if there is at least one subscriber + if self._visualization_publisher is not None and self._visualization_publisher.get_subscription_count() > 0: + panoptic_image = EfficientPsLearner.visualize(image, prediction, show_figure=False, + detailed=self.detailed_visualization) + self._visualization_publisher.publish(self._bridge.to_ros_image(panoptic_image, encoding="rgb8")) + + if self._instance_heatmap_publisher is not None and self._instance_heatmap_publisher.get_subscription_count() > 0: + self._instance_heatmap_publisher.publish(self._bridge.to_ros_image(prediction[0])) + if self._semantic_heatmap_publisher is not None and self._semantic_heatmap_publisher.get_subscription_count() > 0: + self._semantic_heatmap_publisher.publish(self._bridge.to_ros_image(prediction[1])) + + except Exception as e: + self.get_logger().error(f'Failed to generate prediction: {e}') + + +def main(args=None): + rclpy.init(args=args) + parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument('-i', '--input_rgb_image_topic', type=str, default='/image_raw', + help='listen to RGB images on this topic') + parser.add_argument('-oh', '--output_heatmap_topic', + type=lambda value: value if value.lower() != "none" else None, + default='/opendr/panoptic', + help='publish the semantic and instance maps on this topic as "OUTPUT_HEATMAP_TOPIC/semantic" \ + and "OUTPUT_HEATMAP_TOPIC/instance"') + parser.add_argument('-ov', '--output_rgb_image_topic', + type=lambda value: value if value.lower() != "none" else None, + default='/opendr/panoptic/rgb_visualization', + help='publish the panoptic segmentation map as an RGB image on this topic or a more detailed \ + overview if using the --detailed_visualization flag') + parser.add_argument('--detailed_visualization', action='store_true', + help='generate a combined overview of the input RGB image and the semantic, instance, and \ + panoptic segmentation maps and publish it on OUTPUT_RGB_IMAGE_TOPIC') + parser.add_argument('--checkpoint', type=str, default='cityscapes', + help='download pretrained models [cityscapes, kitti] or load from the provided path') + args = parser.parse_args() + + efficient_ps_node = EfficientPsNode(args.input_rgb_image_topic, + args.checkpoint, + args.output_heatmap_topic, + args.output_rgb_image_topic, + args.detailed_visualization) + efficient_ps_node.listen() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/point_cloud_dataset_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/point_cloud_dataset_node.py new file mode 100644 index 0000000000..5ea7f129ff --- /dev/null +++ 
b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/point_cloud_dataset_node.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# Copyright 2020-2022 OpenDR European Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import os
+import rclpy
+from rclpy.node import Node
+from sensor_msgs.msg import PointCloud as ROS_PointCloud
+from opendr_bridge import ROS2Bridge
+from opendr.engine.datasets import DatasetIterator
+from opendr.perception.object_detection_3d import KittiDataset, LabeledPointCloudsDatasetIterator
+
+
+class PointCloudDatasetNode(Node):
+    def __init__(
+        self,
+        dataset: DatasetIterator,
+        output_point_cloud_topic="/opendr/dataset_point_cloud",
+        data_fps=10,
+    ):
+        """
+        Creates a ROS2 Node for publishing dataset point clouds
+        """
+
+        super().__init__('opendr_point_cloud_dataset_node')
+
+        self.dataset = dataset
+        self.bridge = ROS2Bridge()
+        self.timer = self.create_timer(1.0 / data_fps, self.timer_callback)
+        self.sample_index = 0
+
+        self.output_point_cloud_publisher = self.create_publisher(
+            ROS_PointCloud, output_point_cloud_topic, 1
+        )
+        self.get_logger().info("Publishing dataset point clouds.")
+
+    def timer_callback(self):
+
+        point_cloud = self.dataset[self.sample_index % len(self.dataset)][0]
+        # Dataset should have a (PointCloud, Target) pair as elements
+
+        message = self.bridge.to_ros_point_cloud(
+            point_cloud, self.get_clock().now().to_msg()
+        )
+        self.output_point_cloud_publisher.publish(message)
+
+        self.sample_index += 1
+
+
+def main(args=None):
+    rclpy.init(args=args)
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument("-d", "--dataset_path",
+                        help="Path to a dataset. If it does not exist, the nano KITTI dataset will be downloaded there.",
+                        type=str, default="KITTI/opendr_nano_kitti")
+    parser.add_argument("-ks", "--kitti_subsets_path",
+                        help="Path to kitti subsets. 
Used only if a KITTI dataset is downloaded", + type=str, + default="../../src/opendr/perception/object_detection_3d/datasets/nano_kitti_subsets") + parser.add_argument("-o", "--output_point_cloud_topic", help="Topic name to publish the data", + type=str, default="/opendr/dataset_point_cloud") + parser.add_argument("-f", "--fps", help="Data FPS", + type=float, default=10) + args = parser.parse_args() + + dataset_path = args.dataset_path + kitti_subsets_path = args.kitti_subsets_path + output_point_cloud_topic = args.output_point_cloud_topic + data_fps = args.fps + + if not os.path.exists(dataset_path): + dataset_path = KittiDataset.download_nano_kitti( + "KITTI", kitti_subsets_path=kitti_subsets_path, + create_dir=True, + ).path + + dataset = LabeledPointCloudsDatasetIterator( + dataset_path + "/training/velodyne_reduced", + dataset_path + "/training/label_2", + dataset_path + "/training/calib", + ) + + dataset_node = PointCloudDatasetNode( + dataset, output_point_cloud_topic=output_point_cloud_topic, data_fps=data_fps + ) + + rclpy.spin(dataset_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + dataset_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/pose_estimation_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/pose_estimation_node.py new file mode 100644 index 0000000000..9193517314 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/pose_estimation_node.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import torch + +import rclpy +from rclpy.node import Node + +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROS2Bridge +from opendr_interface.msg import OpenDRPose2D + +from opendr.engine.data import Image +from opendr.perception.pose_estimation import draw +from opendr.perception.pose_estimation import LightweightOpenPoseLearner + + +class PoseEstimationNode(Node): + + def __init__(self, input_rgb_image_topic="image_raw", output_rgb_image_topic="/opendr/image_pose_annotated", + detections_topic="/opendr/poses", device="cuda", + num_refinement_stages=2, use_stride=False, half_precision=False): + """ + Creates a ROS2 Node for pose estimation with Lightweight OpenPose. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, no annotated + image is published) + :type output_rgb_image_topic: str + :param detections_topic: Topic to which we are publishing the annotations (if None, no pose detection message + is published) + :type detections_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param num_refinement_stages: Specifies the number of pose estimation refinement stages are added on the + model's head, including the initial stage. Can be 0, 1 or 2, with more stages meaning slower and more accurate + inference + :type num_refinement_stages: int + :param use_stride: Whether to add a stride value in the model, which reduces accuracy but increases + inference speed + :type use_stride: bool + :param half_precision: Enables inference using half (fp16) precision instead of single (fp32) precision. + Valid only for GPU-based inference + :type half_precision: bool + """ + super().__init__('opendr_pose_estimation_node') + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if detections_topic is not None: + self.pose_publisher = self.create_publisher(OpenDRPose2D, detections_topic, 1) + else: + self.pose_publisher = None + + self.bridge = ROS2Bridge() + + self.pose_estimator = LightweightOpenPoseLearner(device=device, num_refinement_stages=num_refinement_stages, + mobilenet_use_stride=use_stride, + half_precision=half_precision) + self.pose_estimator.download(path=".", verbose=True) + self.pose_estimator.load("openpose_default") + + self.get_logger().info("Pose estimation node initialized.") + + def callback(self, data): + """ + Callback that process the input data and publishes to the corresponding topics. 
+ :param data: Input image message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run pose estimation + poses = self.pose_estimator.infer(image) + + # Publish detections in ROS message + for pose in poses: + if self.pose_publisher is not None: + # Convert OpenDR pose to ROS2 pose message using bridge and publish it + self.pose_publisher.publish(self.bridge.to_ros_pose(pose)) + + if self.image_publisher is not None: + # Get an OpenCV image back + image = image.opencv() + # Annotate image with poses + for pose in poses: + draw(image, pose) + # Convert the annotated OpenDR image to ROS2 image message using bridge and publish it + self.image_publisher.publish(self.bridge.to_ros_image(Image(image), encoding='bgr8')) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated rgb image, if \"None\" " + "no output image is published", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_pose_annotated") + parser.add_argument("-d", "--detections_topic", help="Topic name for detection messages, if \"None\" " + "no detection message is published", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/poses") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--accelerate", help="Enables acceleration flags (e.g., stride)", default=False, + action="store_true") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + if args.accelerate: + stride = True + stages = 0 + half_prec = True + else: + stride = False + stages = 2 + half_prec = False + + pose_estimator_node = PoseEstimationNode(device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + detections_topic=args.detections_topic, + num_refinement_stages=stages, use_stride=stride, half_precision=half_prec) + + rclpy.spin(pose_estimator_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + pose_estimator_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/rgbd_hand_gesture_recognition_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/rgbd_hand_gesture_recognition_node.py new file mode 100755 index 0000000000..8b73944192 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/rgbd_hand_gesture_recognition_node.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import os +import cv2 +import numpy as np +import torch + +import rclpy +from rclpy.node import Node +import message_filters +from sensor_msgs.msg import Image as ROS_Image +from vision_msgs.msg import Classification2D + +from opendr_bridge import ROS2Bridge +from opendr.engine.data import Image +from opendr.perception.multimodal_human_centric import RgbdHandGestureLearner + + +class RgbdHandGestureNode(Node): + + def __init__(self, input_rgb_image_topic="/kinect2/qhd/image_color_rect", + input_depth_image_topic="/kinect2/qhd/image_depth_rect", + output_gestures_topic="/opendr/gestures", device="cuda", delay=0.1): + """ + Creates a ROS2 Node for gesture recognition from RGBD. Assuming that the following drivers have been installed: + https://github.com/OpenKinect/libfreenect2 and https://github.com/code-iai/iai_kinect2. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param input_depth_image_topic: Topic from which we are reading the input depth image + :type input_depth_image_topic: str + :param output_gestures_topic: Topic to which we are publishing the predicted gesture class + :type output_gestures_topic: str + :param device: Device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param delay: Define the delay (in seconds) with which rgb message and depth message can be synchronized + :type delay: float + """ + super().__init__("opendr_rgbd_hand_gesture_recognition_node") + + self.gesture_publisher = self.create_publisher(Classification2D, output_gestures_topic, 1) + + image_sub = message_filters.Subscriber(self, ROS_Image, input_rgb_image_topic, qos_profile=1) + depth_sub = message_filters.Subscriber(self, ROS_Image, input_depth_image_topic, qos_profile=1) + # synchronize image and depth data topics + ts = message_filters.ApproximateTimeSynchronizer([image_sub, depth_sub], queue_size=10, slop=delay) + ts.registerCallback(self.callback) + + self.bridge = ROS2Bridge() + + # Initialize the gesture recognition + self.gesture_learner = RgbdHandGestureLearner(n_class=16, architecture="mobilenet_v2", device=device) + model_path = './mobilenet_v2' + if not os.path.exists(model_path): + self.gesture_learner.download(path=model_path) + self.gesture_learner.load(path=model_path) + + # mean and std for preprocessing, based on HANDS dataset + self.mean = np.asarray([0.485, 0.456, 0.406, 0.0303]).reshape(1, 1, 4) + self.std = np.asarray([0.229, 0.224, 0.225, 0.0353]).reshape(1, 1, 4) + + self.get_logger().info("RGBD gesture recognition node started!") + + def callback(self, rgb_data, depth_data): + """ + Callback that process the input data and publishes to the corresponding topics + :param rgb_data: input image message + :type rgb_data: sensor_msgs.msg.Image + :param depth_data: input depth image message + :type depth_data: sensor_msgs.msg.Image + """ + + # Convert sensor_msgs.msg.Image into OpenDR Image and preprocess + rgb_image = self.bridge.from_ros_image(rgb_data, encoding='bgr8') + depth_data.encoding = 'mono16' + depth_image = 
self.bridge.from_ros_image_to_depth(depth_data, encoding='mono16') + img = self.preprocess(rgb_image, depth_image) + + # Run gesture recognition + gesture_class = self.gesture_learner.infer(img) + + # Publish results + ros_gesture = self.bridge.from_category_to_rosclass(gesture_class, self.get_clock().now().to_msg()) + self.gesture_publisher.publish(ros_gesture) + + def preprocess(self, rgb_image, depth_image): + """ + Preprocess rgb_image, depth_image and concatenate them + :param rgb_image: input RGB image + :type rgb_image: engine.data.Image + :param depth_image: input depth image + :type depth_image: engine.data.Image + """ + rgb_image = rgb_image.convert(format='channels_last') / (2**8 - 1) + depth_image = depth_image.convert(format='channels_last') / (2**16 - 1) + + # resize the images to 224x224 + rgb_image = cv2.resize(rgb_image, (224, 224)) + depth_image = cv2.resize(depth_image, (224, 224)) + + # concatenate and standardize + img = np.concatenate([rgb_image, np.expand_dims(depth_image, axis=-1)], axis=-1) + img = (img - self.mean) / self.std + img = Image(img, dtype=np.float32) + return img + + +def main(args=None): + rclpy.init(args=args) + + # Default topics are according to kinectv2 drivers at https://github.com/OpenKinect/libfreenect2 + # and https://github.com/code-iai-iai_kinect2 + parser = argparse.ArgumentParser() + parser.add_argument("-ic", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/kinect2/qhd/image_color_rect") + parser.add_argument("-id", "--input_depth_image_topic", help="Topic name for input depth image", + type=str, default="/kinect2/qhd/image_depth_rect") + parser.add_argument("-o", "--output_gestures_topic", help="Topic name for predicted gesture class", + type=str, default="/opendr/gestures") + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", + choices=["cuda", "cpu"]) + parser.add_argument("--delay", help="The delay (in seconds) with which RGB message and" + "depth message can be synchronized", type=float, default=0.1) + + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU") + device = "cpu" + except: + print("Using CPU") + device = "cpu" + + gesture_node = RgbdHandGestureNode(input_rgb_image_topic=args.input_rgb_image_topic, + input_depth_image_topic=args.input_depth_image_topic, + output_gestures_topic=args.output_gestures_topic, device=device, + delay=args.delay) + + rclpy.spin(gesture_node) + + gesture_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/semantic_segmentation_bisenet_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/semantic_segmentation_bisenet_node.py new file mode 100644 index 0000000000..91f860bbd1 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/semantic_segmentation_bisenet_node.py @@ -0,0 +1,197 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import numpy as np +import torch +import cv2 +import colorsys + +import rclpy +from rclpy.node import Node + +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROS2Bridge + +from opendr.engine.data import Image +from opendr.engine.target import Heatmap +from opendr.perception.semantic_segmentation import BisenetLearner + + +class BisenetNode(Node): + + def __init__(self, input_rgb_image_topic="/usb_cam/image_raw", output_heatmap_topic="/opendr/heatmap", + output_rgb_image_topic="/opendr/heatmap_visualization", device="cuda"): + """ + Creates a ROS2 Node for semantic segmentation with Bisenet. + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_heatmap_topic: Topic to which we are publishing the heatmap in the form of a ROS image containing + class ids + :type output_heatmap_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the heatmap image blended with the + input image and a class legend for visualization purposes + :type output_rgb_image_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + """ + super().__init__('opendr_semantic_segmentation_bisenet_node') + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_heatmap_topic is not None: + self.heatmap_publisher = self.create_publisher(ROS_Image, output_heatmap_topic, 1) + else: + self.heatmap_publisher = None + + if output_rgb_image_topic is not None: + self.visualization_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.visualization_publisher = None + + self.bridge = ROS2Bridge() + + # Initialize the semantic segmentation model + self.learner = BisenetLearner(device=device) + self.learner.download(path="bisenet_camvid") + self.learner.load("bisenet_camvid") + + self.class_names = ["Bicyclist", "Building", "Car", "Column Pole", "Fence", "Pedestrian", "Road", "Sidewalk", + "Sign Symbol", "Sky", "Tree", "Unknown"] + self.colors = self.getDistinctColors(len(self.class_names)) # Generate n distinct colors + + self.get_logger().info("Semantic segmentation bisenet node initialized.") + + def callback(self, data): + """ + Callback that process the input data and publishes to the corresponding topics. 
+ :param data: Input image message + :type data: sensor_msgs.msg.Image + """ + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + try: + # Run semantic segmentation to retrieve the OpenDR heatmap + heatmap = self.learner.infer(image) + + # Publish heatmap in the form of an image containing class ids + if self.heatmap_publisher is not None: + heatmap = Heatmap(heatmap.data.astype(np.uint8)) # Convert to uint8 + self.heatmap_publisher.publish(self.bridge.to_ros_image(heatmap)) + + # Publish heatmap color visualization blended with the input image and a class color legend + if self.visualization_publisher is not None: + heatmap_colors = Image(self.colors[heatmap.numpy()]) + image = Image(cv2.resize(image.convert("channels_last", "bgr"), (960, 720))) + alpha = 0.4 # 1.0 means full input image, 0.0 means full heatmap + beta = (1.0 - alpha) + image_blended = cv2.addWeighted(image.opencv(), alpha, heatmap_colors.opencv(), beta, 0.0) + # Add a legend + image_blended = self.addLegend(image_blended, np.unique(heatmap.data)) + + self.visualization_publisher.publish(self.bridge.to_ros_image(Image(image_blended), + encoding='bgr8')) + except Exception: + self.get_logger().warn('Failed to generate prediction.') + + def addLegend(self, image, unique_class_ints): + # Text setup + origin_x, origin_y = 5, 5 # Text origin x, y + color_rectangle_size = 25 + font_size = 1.0 + font_thickness = 2 + w_max = 0 + for i in range(len(unique_class_ints)): + text = self.class_names[unique_class_ints[i]] # Class name + x, y = origin_x, origin_y + i * color_rectangle_size # Text position + # Determine class color and convert to regular integers + color = (int(self.colors[unique_class_ints[i]][0]), + int(self.colors[unique_class_ints[i]][1]), + int(self.colors[unique_class_ints[i]][2])) + # Get text width and height + (w, h), _ = cv2.getTextSize(text, cv2.FONT_HERSHEY_SIMPLEX, font_size, font_thickness) + if w >= w_max: + w_max = w + # Draw partial background rectangle + image = cv2.rectangle(image, (x - origin_x, y), + (x + origin_x + color_rectangle_size + w_max, + y + color_rectangle_size), + (255, 255, 255, 0.5), -1) + # Draw color rectangle + image = cv2.rectangle(image, (x, y), + (x + color_rectangle_size, y + color_rectangle_size), color, -1) + # Draw class name text + image = cv2.putText(image, text, (x + color_rectangle_size + 2, y + h), + cv2.FONT_HERSHEY_SIMPLEX, font_size, (0, 0, 0), font_thickness) + return image + + @staticmethod + def HSVToRGB(h, s, v): + (r, g, b) = colorsys.hsv_to_rgb(h, s, v) + return np.array([int(255 * r), int(255 * g), int(255 * b)]) + + def getDistinctColors(self, n): + huePartition = 1.0 / (n + 1) + return np.array([self.HSVToRGB(huePartition * value, 1.0, 1.0) for value in range(0, n)]).astype(np.uint8) + + +def main(args=None): + rclpy.init(args=args) + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="image_raw") + parser.add_argument("-o", "--output_heatmap_topic", help="Topic to which we are publishing the heatmap in the form " + "of a ROS image containing class ids", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/heatmap") + parser.add_argument("-ov", "--output_rgb_image_topic", help="Topic to which we are publishing the heatmap image " + "blended with the input image and a class legend for " + "visualization purposes", + type=lambda value: value if value.lower() != "none" else 
None, + default="/opendr/heatmap_visualization") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + bisenet_node = BisenetNode(device=device, + input_rgb_image_topic=args.input_rgb_image_topic, + output_heatmap_topic=args.output_heatmap_topic, + output_rgb_image_topic=args.output_rgb_image_topic) + + rclpy.spin(bisenet_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + bisenet_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/skeleton_based_action_recognition_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/skeleton_based_action_recognition_node.py new file mode 100644 index 0000000000..dce55a5630 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/skeleton_based_action_recognition_node.py @@ -0,0 +1,249 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
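# Hedged illustrative sketch (not part of this diff): the BisenetNode above reduces to
# "OpenDR Image in, per-pixel CamVid class-id Heatmap out", with optional colorized
# blending for visualization. A minimal offline version of that pipeline, assuming a
# local test frame "test_frame.png" (hypothetical path), could look like this:
import cv2
import numpy as np
from opendr.engine.data import Image
from opendr.perception.semantic_segmentation import BisenetLearner

learner = BisenetLearner(device="cpu")
learner.download(path="bisenet_camvid")   # same pretrained CamVid weights as the node
learner.load("bisenet_camvid")

image = Image(cv2.imread("test_frame.png"))      # assumed 3-channel BGR test frame
heatmap = learner.infer(image)                   # Heatmap whose data holds CamVid class ids
class_ids = heatmap.data.astype(np.uint8)
print("CamVid classes present:", np.unique(class_ids))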
+ +import argparse +import torch +import numpy as np + +import rclpy +from rclpy.node import Node +from std_msgs.msg import String +from vision_msgs.msg import ObjectHypothesis +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROS2Bridge +from opendr_interface.msg import OpenDRPose2D + +from opendr.engine.data import Image +from opendr.perception.pose_estimation import draw +from opendr.perception.pose_estimation import LightweightOpenPoseLearner +from opendr.perception.skeleton_based_action_recognition import SpatioTemporalGCNLearner +from opendr.perception.skeleton_based_action_recognition import ProgressiveSpatioTemporalGCNLearner + + +class SkeletonActionRecognitionNode(Node): + + def __init__(self, input_rgb_image_topic="image_raw", + output_rgb_image_topic="/opendr/image_pose_annotated", + pose_annotations_topic="/opendr/poses", + output_category_topic="/opendr/skeleton_recognized_action", + output_category_description_topic="/opendr/skeleton_recognized_action_description", + device="cuda", model='stgcn'): + """ + Creates a ROS2 Node for skeleton-based action recognition + :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_rgb_image_topic: Topic to which we are publishing the annotated image (if None, we are not publishing + annotated image) + :type output_rgb_image_topic: str + :param pose_annotations_topic: Topic to which we are publishing the annotations (if None, we are not publishing + annotated pose annotations) + :type pose_annotations_topic: str + :param output_category_topic: Topic to which we are publishing the recognized action category info + (if None, we are not publishing the info) + :type output_category_topic: str + :param output_category_description_topic: Topic to which we are publishing the description of the recognized + action (if None, we are not publishing the description) + :type output_category_description_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param model: model to use for skeleton-based action recognition. 
+ (Options: 'stgcn', 'pstgcn') + :type model: str + """ + super().__init__('opendr_skeleton_based_action_recognition_node') + # Set up ROS topics and bridge + + self.image_subscriber = self.create_subscription(ROS_Image, input_rgb_image_topic, self.callback, 1) + + if output_rgb_image_topic is not None: + self.image_publisher = self.create_publisher(ROS_Image, output_rgb_image_topic, 1) + else: + self.image_publisher = None + + if pose_annotations_topic is not None: + self.pose_publisher = self.create_publisher(OpenDRPose2D, pose_annotations_topic, 1) + else: + self.pose_publisher = None + + if output_category_topic is not None: + self.hypothesis_publisher = self.create_publisher(ObjectHypothesis, output_category_topic, 1) + else: + self.hypothesis_publisher = None + + if output_category_description_topic is not None: + self.string_publisher = self.create_publisher(String, output_category_description_topic, 1) + else: + self.string_publisher = None + + self.bridge = ROS2Bridge() + + # Initialize the pose estimation + self.pose_estimator = LightweightOpenPoseLearner(device=device, num_refinement_stages=2, + mobilenet_use_stride=False, + half_precision=False + ) + self.pose_estimator.download(path=".", verbose=True) + self.pose_estimator.load("openpose_default") + + # Initialize the skeleton_based action recognition + if model == 'stgcn': + self.action_classifier = SpatioTemporalGCNLearner(device=device, dataset_name='nturgbd_cv', + method_name=model, in_channels=2, num_point=18, + graph_type='openpose') + elif model == 'pstgcn': + self.action_classifier = ProgressiveSpatioTemporalGCNLearner(device=device, dataset_name='nturgbd_cv', + topology=[5, 4, 5, 2, 3, 4, 3, 4], + in_channels=2, num_point=18, + graph_type='openpose') + + model_saved_path = self.action_classifier.download(path="./pretrained_models/"+model, + method_name=model, mode="pretrained", + file_name=model+'_ntu_cv_lw_openpose') + self.action_classifier.load(model_saved_path, model+'_ntu_cv_lw_openpose') + + self.get_logger().info("Skeleton-based action recognition node started!") + + def callback(self, data): + """ + Callback that process the input data and publishes to the corresponding topics + :param data: input message + :type data: sensor_msgs.msg.Image + """ + + # Convert sensor_msgs.msg.Image into OpenDR Image + image = self.bridge.from_ros_image(data, encoding='bgr8') + + # Run pose estimation + poses = self.pose_estimator.infer(image) + if len(poses) > 2: + # select two poses with highest energy + poses = _select_2_poses(poses) + + # Get an OpenCV image back + image = image.opencv() + # Annotate image and publish results + for pose in poses: + if self.pose_publisher is not None: + ros_pose = self.bridge.to_ros_pose(pose) + self.pose_publisher.publish(ros_pose) + # We get can the data back using self.bridge.from_ros_pose(ros_pose) + # e.g., opendr_pose = self.bridge.from_ros_pose(ros_pose) + draw(image, pose) + + if self.image_publisher is not None: + message = self.bridge.to_ros_image(Image(image), encoding='bgr8') + self.image_publisher.publish(message) + + num_frames = 300 + poses_list = [] + for _ in range(num_frames): + poses_list.append(poses) + skeleton_seq = _pose2numpy(num_frames, poses_list) + + # Run action recognition + category = self.action_classifier.infer(skeleton_seq) + category.confidence = float(category.confidence.max()) + + if self.hypothesis_publisher is not None: + self.hypothesis_publisher.publish(self.bridge.to_ros_category(category)) + + if self.string_publisher is not None: + 
self.string_publisher.publish(self.bridge.to_ros_category_description(category)) + + +def _select_2_poses(poses): + selected_poses = [] + energy = [] + for i in range(len(poses)): + s = poses[i].data[:, 0].std() + poses[i].data[:, 1].std() + energy.append(s) + energy = np.array(energy) + index = energy.argsort()[::-1][0:2] + for i in range(len(index)): + selected_poses.append(poses[index[i]]) + return selected_poses + + +def _pose2numpy(num_current_frames, poses_list): + C = 2 + T = 300 + V = 18 + M = 2 # num_person_in + skeleton_seq = np.zeros((1, C, T, V, M)) + for t in range(num_current_frames): + for m in range(len(poses_list[t])): + skeleton_seq[0, 0:2, t, :, m] = np.transpose(poses_list[t][m].data) + return skeleton_seq + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input image", + type=str, default="image_raw") + parser.add_argument("-o", "--output_rgb_image_topic", help="Topic name for output annotated image", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/image_pose_annotated") + parser.add_argument("-p", "--pose_annotations_topic", help="Topic name for pose annotations", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/poses") + parser.add_argument("-c", "--output_category_topic", help="Topic name for recognized action category", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/skeleton_recognized_action") + parser.add_argument("-d", "--output_category_description_topic", help="Topic name for description of the " + "recognized action category", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/skeleton_recognized_action_description") + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--model", help="Model to use, either \"stgcn\" or \"pstgcn\"", + type=str, default="stgcn", choices=["stgcn", "pstgcn"]) + + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + skeleton_action_recognition_node = \ + SkeletonActionRecognitionNode(input_rgb_image_topic=args.input_rgb_image_topic, + output_rgb_image_topic=args.output_rgb_image_topic, + pose_annotations_topic=args.pose_annotations_topic, + output_category_topic=args.output_category_topic, + output_category_description_topic=args.output_category_description_topic, + device=device, + model=args.model) + + rclpy.spin(skeleton_action_recognition_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + skeleton_action_recognition_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/speech_command_recognition_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/speech_command_recognition_node.py new file mode 100755 index 0000000000..d15f26433a --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/speech_command_recognition_node.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
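# Hedged illustrative sketch (not part of this diff): the skeleton-based action
# recognition node above feeds the (P)ST-GCN learner a tensor of shape
# (N, C, T, V, M) = (1, 2, 300, 18, 2): one sample, x/y channels, 300 frames,
# 18 OpenPose keypoints, up to two persons. A standalone check of that layout
# with dummy poses (all names below are made up for illustration):
import numpy as np

C, T, V, M = 2, 300, 18, 2
dummy_pose = np.random.rand(V, C)               # one fake skeleton: 18 joints, (x, y)
poses_per_frame = [[dummy_pose, dummy_pose]]    # a single frame containing two persons

skeleton_seq = np.zeros((1, C, T, V, M))
for t, frame in enumerate(poses_per_frame):
    for m, pose in enumerate(frame):
        skeleton_seq[0, 0:2, t, :, m] = pose.T  # same transpose as _pose2numpy() above
print(skeleton_seq.shape)                        # -> (1, 2, 300, 18, 2)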
+ +import argparse +import torch +import numpy as np + +import rclpy +from rclpy.node import Node +from audio_common_msgs.msg import AudioData +from vision_msgs.msg import Classification2D + +from opendr_bridge import ROS2Bridge +from opendr.engine.data import Timeseries +from opendr.perception.speech_recognition import MatchboxNetLearner, EdgeSpeechNetsLearner, QuadraticSelfOnnLearner + + +class SpeechRecognitionNode(Node): + + def __init__(self, input_audio_topic="/audio", output_speech_command_topic="/opendr/speech_recognition", + buffer_size=1.5, model="matchboxnet", model_path=None, device="cuda"): + """ + Creates a ROS2 Node for speech command recognition + :param input_audio_topic: Topic from which the audio data is received + :type input_audio_topic: str + :param output_speech_command_topic: Topic to which the predictions are published + :type output_speech_command_topic: str + :param buffer_size: Length of the audio buffer in seconds + :type buffer_size: float + :param model: base speech command recognition model: matchboxnet or quad_selfonn + :type model: str + :param device: device for inference ("cpu" or "cuda") + :type device: str + + """ + super().__init__("opendr_speech_command_recognition_node") + + self.publisher = self.create_publisher(Classification2D, output_speech_command_topic, 1) + + self.create_subscription(AudioData, input_audio_topic, self.callback, 1) + + self.bridge = ROS2Bridge() + + # Initialize the internal audio buffer + self.buffer_size = buffer_size + self.data_buffer = np.zeros((1, 1)) + + # Initialize the recognition model + if model == "matchboxnet": + self.learner = MatchboxNetLearner(output_classes_n=20, device=device) + load_path = "./MatchboxNet" + elif model == "edgespeechnets": + self.learner = EdgeSpeechNetsLearner(output_classes_n=20, device=device) + assert model_path is not None, "No pretrained EdgeSpeechNets model available for download" + elif model == "quad_selfonn": + self.learner = QuadraticSelfOnnLearner(output_classes_n=20, device=device) + load_path = "./QuadraticSelfOnn" + + # Download the recognition model + if model_path is None: + self.learner.download_pretrained(path=".") + self.learner.load(load_path) + else: + self.learner.load(model_path) + + self.get_logger().info("Speech command recognition node started!") + + def callback(self, msg_data): + """ + Callback that processes the input data and publishes predictions to the output topic + :param msg_data: incoming message + :type msg_data: audio_common_msgs.msg.AudioData + """ + # Accumulate data until the buffer is full + data = np.reshape(np.frombuffer(msg_data.data, dtype=np.int16)/32768.0, (1, -1)) + self.data_buffer = np.append(self.data_buffer, data, axis=1) + + if self.data_buffer.shape[1] > 16000*self.buffer_size: + + # Convert sample to OpenDR Timeseries and perform classification + input_sample = Timeseries(self.data_buffer) + class_pred = self.learner.infer(input_sample) + + # Publish output + ros_class = self.bridge.from_category_to_rosclass(class_pred, self.get_clock().now().to_msg()) + self.publisher.publish(ros_class) + + # Reset the audio buffer + self.data_buffer = np.zeros((1, 1)) + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_audio_topic", type=str, default="/audio", + help="Listen to input data on this topic") + parser.add_argument("-o", "--output_speech_command_topic", type=str, default="/opendr/speech_recognition", + help="Topic name for speech command output") + 
parser.add_argument("--device", type=str, default="cuda", choices=["cuda", "cpu"], + help="Device to use (cpu, cuda)") + parser.add_argument("--buffer_size", type=float, default=1.5, help="Size of the audio buffer in seconds") + parser.add_argument("--model", default="matchboxnet", choices=["matchboxnet", "edgespeechnets", "quad_selfonn"], + help="Model to be used for prediction: matchboxnet, edgespeechnets or quad_selfonn") + parser.add_argument("--model_path", type=str, + help="Path to the model files, if not given, the pretrained model will be downloaded") + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU") + device = "cpu" + except: + print("Using CPU") + device = "cpu" + + speech_node = SpeechRecognitionNode(input_audio_topic=args.input_audio_topic, + output_speech_command_topic=args.output_speech_command_topic, + buffer_size=args.buffer_size, model=args.model, model_path=args.model_path, + device=device) + + rclpy.spin(speech_node) + + speech_node.destroy_node() + rclpy.shutdown() + + +if __name__ == "__main__": + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/opendr_perception/video_activity_recognition_node.py b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/video_activity_recognition_node.py new file mode 100644 index 0000000000..9e137036b8 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/opendr_perception/video_activity_recognition_node.py @@ -0,0 +1,250 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import torch +import torchvision +import cv2 +import rclpy +from rclpy.node import Node +from pathlib import Path + +from std_msgs.msg import String +from vision_msgs.msg import ObjectHypothesis +from sensor_msgs.msg import Image as ROS_Image +from opendr_bridge import ROS2Bridge + +from opendr.engine.data import Video, Image +from opendr.perception.activity_recognition import CLASSES as KINETICS400_CLASSES +from opendr.perception.activity_recognition import CoX3DLearner +from opendr.perception.activity_recognition import X3DLearner + + +class HumanActivityRecognitionNode(Node): + def __init__( + self, + input_rgb_image_topic="image_raw", + output_category_topic="/opendr/human_activity_recognition", + output_category_description_topic="/opendr/human_activity_recognition_description", + device="cuda", + model="cox3d-m", + ): + """ + Creates a ROS2 Node for video-based human activity recognition. 
+ :param input_rgb_image_topic: Topic from which we are reading the input image + :type input_rgb_image_topic: str + :param output_category_topic: Topic to which we are publishing the recognized activity + (if None, we are not publishing the info) + :type output_category_topic: str + :param output_category_description_topic: Topic to which we are publishing the ID of the recognized action + (if None, we are not publishing the ID) + :type output_category_description_topic: str + :param device: device on which we are running inference ('cpu' or 'cuda') + :type device: str + :param model: Architecture to use for human activity recognition. + (Options: 'cox3d-s', 'cox3d-m', 'cox3d-l', 'x3d-xs', 'x3d-s', 'x3d-m', 'x3d-l') + :type model: str + """ + super().__init__("opendr_video_human_activity_recognition_node") + assert model in { + "cox3d-s", + "cox3d-m", + "cox3d-l", + "x3d-xs", + "x3d-s", + "x3d-m", + "x3d-l", + } + model_name, model_size = model.split("-") + Learner = {"cox3d": CoX3DLearner, "x3d": X3DLearner}[model_name] + + # Initialize the human activity recognition + self.learner = Learner(device=device, backbone=model_size) + self.learner.download(path="model_weights", model_names={model_size}) + self.learner.load(Path("model_weights") / f"x3d_{model_size}.pyth") + + # Set up preprocessing + if model_name == "cox3d": + self.preprocess = _image_preprocess( + image_size=self.learner.model_hparams["image_size"] + ) + else: # == x3d + self.preprocess = _video_preprocess( + image_size=self.learner.model_hparams["image_size"], + window_size=self.learner.model_hparams["frames_per_clip"], + ) + + # Set up ROS topics and bridge + self.image_subscriber = self.create_subscription( + ROS_Image, input_rgb_image_topic, self.callback, 1 + ) + self.hypothesis_publisher = ( + self.create_publisher(ObjectHypothesis, output_category_topic, 1) + if output_category_topic + else None + ) + self.string_publisher = ( + self.create_publisher(String, output_category_description_topic, 1) + if output_category_description_topic + else None + ) + self.bridge = ROS2Bridge() + self.get_logger().info("Video Human Activity Recognition node initialized.") + + def callback(self, data): + """ + Callback that process the input data and publishes to the corresponding topics + :param data: input message + :type data: sensor_msgs.msg.Image + """ + image = self.bridge.from_ros_image(data, encoding="rgb8") + if image is None: + return + + x = self.preprocess(image.convert("channels_first", "rgb")) + + result = self.learner.infer(x) + assert len(result) == 1 + category = result[0] + # Confidence for predicted class + category.confidence = float(category.confidence.max()) + category.description = KINETICS400_CLASSES[category.data] # Class name + + if self.hypothesis_publisher is not None: + self.hypothesis_publisher.publish(self.bridge.to_ros_category(category)) + + if self.string_publisher is not None: + self.string_publisher.publish( + self.bridge.to_ros_category_description(category) + ) + + +def _resize(image, size=None, inter=cv2.INTER_AREA): + # initialize the dimensions of the image to be resized and + # grab the image size + dim = None + (h, w) = image.shape[:2] + + if h > w: + # calculate the ratio of the width and construct the + # dimensions + r = size / float(w) + dim = (size, int(h * r)) + else: + # calculate the ratio of the height and construct the + # dimensions + r = size / float(h) + dim = (int(w * r), size) + + # resize the image + resized = cv2.resize(image, dim, interpolation=inter) + + # return the resized 
image + return resized + + +def _image_preprocess(image_size: int): + standardize = torchvision.transforms.Normalize( + mean=(0.45, 0.45, 0.45), std=(0.225, 0.225, 0.225) + ) + + def wrapped(frame): + nonlocal standardize + frame = frame.transpose((1, 2, 0)) # C, H, W -> H, W, C + frame = _resize(frame, size=image_size) + frame = torch.tensor(frame).permute((2, 0, 1)) # H, W, C -> C, H, W + frame = frame / 255.0 # [0, 255] -> [0.0, 1.0] + frame = standardize(frame) + return Image(frame, dtype=float) + + return wrapped + + +def _video_preprocess(image_size: int, window_size: int): + frames = [] + + standardize = torchvision.transforms.Normalize( + mean=(0.45, 0.45, 0.45), std=(0.225, 0.225, 0.225) + ) + + def wrapped(frame): + nonlocal frames, standardize + frame = frame.transpose((1, 2, 0)) # C, H, W -> H, W, C + frame = _resize(frame, size=image_size) + frame = torch.tensor(frame).permute((2, 0, 1)) # H, W, C -> C, H, W + frame = frame / 255.0 # [0, 255] -> [0.0, 1.0] + frame = standardize(frame) + if not frames: + frames = [frame for _ in range(window_size)] + else: + frames.pop(0) + frames.append(frame) + vid = Video(torch.stack(frames, dim=1)) + return vid + + return wrapped + + +def main(args=None): + rclpy.init(args=args) + + parser = argparse.ArgumentParser() + parser.add_argument("-i", "--input_rgb_image_topic", help="Topic name for input rgb image", + type=str, default="/image_raw") + parser.add_argument("-o", "--output_category_topic", help="Topic to which we are publishing the recognized activity", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/human_activity_recognition") + parser.add_argument("-od", "--output_category_description_topic", + help="Topic to which we are publishing the ID of the recognized action", + type=lambda value: value if value.lower() != "none" else None, + default="/opendr/human_activity_recognition_description") + parser.add_argument("--device", help='Device to use, either "cpu" or "cuda", defaults to "cuda"', + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--model", help="Architecture to use for human activity recognition.", + type=str, default="cox3d-m", + choices=["cox3d-s", "cox3d-m", "cox3d-l", "x3d-xs", "x3d-s", "x3d-m", "x3d-l"]) + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. 
Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except Exception: + print("Using CPU.") + device = "cpu" + + human_activity_recognition_node = HumanActivityRecognitionNode( + input_rgb_image_topic=args.input_rgb_image_topic, + output_category_topic=args.output_category_topic, + output_category_description_topic=args.output_category_description_topic, + device=device, + model=args.model, + ) + rclpy.spin(human_activity_recognition_node) + + # Destroy the node explicitly + # (optional - otherwise it will be done automatically + # when the garbage collector destroys the node object) + human_activity_recognition_node.destroy_node() + rclpy.shutdown() + + +if __name__ == "__main__": + main() diff --git a/projects/opendr_ws_2/src/opendr_perception/package.xml b/projects/opendr_ws_2/src/opendr_perception/package.xml new file mode 100644 index 0000000000..a178dbd084 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/package.xml @@ -0,0 +1,26 @@ + + + + opendr_perception + 2.0.0 + OpenDR ROS2 nodes for the perception package + OpenDR Project Coordinator + Apache License v2.0 + + rclpy + + std_msgs + vision_msgs + geometry_msgs + + opendr_bridge + + ament_copyright + ament_flake8 + ament_pep257 + python3-pytest + + + ament_python + + diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/__init__.py b/projects/opendr_ws_2/src/opendr_perception/resource/opendr_perception similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/__init__.py rename to projects/opendr_ws_2/src/opendr_perception/resource/opendr_perception diff --git a/projects/opendr_ws_2/src/opendr_perception/setup.cfg b/projects/opendr_ws_2/src/opendr_perception/setup.cfg new file mode 100644 index 0000000000..45e65634f1 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/setup.cfg @@ -0,0 +1,6 @@ +[develop] +script_dir=$base/lib/opendr_perception +[install] +install_scripts=$base/lib/opendr_perception +[build_scripts] +executable = /usr/bin/env python3 diff --git a/projects/opendr_ws_2/src/opendr_perception/setup.py b/projects/opendr_ws_2/src/opendr_perception/setup.py new file mode 100644 index 0000000000..50aabf50a2 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/setup.py @@ -0,0 +1,55 @@ +from setuptools import setup + +package_name = 'opendr_perception' + +setup( + name=package_name, + version='2.0.0', + packages=[package_name], + data_files=[ + ('share/ament_index/resource_index/packages', + ['resource/' + package_name]), + ('share/' + package_name, ['package.xml']), + ], + install_requires=['setuptools'], + zip_safe=True, + maintainer='OpenDR Project Coordinator', + maintainer_email='tefas@csd.auth.gr', + description='OpenDR ROS2 nodes for the perception package', + license='Apache License v2.0', + tests_require=['pytest'], + entry_points={ + 'console_scripts': [ + 'pose_estimation = opendr_perception.pose_estimation_node:main', + 'hr_pose_estimation = opendr_perception.hr_pose_estimation_node:main', + 'object_detection_2d_centernet = opendr_perception.object_detection_2d_centernet_node:main', + 'object_detection_2d_detr = opendr_perception.object_detection_2d_detr_node:main', + 'object_detection_2d_yolov3 = opendr_perception.object_detection_2d_yolov3_node:main', + 'object_detection_2d_yolov5 = opendr_perception.object_detection_2d_yolov5_node:main', + 'object_detection_2d_ssd = 
opendr_perception.object_detection_2d_ssd_node:main', + 'object_detection_2d_nanodet = opendr_perception.object_detection_2d_nanodet_node:main', + 'object_detection_2d_gem = opendr_perception.object_detection_2d_gem_node:main', + 'object_tracking_2d_siamrpn = opendr_perception.object_tracking_2d_siamrpn_node:main', + 'face_detection_retinaface = opendr_perception.face_detection_retinaface_node:main', + 'semantic_segmentation_bisenet = opendr_perception.semantic_segmentation_bisenet_node:main', + 'panoptic_segmentation = opendr_perception.panoptic_segmentation_efficient_ps_node:main', + 'face_recognition = opendr_perception.face_recognition_node:main', + 'fall_detection = opendr_perception.fall_detection_node:main', + 'point_cloud_dataset = opendr_perception.point_cloud_dataset_node:main', + 'image_dataset = opendr_perception.image_dataset_node:main', + 'object_detection_3d_voxel = opendr_perception.object_detection_3d_voxel_node:main', + 'object_tracking_3d_ab3dmot = opendr_perception.object_tracking_3d_ab3dmot_node:main', + 'object_tracking_2d_fair_mot = opendr_perception.object_tracking_2d_fair_mot_node:main', + 'object_tracking_2d_deep_sort = opendr_perception.object_tracking_2d_deep_sort_node:main', + 'video_activity_recognition = opendr_perception.video_activity_recognition_node:main', + 'audiovisual_emotion_recognition = opendr_perception.audiovisual_emotion_recognition_node:main', + 'speech_command_recognition = opendr_perception.speech_command_recognition_node:main', + 'heart_anomaly_detection = opendr_perception.heart_anomaly_detection_node:main', + 'rgbd_hand_gestures_recognition = opendr_perception.rgbd_hand_gesture_recognition_node:main', + 'landmark_based_facial_expression_recognition = \ + opendr_perception.landmark_based_facial_expression_recognition_node:main', + 'facial_emotion_estimation = opendr_perception.facial_emotion_estimation_node:main', + 'skeleton_based_action_recognition = opendr_perception.skeleton_based_action_recognition_node:main', + ], + }, +) diff --git a/projects/opendr_ws_2/src/opendr_perception/test/test_copyright.py b/projects/opendr_ws_2/src/opendr_perception/test/test_copyright.py new file mode 100644 index 0000000000..cc8ff03f79 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/test/test_copyright.py @@ -0,0 +1,23 @@ +# Copyright 2015 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_copyright.main import main +import pytest + + +@pytest.mark.copyright +@pytest.mark.linter +def test_copyright(): + rc = main(argv=['.', 'test']) + assert rc == 0, 'Found errors' diff --git a/projects/opendr_ws_2/src/opendr_perception/test/test_flake8.py b/projects/opendr_ws_2/src/opendr_perception/test/test_flake8.py new file mode 100644 index 0000000000..27ee1078ff --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/test/test_flake8.py @@ -0,0 +1,25 @@ +# Copyright 2017 Open Source Robotics Foundation, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_flake8.main import main_with_errors +import pytest + + +@pytest.mark.flake8 +@pytest.mark.linter +def test_flake8(): + rc, errors = main_with_errors(argv=[]) + assert rc == 0, \ + 'Found %d code style errors / warnings:\n' % len(errors) + \ + '\n'.join(errors) diff --git a/projects/opendr_ws_2/src/opendr_perception/test/test_pep257.py b/projects/opendr_ws_2/src/opendr_perception/test/test_pep257.py new file mode 100644 index 0000000000..b234a3840f --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_perception/test/test_pep257.py @@ -0,0 +1,23 @@ +# Copyright 2015 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_pep257.main import main +import pytest + + +@pytest.mark.linter +@pytest.mark.pep257 +def test_pep257(): + rc = main(argv=['.', 'test']) + assert rc == 0, 'Found code style errors / warnings' diff --git a/projects/opendr_ws_2/src/opendr_planning/launch/end_to_end_planning_robot_launch.py b/projects/opendr_ws_2/src/opendr_planning/launch/end_to_end_planning_robot_launch.py new file mode 100644 index 0000000000..ae61c2c1f7 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_planning/launch/end_to_end_planning_robot_launch.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import pathlib +import launch +from launch_ros.actions import Node +from launch import LaunchDescription +from ament_index_python.packages import get_package_share_directory +from webots_ros2_driver.webots_launcher import WebotsLauncher, Ros2SupervisorLauncher +from webots_ros2_driver.utils import controller_url_prefix + + +def generate_launch_description(): + package_dir = get_package_share_directory('opendr_planning') + robot_description = pathlib.Path(os.path.join(package_dir, 'resource', 'uav_robot.urdf')).read_text() + + webots = WebotsLauncher( + world=os.path.join(package_dir, 'worlds', 'train-no-dynamic-random-obstacles.wbt') + ) + + ros2_supervisor = Ros2SupervisorLauncher() + + e2e_UAV_robot_driver = Node( + package='webots_ros2_driver', + executable='driver', + output='screen', + additional_env={'WEBOTS_CONTROLLER_URL': controller_url_prefix() + 'quad_plus_sitl'}, + parameters=[ + {'robot_description': robot_description}, + ] + ) + + return LaunchDescription([ + webots, + e2e_UAV_robot_driver, + ros2_supervisor, + launch.actions.RegisterEventHandler( + event_handler=launch.event_handlers.OnProcessExit( + target_action=webots, + on_exit=[launch.actions.EmitEvent(event=launch.events.Shutdown())], + ) + ) + ]) diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/__init__.py b/projects/opendr_ws_2/src/opendr_planning/opendr_planning/__init__.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/__init__.py rename to projects/opendr_ws_2/src/opendr_planning/opendr_planning/__init__.py diff --git a/projects/opendr_ws_2/src/opendr_planning/opendr_planning/end_to_end_planner_node.py b/projects/opendr_ws_2/src/opendr_planning/opendr_planning/end_to_end_planner_node.py new file mode 100755 index 0000000000..9cd8fa2c83 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_planning/opendr_planning/end_to_end_planner_node.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
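# Hedged usage sketch (not part of this diff): the Webots launch file above can be
# composed into a larger ROS2 launch description. Package and file names follow this
# PR; whether the launch file ends up installed under share/opendr_planning/launch is
# an assumption made here for illustration.
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import IncludeLaunchDescription
from launch.launch_description_sources import PythonLaunchDescriptionSource


def generate_launch_description():
    planning_dir = get_package_share_directory('opendr_planning')
    planner_sim = IncludeLaunchDescription(
        PythonLaunchDescriptionSource(
            os.path.join(planning_dir, 'launch', 'end_to_end_planning_robot_launch.py')
        )
    )
    return LaunchDescription([planner_sim])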
+ +import rclpy +from rclpy.node import Node +import numpy as np +from cv_bridge import CvBridge +from sensor_msgs.msg import Imu, Image +from geometry_msgs.msg import PoseStamped, PointStamped +from opendr.planning.end_to_end_planning import EndToEndPlanningRLLearner +from opendr.planning.end_to_end_planning.utils.euler_quaternion_transformations import euler_from_quaternion +from opendr.planning.end_to_end_planning.utils.euler_quaternion_transformations import euler_to_quaternion + + +class EndToEndPlannerNode(Node): + + def __init__(self): + """ + Creates a ROS Node for end-to-end planner + """ + super().__init__("opendr_end_to_end_planner_node") + self.model_name = "" + self.current_pose = PoseStamped() + self.target_pose = PoseStamped() + self.current_pose.header.frame_id = "map" + self.target_pose.header.frame_id = "map" + self.bridge = CvBridge() + self.input_depth_image_topic = "/quad_plus_sitl/range_finder" + self.position_topic = "/quad_plus_sitl/gps" + self.orientation_topic = "/imu" + self.end_to_end_planner = EndToEndPlanningRLLearner(env=None) + + self.ros2_pub_current_pose = self.create_publisher(PoseStamped, 'current_uav_pose', 10) + self.ros2_pub_target_pose = self.create_publisher(PoseStamped, 'target_uav_pose', 10) + self.create_subscription(Imu, self.orientation_topic, self.imu_callback, 1) + self.create_subscription(PointStamped, self.position_topic, self.gps_callback, 1) + self.create_subscription(Image, self.input_depth_image_topic, self.range_callback, 1) + self.get_logger().info("End-to-end planning node initialized.") + + def range_callback(self, data): + image_arr = self.bridge.imgmsg_to_cv2(data) + self.range_image = ((np.clip(image_arr.reshape((64, 64, 1)), 0, 15) / 15.) * 255).astype(np.uint8) + observation = {'depth_cam': np.copy(self.range_image), 'moving_target': np.array([5, 0, 0])} + action = self.end_to_end_planner.infer(observation, deterministic=True)[0] + self.publish_poses(action) + + def gps_callback(self, data): + self.current_pose.pose.position.x = -data.point.x + self.current_pose.pose.position.y = -data.point.y + self.current_pose.pose.position.z = data.point.z + + def imu_callback(self, data): + self.current_orientation = data.orientation + self.current_yaw = euler_from_quaternion(data.orientation)["yaw"] + self.current_pose.pose.orientation = euler_to_quaternion(0, 0, yaw=self.current_yaw) + + def model_name_callback(self, data): + if data.data[:5] == "robot": + self.model_name = data.data + if data.data[:4] == "quad": + self.model_name = data.data + + def publish_poses(self, action): + self.ros2_pub_current_pose.publish(self.current_pose) + forward_step = np.cos(action[0] * 22.5 / 180 * np.pi) + side_step = np.sin(action[0] * 22.5 / 180 * np.pi) + yaw_step = action[1] * 22.5 / 180 * np.pi + self.target_pose.pose.position.x = self.current_pose.pose.position.x + forward_step * np.cos( + self.current_yaw) - side_step * np.sin(self.current_yaw) + self.target_pose.pose.position.y = self.current_pose.pose.position.y + forward_step * np.sin( + self.current_yaw) + side_step * np.cos(self.current_yaw) + self.target_pose.pose.position.z = self.current_pose.pose.position.z + self.target_pose.pose.orientation = euler_to_quaternion(0, 0, yaw=self.current_yaw + yaw_step) + self.ros2_pub_target_pose.publish(self.target_pose) + + +def main(args=None): + rclpy.init(args=args) + end_to_end_planner_node = EndToEndPlannerNode() + rclpy.spin(end_to_end_planner_node) + end_to_end_planner_node.destroy_node() + rclpy.shutdown() + + +if __name__ == '__main__': + 
main() diff --git a/projects/opendr_ws_2/src/opendr_planning/opendr_planning/end_to_end_planning_robot_driver.py b/projects/opendr_ws_2/src/opendr_planning/opendr_planning/end_to_end_planning_robot_driver.py new file mode 100644 index 0000000000..39394af5c7 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_planning/opendr_planning/end_to_end_planning_robot_driver.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import rclpy + + +class EndToEndPlanningUAVRobotDriver: + def init(self, webots_node, properties): + rclpy.init(args=None) + self.__node = rclpy.create_node('end_to_end_planning_uav_robot_driver') + + def step(self): + rclpy.spin_once(self.__node, timeout_sec=0) diff --git a/projects/opendr_ws_2/src/opendr_planning/package.xml b/projects/opendr_ws_2/src/opendr_planning/package.xml new file mode 100644 index 0000000000..b9a5f338c1 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_planning/package.xml @@ -0,0 +1,28 @@ + + + + opendr_planning + 2.0.0 + OpenDR ROS2 nodes for the planning package + OpenDR Project Coordinator + Apache License v2.0 + + webots_ros2_driver + + rclpy + + std_msgs + vision_msgs + geometry_msgs + + opendr_bridge + + ament_copyright + ament_flake8 + ament_pep257 + python3-pytest + + + ament_python + + diff --git a/projects/opendr_ws_2/src/opendr_planning/protos/box.proto b/projects/opendr_ws_2/src/opendr_planning/protos/box.proto new file mode 100644 index 0000000000..9c34af8955 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_planning/protos/box.proto @@ -0,0 +1,88 @@ +#VRML_SIM R2022b utf8 +# license: Copyright Cyberbotics Ltd. Licensed for use only with Webots. +# license url: https://cyberbotics.com/webots_assets_license +# This bounding object with a pipe shape is formed by a group of boxes. +PROTO box [ + field SFFloat height 0.2 # Defines the height of the pipe. + field SFFloat radius 0.5 # Defines the radius of the pipe. + field SFFloat thickness 0.05 # Defines the thickness of the pipe. + field SFInt32 subdivision 8 # Defines the number of polygons used to represent the pipe and so its resolution. + field SFFloat accuracy 0.0001 # Defines how much boxes position can differ on y axis: a 0 value represents an error-free model but it will slow down the simulation. +] +{ + %{ + local wbrandom = require('wbrandom') + + -- parameter checking + local subdivision = fields.subdivision.value + if subdivision > 200 then + io.stderr:write("High value for 'subdivision'. 
This can slow down the simulation\n") + elseif subdivision < 8 then + io.stderr:write("'subdivision' must be greater than or equal to 8\n") + subdivision = 8 + end + + local height = fields.height.value + if height <= 0 then + io.stderr:write("'height' must be greater than 0\n") + height = fields.height.defaultValue + end + + local radius = fields.radius.value + if radius <= 0 then + io.stderr:write("'radius' must be greater than 0\n") + radius = fields.radius.defaultValue + end + + local thickness = fields.thickness.value + if thickness <= 0 then + io.stderr:write("'thickness' must be greater than 0\n") + thickness = radius / 2 + elseif thickness >= fields.radius.value then + io.stderr:write("'thickness' must be smaller than 'radius'\n") + thickness = radius / 2 + end + + -- global stuff before entering in the main loop + local beta = 2.0 * math.pi / subdivision + local alpha = beta / 2.0 + local innerRadius = radius - thickness + local su = radius * math.cos(alpha) - innerRadius + if su < 0 then + -- fixed edge case: + -- There are 2 inner radius, depending if we measure it along the center or along the edge of the boxes. + -- If the thickness is below the difference of these two radius, then the algorithm can not achieve. + io.stderr:write("Either 'thickness' or 'subdivision' are too small for the box subdivision algorithm.\n") + su = math.abs(su) + end + local sv = height + local sw = radius * math.sin(alpha) * 2.0 + local boxRadius = innerRadius + su / 2.0 + }% + Group { # set of boxes + children [ + %{ for i = 0, (subdivision - 1) do }% + %{ + -- position of an internal box + local gamma = beta * i + beta / 2 + local ax = boxRadius * math.sin(gamma) + local ay = 0 + local az = boxRadius * math.cos(gamma) + local angle = gamma + 0.5 * math.pi + -- add small offset to boxes y translation to reduce constraints + -- on the top and bottom face due to co-planarity + local offset = wbrandom.real(-1.0, 1.0) * fields.accuracy.value; + }% + Transform { + translation %{= ax}% %{= ay + offset }% %{= az}% + rotation 0 1 0 %{= angle }% + children [ + Box { + size %{= su}% %{= sv}% %{= sw}% + } + ] + } + %{ end }% + ] + } +} diff --git a/projects/perception/__init__.py b/projects/opendr_ws_2/src/opendr_planning/resource/opendr_planning similarity index 100% rename from projects/perception/__init__.py rename to projects/opendr_ws_2/src/opendr_planning/resource/opendr_planning diff --git a/projects/opendr_ws_2/src/opendr_planning/resource/uav_robot.urdf b/projects/opendr_ws_2/src/opendr_planning/resource/uav_robot.urdf new file mode 100644 index 0000000000..7b99a8080c --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_planning/resource/uav_robot.urdf @@ -0,0 +1,34 @@ + + + + + + true + true + + + + + + true + true + + + + + + true + true + + + + + true + /imu + true + inertial_unit + + + + + diff --git a/projects/opendr_ws_2/src/opendr_planning/setup.cfg b/projects/opendr_ws_2/src/opendr_planning/setup.cfg new file mode 100644 index 0000000000..35a3135d7e --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_planning/setup.cfg @@ -0,0 +1,6 @@ +[develop] +script_dir=$base/lib/opendr_planning +[install] +install_scripts=$base/lib/opendr_planning +[build_scripts] +executable = /usr/bin/env python3 diff --git a/projects/opendr_ws_2/src/opendr_planning/setup.py b/projects/opendr_ws_2/src/opendr_planning/setup.py new file mode 100644 index 0000000000..37cb78733e --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_planning/setup.py @@ -0,0 +1,31 @@ +from setuptools import setup + +package_name = 
'opendr_planning' +data_files = [] +data_files.append(('share/ament_index/resource_index/packages', ['resource/' + package_name])) +data_files.append(('share/' + package_name + '/launch', ['launch/end_to_end_planning_robot_launch.py'])) +data_files.append(('share/' + package_name + '/worlds', ['worlds/train-no-dynamic-random-obstacles.wbt'])) +data_files.append(('share/' + package_name + '/protos', ['protos/box.proto'])) +data_files.append(('share/' + package_name + '/resource', ['resource/uav_robot.urdf'])) +data_files.append(('share/' + package_name, ['package.xml'])) + + +setup( + name=package_name, + version='2.0.0', + packages=[package_name], + data_files=data_files, + install_requires=['setuptools'], + zip_safe=True, + maintainer='OpenDR Project Coordinator', + maintainer_email='tefas@csd.auth.gr', + description='OpenDR ROS2 nodes for the planning package', + license='Apache License v2.0', + tests_require=['pytest'], + entry_points={ + 'console_scripts': [ + 'end_to_end_planner = opendr_planning.end_to_end_planner_node:main', + 'end_to_end_planning_robot_driver = opendr_planning.end_to_end_planning_robot_driver:main', + ], + }, +) diff --git a/projects/opendr_ws_2/src/opendr_planning/test/test_copyright.py b/projects/opendr_ws_2/src/opendr_planning/test/test_copyright.py new file mode 100644 index 0000000000..cc8ff03f79 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_planning/test/test_copyright.py @@ -0,0 +1,23 @@ +# Copyright 2015 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_copyright.main import main +import pytest + + +@pytest.mark.copyright +@pytest.mark.linter +def test_copyright(): + rc = main(argv=['.', 'test']) + assert rc == 0, 'Found errors' diff --git a/projects/opendr_ws_2/src/opendr_planning/test/test_flake8.py b/projects/opendr_ws_2/src/opendr_planning/test/test_flake8.py new file mode 100644 index 0000000000..27ee1078ff --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_planning/test/test_flake8.py @@ -0,0 +1,25 @@ +# Copyright 2017 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from ament_flake8.main import main_with_errors +import pytest + + +@pytest.mark.flake8 +@pytest.mark.linter +def test_flake8(): + rc, errors = main_with_errors(argv=[]) + assert rc == 0, \ + 'Found %d code style errors / warnings:\n' % len(errors) + \ + '\n'.join(errors) diff --git a/projects/opendr_ws_2/src/opendr_planning/test/test_pep257.py b/projects/opendr_ws_2/src/opendr_planning/test/test_pep257.py new file mode 100644 index 0000000000..b234a3840f --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_planning/test/test_pep257.py @@ -0,0 +1,23 @@ +# Copyright 2015 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_pep257.main import main +import pytest + + +@pytest.mark.linter +@pytest.mark.pep257 +def test_pep257(): + rc = main(argv=['.', 'test']) + assert rc == 0, 'Found code style errors / warnings' diff --git a/projects/opendr_ws_2/src/opendr_planning/worlds/train-no-dynamic-random-obstacles.wbt b/projects/opendr_ws_2/src/opendr_planning/worlds/train-no-dynamic-random-obstacles.wbt new file mode 100644 index 0000000000..aff3322fe9 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_planning/worlds/train-no-dynamic-random-obstacles.wbt @@ -0,0 +1,503 @@ +#VRML_SIM R2022b utf8 + +EXTERNPROTO "https://raw.githubusercontent.com/cyberbotics/webots/R2022b/projects/appearances/protos/Grass.proto" +EXTERNPROTO "https://raw.githubusercontent.com/cyberbotics/webots/R2022b/projects/appearances/protos/Parquetry.proto" +EXTERNPROTO "https://raw.githubusercontent.com/cyberbotics/webots/R2022b/projects/objects/floors/protos/Floor.proto" +EXTERNPROTO "https://raw.githubusercontent.com/cyberbotics/webots/R2022b/projects/objects/apartment_structure/protos/Wall.proto" +EXTERNPROTO "../protos/box.proto" + +WorldInfo { + gravity 9.80665 + basicTimeStep 1 + FPS 15 + optimalThreadCount 4 + randomSeed 52 +} +Viewpoint { + orientation 0.2493542513111129 -0.0015806740935321666 -0.9684110484822468 3.0320770615235597 + position 31.77129355822201 3.9289180767659815 21.40152949153122 + followType "Mounted Shot" +} +DEF DEF_VEHICLE Robot { + translation -3.20133 -0.667551 2.5 + rotation 0.5387460067434838 -0.5957150074565648 -0.5957150074565648 2.15327 + children [ + Lidar { + translation 0 0.07 0 + rotation 3.4621799999783786e-06 -0.999999999993755 -7.095049999955691e-07 3.14159 + horizontalResolution 32 + fieldOfView 1.57 + verticalFieldOfView 0.1 + numberOfLayers 1 + minRange 0.3 + maxRange 5 + } + RangeFinder { + translation 0 0.1 0 + rotation -0.5773502691896258 -0.5773502691896258 -0.5773502691896258 2.0943951023931957 + maxRange 15 + } + TouchSensor { + translation 0 0.03 0 + rotation 0 1 0 1.5708 + name "touch sensor-collision" + boundingObject box { + } + } + TouchSensor { + translation 0 0.03 0.5 + rotation 0 1 0 1.5708 + name "touch sensor-safety1" + boundingObject box { + radius 1 + subdivision 12 + } + } + TouchSensor { + translation 0 0.03 1 + rotation 0 1 0 1.5708 + name "touch sensor-safety2" + boundingObject box { + radius 1.5 + subdivision 16 + } 
+ } + Receiver { + name "receiver_main" + type "serial" + channel 1 + bufferSize 32 + } + Emitter { + name "emitter_plugin" + description "commuicates with physics plugin" + } + Shape { + appearance Appearance { + material Material { + } + } + geometry Box { + size 0.1 0.1 0.1 + } + } + Camera { + translation 0 0.12 0 + rotation 0.1294279597735375 0.9831056944488314 0.1294279597735375 -1.58783 + name "camera1" + width 128 + height 128 + } + Compass { + name "compass1" + } + GPS { + name "gps" + } + Accelerometer { + name "accelerometer1" + } + InertialUnit { + rotation 0 1 0 1.5707947122222805 + name "inertial_unit" + } + Gyro { + name "gyro1" + } + Transform { + translation 0 0 0.1 + children [ + Shape { + appearance Appearance { + material Material { + } + } + geometry DEF DEF_ARM Cylinder { + height 0.1 + radius 0.01 + } + } + ] + } + Transform { + translation -0.09999999999999999 0 0 + rotation -0.7071067811865476 0 0.7071067811865476 -3.1415923071795864 + children [ + Shape { + appearance Appearance { + material Material { + } + } + geometry USE DEF_ARM + } + ] + } + Transform { + translation 0.09999999999999999 0 0 + rotation 0 -1 0 -1.5707963071795863 + children [ + Shape { + appearance Appearance { + material Material { + diffuseColor 1 0.09999999999999999 0 + } + } + geometry USE DEF_ARM + } + ] + } + Transform { + translation 0 0 -0.1 + children [ + Shape { + appearance Appearance { + material Material { + diffuseColor 0.7999999999999999 0.7999999999999999 0.7999999999999999 + } + } + geometry USE DEF_ARM + } + ] + } + ] + name "quad_plus_sitl" + boundingObject Box { + size 0.1 0.1 0.1 + } + rotationStep 0.261799 + controller "" + customData "1" + supervisor TRUE +} +Background { + skyColor [ + 0.15 0.5 1 + ] +} +DirectionalLight { +} +Floor { + translation 0 0 -1 + rotation 0 0 1 1.5707963267948966 + size 500 750 + appearance Grass { + } +} +Floor { + translation -4 0 -0.96 + rotation 0 0 1 1.5707963267948966 + name "floor(13)" + size 0.5 30 + appearance Parquetry { + type "dark strip" + } +} +Floor { + translation -8 -14 -0.98 + rotation 0 0 1 1.5707963267948966 + name "floor(5)" + size 100 50 + appearance PBRAppearance { + baseColor 0.6 0.8 0.6 + roughness 1 + } +} +DEF cyl1 Solid { + translation -13.30571834554473 -1.447574483178714 2.7665126217916747 + rotation 0.7046199859242116 -0.2718054272768975 -0.6554635650735948 1.3264162624880482 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF cyl_geo1 Cylinder { + height 1.6358972201698152 + radius 0.8305567381873773 + } + castShadows FALSE + } + ] + name "solid(6)" + boundingObject USE cyl_geo1 +} +DEF cyl2 Solid { + translation -11.573784058504305 -0.5709706439613236 2.7898036661292727 + rotation 0.80041453284557 -0.23379069518091386 -0.5519768894224041 3.004019614452083 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF cyl_geo2 Cylinder { + height 1.5666220746502095 + radius 1.4073464879682038 + } + castShadows FALSE + } + ] + name "solid(16)" + boundingObject USE cyl_geo2 +} +DEF cyl3 Solid { + translation 6.495757807871515 -1.6144414097525925 2.055833951531991 + rotation 0.9501520694787192 0.1803287878394691 -0.254347347424059 1.1144016628344635 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF cyl_geo3 Cylinder { + height 2.9932008423005847 + radius 1.3817552987759123 + } + castShadows FALSE 
+ } + ] + name "solid(17)" + boundingObject USE cyl_geo3 +} +DEF cyl4 Solid { + translation 0 0 -10 + rotation 0.8826129905240483 -0.436261871860521 0.17512820480707927 -3.0124718491193443 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF cyl_geo4 Cylinder { + height 2.040387292247227 + radius 1.7321406926258653 + } + castShadows FALSE + } + ] + name "solid(18)" + boundingObject USE cyl_geo4 +} +DEF cyl5 Solid { + translation 0 0 -10 + rotation -0.3917242543263733 0.07876246896092191 -0.9167052863683216 0.9303512269603899 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF cyl_geo5 Cylinder { + height 2.4768414116000366 + radius 0.5824817005442169 + } + castShadows FALSE + } + ] + name "solid(19)" + boundingObject USE cyl_geo5 +} +DEF box1 Solid { + translation 4.4381089093275685 0.5548170365208641 2.05131692563986 + rotation 0.2448556165007751 0.9367176515026089 0.2502114474428831 -2.914945226248721 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF box_geo1 Box { + size 0.8334023756695101 0.6127140086440774 2.1756103342302913 + } + castShadows FALSE + } + ] + name "solid(20)" + boundingObject USE box_geo1 +} +DEF box2 Solid { + translation 0 0 -10 + rotation -0.7163183367896099 0.6204835974021974 0.31919922577254956 2.929261604379051 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF box_geo2 Box { + size 1.6555731912544518 0.8528384366701209 1.5923867066800264 + } + castShadows FALSE + } + ] + name "solid(21)" + boundingObject USE box_geo2 +} +DEF box3 Solid { + translation 0 0 -10 + rotation 0.492702975086357 0.008495842259129496 0.8701560773823055 -3.124774550627343 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF box_geo3 Box { + size 1.114861834585034 1.9899789593315744 1.665194050916234 + } + castShadows FALSE + } + ] + name "solid(22)" + boundingObject USE box_geo3 +} +DEF box4 Solid { + translation 0 0 -10 + rotation -0.47381905460959706 -0.5794103506313973 0.6631584645241805 -2.2430503148315895 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF box_geo4 Box { + size 1.6228519285122363 1.1501776483206156 2.2316284316140305 + } + castShadows FALSE + } + ] + name "solid(23)" + boundingObject USE box_geo4 +} +DEF box5 Solid { + translation 0 0 -10 + rotation 0.1849655628048051 0.930668272300889 0.3156648658130647 3.098971634530017 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF box_geo5 Box { + size 2.198602344698272 0.9299983006419481 1.8591651370902504 + } + castShadows FALSE + } + ] + name "solid(24)" + boundingObject USE box_geo5 +} +DEF sph1 Solid { + translation -19.257198265348357 -3.1661159326488217 2.225830049481242 + rotation 0.46953082387497425 0.2604920627631049 0.8436140650017107 -2.2344190120762484 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF sph_geo1 Sphere { + radius 1.35574388768385 + } + castShadows FALSE + } + ] + name "solid(25)" + boundingObject USE sph_geo1 +} +DEF sph2 Solid { + translation 0.2181211849140201 -0.5886797657584887 
2.5285623758667715 + rotation 0.46953082387497425 0.2604920627631049 0.8436140650017107 -2.2344190120762484 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF sph_geo2 Sphere { + radius 1.365103979645272 + } + castShadows FALSE + } + ] + name "solid(26)" + boundingObject USE sph_geo2 +} +DEF sph3 Solid { + translation 0 0 -10 + rotation 0.46953082387497425 0.2604920627631049 0.8436140650017107 -2.2344190120762484 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF sph_geo3 Sphere { + radius 1.5576301083903183 + } + castShadows FALSE + } + ] + name "solid(27)" + boundingObject USE sph_geo3 +} +DEF sph4 Solid { + translation 0 0 -10 + rotation 0.46953082387497425 0.2604920627631049 0.8436140650017107 -2.2344190120762484 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF sph_geo4 Sphere { + radius 1.8204413448018755 + } + castShadows FALSE + } + ] + name "solid(28)" + boundingObject USE sph_geo4 +} +DEF sph5 Solid { + translation 0 0 -10 + rotation 0.46953082387497425 0.2604920627631049 0.8436140650017107 -2.2344190120762484 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF sph_geo5 Sphere { + radius 2.2713871330568587 + } + castShadows FALSE + } + ] + name "solid(29)" + boundingObject USE sph_geo5 +} +DEF wall1 Wall { + translation -4 -4.602323054921962 -9 + size 30 0.1 7 +} +DEF wall2 Wall { + translation -4 4.602323054921962 -9 + name "wall(2)" + size 30 0.1 7 +} diff --git a/projects/perception/activity_recognition/demos/online_recognition/activity_recognition/__init__.py b/projects/opendr_ws_2/src/opendr_simulation/opendr_simulation/__init__.py similarity index 100% rename from projects/perception/activity_recognition/demos/online_recognition/activity_recognition/__init__.py rename to projects/opendr_ws_2/src/opendr_simulation/opendr_simulation/__init__.py diff --git a/projects/opendr_ws_2/src/opendr_simulation/opendr_simulation/human_model_generation_client.py b/projects/opendr_ws_2/src/opendr_simulation/opendr_simulation/human_model_generation_client.py new file mode 100644 index 0000000000..c4ca320acc --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_simulation/opendr_simulation/human_model_generation_client.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import rclpy +from rclpy.node import Node + +import cv2 +import os +import argparse +from cv_bridge import CvBridge +from opendr_bridge import ROS2Bridge +from std_msgs.msg import Bool +from opendr_interface.srv import ImgToMesh +from opendr.simulation.human_model_generation.utilities.model_3D import Model_3D + + +class HumanModelGenerationClient(Node): + + def __init__(self, service_name="human_model_generation"): + """ + Creates a ROS Client for human model generation + :param service_name: The name of the service + :type service_name: str + """ + super().__init__('human_model_generation_client') + self.bridge_cv = CvBridge() + self.bridge_ros = ROS2Bridge() + self.cli = self.create_client(ImgToMesh, service_name) + while not self.cli.wait_for_service(timeout_sec=1.0): + self.get_logger().info('service not available, waiting again...') + self.req = ImgToMesh.Request() + + def send_request(self, img_rgb, img_msk, extract_pose): + """ + Send request to service assigned with the task to generate a human model from an image + :param img_rgb: The RGB image depicting a human + :type img_rgb: engine.data.Image + :param img_msk: The image, used as mask, for depicting a human's silhouette + :type img_msk: engine.data.Image + :param extract_pose: Defines whether to extract the pose of the depicted human or not + :type extract_pose: bool + :return: A tuple containing the generated human model and the extracted 3D pose + :rtype: tuple, (opendr.simulation.human_model_generation.utilities.model_3D.Model_3D, engine.target.Pose) + """ + extract_pose_ros = Bool() + extract_pose_ros.data = extract_pose + self.req.img_rgb = self.bridge_cv.cv2_to_imgmsg(img_rgb, encoding="bgr8") + self.req.img_msk = self.bridge_cv.cv2_to_imgmsg(img_msk, encoding="bgr8") + self.req.extract_pose = extract_pose_ros + self.future = self.cli.call_async(self.req) + rclpy.spin_until_future_complete(self, self.future) + resp = self.future.result() + pose = self.bridge_ros.from_ros_pose_3D(resp.pose) + vertices, triangles = self.bridge_ros.from_ros_mesh(resp.mesh) + vertex_colors = self.bridge_ros.from_ros_colors(resp.vertex_colors) + human_model = Model_3D(vertices, triangles, vertex_colors) + return human_model, pose + + +def main(): + + parser = argparse.ArgumentParser() + parser.add_argument("--srv_name", help="The name of the service", + type=str, default="human_model_generation") + parser.add_argument("--img_rgb", help="Path for RGB image", type=str, + default=os.path.join(os.environ['OPENDR_HOME'], 'projects/python/simulation/' + 'human_model_generation/demos/' + 'imgs_input/rgb/result_0004.jpg')) + parser.add_argument("--img_msk", help="Path for mask image", type=str, + default=os.path.join(os.environ['OPENDR_HOME'], 'projects/python/simulation/' + 'human_model_generation/demos/' + 'imgs_input/msk/result_0004.jpg')) + parser.add_argument("--rot_angles", help="Yaw angles for rotating the generated model", + nargs="+", default=['30', '120']) + parser.add_argument("--extract_pose", help="Whether to extract pose or not", action='store_true') + parser.add_argument("--plot_kps", help="Whether to plot the keypoints of the extracted pose", + action='store_true') + parser.add_argument("--out_path", help="Path for outputting the renderings/models", type=str, + default=os.path.join(os.environ['OPENDR_HOME'], 'projects/opendr_ws_2')) + args = parser.parse_args() + rot_angles = [int(x) for x in args.rot_angles] + img_rgb = cv2.imread(args.img_rgb) + img_msk = cv2.imread(args.img_msk) + rclpy.init() + client = 
HumanModelGenerationClient(service_name=args.srv_name)
+    [human_model, pose] = client.send_request(img_rgb, img_msk, extract_pose=args.extract_pose)
+    human_model.save_obj_mesh(os.path.join(args.out_path, 'human_model.obj'))
+    [out_imgs, _] = human_model.get_img_views(rot_angles, human_pose_3D=pose, plot_kps=args.plot_kps)
+    for i, out_img in enumerate(out_imgs):
+        cv2.imwrite(os.path.join(args.out_path, 'rendering' + str(rot_angles[i]) + '.jpg'), out_imgs[i].opencv())
+    client.destroy_node()
+    rclpy.shutdown()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/projects/opendr_ws_2/src/opendr_simulation/opendr_simulation/human_model_generation_service.py b/projects/opendr_ws_2/src/opendr_simulation/opendr_simulation/human_model_generation_service.py
new file mode 100644
index 0000000000..39d1a97fa6
--- /dev/null
+++ b/projects/opendr_ws_2/src/opendr_simulation/opendr_simulation/human_model_generation_service.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+# Copyright 2020-2022 OpenDR European Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import rclpy
+from rclpy.node import Node
+import argparse
+import os
+import torch
+from opendr_bridge import ROS2Bridge
+from opendr.simulation.human_model_generation.pifu_generator_learner import PIFuGeneratorLearner
+from opendr_interface.srv import ImgToMesh
+from opendr.engine.target import Pose
+from rclpy.callback_groups import MutuallyExclusiveCallbackGroup
+
+
+class PifuService(Node):
+
+    def __init__(self, service_name='human_model_generation', device="cuda", checkpoint_dir='.'):
+        """
+        Creates a ROS Service for human model generation
+        :param service_name: The name of the service
+        :type service_name: str
+        :param device: device on which we are running inference ('cpu' or 'cuda')
+        :type device: str
+        :param checkpoint_dir: the directory where the PIFu weights will be downloaded/loaded
+        :type checkpoint_dir: str
+        """
+        super().__init__('human_model_generation_service')
+        self.bridge = ROS2Bridge()
+        self.service_name = service_name
+        # Initialize the human model generation learner
+        self.model_generator = PIFuGeneratorLearner(device=device, checkpoint_dir=checkpoint_dir)
+        my_callback_group = MutuallyExclusiveCallbackGroup()
+
+        self.srv = self.create_service(ImgToMesh, self.service_name, self.gen_callback, callback_group=my_callback_group)
+
+    def gen_callback(self, request, response):
+        """
+        Callback that processes the request data and fills in the service response
+        :param request: The service request
+        :type request: SrvTypeRequest
+        :param response: The service response
+        :type response: SrvTypeResponse
+        :return response: The filled service response
+        :rtype: SrvTypeResponse
+        """
+        img_rgb = self.bridge.from_ros_image(request.img_rgb)
+        img_msk = self.bridge.from_ros_image(request.img_msk)
+        extract_pose = request.extract_pose.data
+        output = self.model_generator.infer([img_rgb], [img_msk], extract_pose=extract_pose)
+        if extract_pose is True:
+            model_3D = output[0]
+            pose = output[1]
+        else:
+            model_3D = output
+            pose =
Pose([], 0.0) + verts = model_3D.get_vertices() + faces = model_3D.get_faces() + vert_colors = model_3D.vert_colors + response.mesh = self.bridge.to_ros_mesh(verts, faces) + response.vertex_colors = self.bridge.to_ros_colors(vert_colors) + response.pose = self.bridge.to_ros_pose_3D(pose) + return response + + +def main(): + + parser = argparse.ArgumentParser() + parser.add_argument("--device", help="Device to use, either \"cpu\" or \"cuda\", defaults to \"cuda\"", + type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--srv_name", help="The name of the service", + type=str, default="human_model_generation") + parser.add_argument("--checkpoint_dir", help="Path to directory for the checkpoints of the method's network", + type=str, default=os.path.join(os.environ['OPENDR_HOME'], 'projects/opendr_ws_2')) + args = parser.parse_args() + + try: + if args.device == "cuda" and torch.cuda.is_available(): + device = "cuda" + elif args.device == "cuda": + print("GPU not found. Using CPU instead.") + device = "cpu" + else: + print("Using CPU.") + device = "cpu" + except: + print("Using CPU.") + device = "cpu" + + rclpy.init() + pifu_service = PifuService(service_name=args.srv_name, device=device, checkpoint_dir=args.checkpoint_dir) + rclpy.spin(pifu_service) + rclpy.shutdown() + + +if __name__ == '__main__': + main() diff --git a/projects/opendr_ws_2/src/opendr_simulation/package.xml b/projects/opendr_ws_2/src/opendr_simulation/package.xml new file mode 100644 index 0000000000..bcba4eab8d --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_simulation/package.xml @@ -0,0 +1,36 @@ + + + + opendr_simulation + 2.0.0 + OpenDR ROS2 nodes for the simulation package + OpenDR Project Coordinator + Apache License v2.0 + std_msgs + shape_msgs + sensor_msgs + vision_msgs + ament_cmake + rosidl_default_generators + rosidl_default_runtime + opendr_interface + rclpy + opendr_bridge + rosidl_interface_packages + + ament_copyright + ament_flake8 + ament_pep257 + python3-pytest + ament_lint_auto + ament_lint_common + + + ament_python + + + + + + + diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/__init__.py b/projects/opendr_ws_2/src/opendr_simulation/resource/opendr_simulation similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/utils/__init__.py rename to projects/opendr_ws_2/src/opendr_simulation/resource/opendr_simulation diff --git a/projects/opendr_ws_2/src/opendr_simulation/setup.cfg b/projects/opendr_ws_2/src/opendr_simulation/setup.cfg new file mode 100644 index 0000000000..58800215e6 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_simulation/setup.cfg @@ -0,0 +1,6 @@ +[develop] +script_dir=$base/lib/opendr_simulation +[install] +install_scripts=$base/lib/opendr_simulation +[build_scripts] +executable = /usr/bin/env python3 diff --git a/projects/opendr_ws_2/src/opendr_simulation/setup.py b/projects/opendr_ws_2/src/opendr_simulation/setup.py new file mode 100644 index 0000000000..0cd2cca844 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_simulation/setup.py @@ -0,0 +1,27 @@ +from setuptools import setup + +package_name = 'opendr_simulation' + +setup( + name=package_name, + version='2.0.0', + packages=[package_name], + data_files=[ + ('share/ament_index/resource_index/packages', + ['resource/' + package_name]), + ('share/' + package_name, ['package.xml']), + ], + install_requires=['setuptools'], + zip_safe=True, + maintainer='OpenDR Project Coordinator', + maintainer_email='tefas@csd.auth.gr', + description='OpenDR ROS2 nodes for the 
simulation package', + license='Apache License v2.0', + tests_require=['pytest'], + entry_points={ + 'console_scripts': [ + 'human_model_generation_service = opendr_simulation.human_model_generation_service:main', + 'human_model_generation_client = opendr_simulation.human_model_generation_client:main' + ], + }, +) diff --git a/projects/opendr_ws_2/src/opendr_simulation/test/test_copyright.py b/projects/opendr_ws_2/src/opendr_simulation/test/test_copyright.py new file mode 100644 index 0000000000..cc8ff03f79 --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_simulation/test/test_copyright.py @@ -0,0 +1,23 @@ +# Copyright 2015 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_copyright.main import main +import pytest + + +@pytest.mark.copyright +@pytest.mark.linter +def test_copyright(): + rc = main(argv=['.', 'test']) + assert rc == 0, 'Found errors' diff --git a/projects/opendr_ws_2/src/opendr_simulation/test/test_flake8.py b/projects/opendr_ws_2/src/opendr_simulation/test/test_flake8.py new file mode 100644 index 0000000000..27ee1078ff --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_simulation/test/test_flake8.py @@ -0,0 +1,25 @@ +# Copyright 2017 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ament_flake8.main import main_with_errors +import pytest + + +@pytest.mark.flake8 +@pytest.mark.linter +def test_flake8(): + rc, errors = main_with_errors(argv=[]) + assert rc == 0, \ + 'Found %d code style errors / warnings:\n' % len(errors) + \ + '\n'.join(errors) diff --git a/projects/opendr_ws_2/src/opendr_simulation/test/test_pep257.py b/projects/opendr_ws_2/src/opendr_simulation/test/test_pep257.py new file mode 100644 index 0000000000..b234a3840f --- /dev/null +++ b/projects/opendr_ws_2/src/opendr_simulation/test/test_pep257.py @@ -0,0 +1,23 @@ +# Copyright 2015 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from ament_pep257.main import main +import pytest + + +@pytest.mark.linter +@pytest.mark.pep257 +def test_pep257(): + rc = main(argv=['.', 'test']) + assert rc == 0, 'Found code style errors / warnings' diff --git a/projects/python/README.md b/projects/python/README.md new file mode 100644 index 0000000000..b1a72da808 --- /dev/null +++ b/projects/python/README.md @@ -0,0 +1,6 @@ +# Python usage examples and tutorials + + +This folder contains several usage examples and tutorials that demonstrate the functionalities of OpenDR toolkit. +The usage examples follow the same structure as the Python packages that are provided by OpenDR, i.e., they are provided separately for [perception](perception), [control](control) and [simulation](simulation) tools. +Furthermore, usage examples of other utilities are provided in [utils](utils). diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/__init__.py b/projects/python/__init__.py similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/__init__.py rename to projects/python/__init__.py diff --git a/projects/control/eagerx/README.md b/projects/python/control/eagerx/README.md similarity index 97% rename from projects/control/eagerx/README.md rename to projects/python/control/eagerx/README.md index 26825812a6..0a63adce48 100644 --- a/projects/control/eagerx/README.md +++ b/projects/python/control/eagerx/README.md @@ -22,7 +22,7 @@ Specifically the following examples are provided: Example usage: ```bash -cd $OPENDR_HOME/projects/control/eagerx/demos +cd $OPENDR_HOME/projects/python/control/eagerx/demos python3 [demo_name] ``` diff --git a/projects/control/eagerx/data/with_actions.h5 b/projects/python/control/eagerx/data/with_actions.h5 similarity index 100% rename from projects/control/eagerx/data/with_actions.h5 rename to projects/python/control/eagerx/data/with_actions.h5 diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/__init__.py b/projects/python/control/eagerx/demos/__init__.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/__init__.py rename to projects/python/control/eagerx/demos/__init__.py diff --git a/projects/control/eagerx/demos/demo_classifier.py b/projects/python/control/eagerx/demos/demo_classifier.py similarity index 100% rename from projects/control/eagerx/demos/demo_classifier.py rename to projects/python/control/eagerx/demos/demo_classifier.py diff --git a/projects/control/eagerx/demos/demo_full_state.py b/projects/python/control/eagerx/demos/demo_full_state.py similarity index 100% rename from projects/control/eagerx/demos/demo_full_state.py rename to projects/python/control/eagerx/demos/demo_full_state.py diff --git a/projects/control/eagerx/demos/demo_pid.py b/projects/python/control/eagerx/demos/demo_pid.py similarity index 100% rename from projects/control/eagerx/demos/demo_pid.py rename to projects/python/control/eagerx/demos/demo_pid.py diff --git a/projects/control/eagerx/dependencies.ini b/projects/python/control/eagerx/dependencies.ini similarity index 100% rename from projects/control/eagerx/dependencies.ini rename to projects/python/control/eagerx/dependencies.ini diff --git a/projects/control/mobile_manipulation/CMakeLists.txt b/projects/python/control/mobile_manipulation/CMakeLists.txt similarity index 100% rename from projects/control/mobile_manipulation/CMakeLists.txt rename to projects/python/control/mobile_manipulation/CMakeLists.txt 
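For reference, the `end_to_end_planner_node.py` added earlier in this patch feeds `EndToEndPlanningRLLearner.infer()` a plain dictionary observation, so the learner can also be exercised without ROS 2. The snippet below is a minimal sketch of such an offline call, not part of the patch: it mirrors the preprocessing done in the node's `range_callback()`, the synthetic depth values and the 5 m target are placeholders, and it assumes the learner accepts `env=None` for pure inference, exactly as the node constructs it.

```python
# Minimal offline sketch (illustrative only): call the planner learner with the same
# observation layout that end_to_end_planner_node.py builds in range_callback().
import numpy as np
from opendr.planning.end_to_end_planning import EndToEndPlanningRLLearner

learner = EndToEndPlanningRLLearner(env=None)  # constructed as in the ROS 2 node

# Synthetic 64x64 range image: clip to the 15 m maximum range and rescale to uint8,
# matching the node's preprocessing of /quad_plus_sitl/range_finder frames.
depth_m = np.full((64, 64, 1), 7.5, dtype=np.float32)
depth_cam = ((np.clip(depth_m, 0, 15) / 15.0) * 255).astype(np.uint8)

observation = {
    'depth_cam': depth_cam,
    'moving_target': np.array([5, 0, 0]),  # relative target, as hard-coded in the node
}

action = learner.infer(observation, deterministic=True)[0]
print(action)
```

In the node, `action[0]` and `action[1]` are subsequently converted into a forward/side step and a yaw increment (each scaled by 22.5 degrees) in `publish_poses()` before the target pose is published.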
diff --git a/projects/control/mobile_manipulation/README.md b/projects/python/control/mobile_manipulation/README.md similarity index 100% rename from projects/control/mobile_manipulation/README.md rename to projects/python/control/mobile_manipulation/README.md diff --git a/projects/control/mobile_manipulation/best_defaults.yaml b/projects/python/control/mobile_manipulation/best_defaults.yaml similarity index 100% rename from projects/control/mobile_manipulation/best_defaults.yaml rename to projects/python/control/mobile_manipulation/best_defaults.yaml diff --git a/projects/control/mobile_manipulation/mobile_manipulation_demo.py b/projects/python/control/mobile_manipulation/mobile_manipulation_demo.py similarity index 100% rename from projects/control/mobile_manipulation/mobile_manipulation_demo.py rename to projects/python/control/mobile_manipulation/mobile_manipulation_demo.py diff --git a/projects/control/mobile_manipulation/package.xml b/projects/python/control/mobile_manipulation/package.xml similarity index 100% rename from projects/control/mobile_manipulation/package.xml rename to projects/python/control/mobile_manipulation/package.xml diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax/meshes/Kallax.dae b/projects/python/control/mobile_manipulation/robots_world/models/Kallax/meshes/Kallax.dae similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/Kallax/meshes/Kallax.dae rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax/meshes/Kallax.dae diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer1.dae b/projects/python/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer1.dae similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer1.dae rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer1.dae diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer1_tex_0.jpg b/projects/python/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer1_tex_0.jpg similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer1_tex_0.jpg rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer1_tex_0.jpg diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer2.dae b/projects/python/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer2.dae similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer2.dae rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer2.dae diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer2_tex_0.jpg b/projects/python/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer2_tex_0.jpg similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer2_tex_0.jpg rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax/meshes/KallaxDrawer2_tex_0.jpg diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax/meshes/Kallax_tex_0.jpg b/projects/python/control/mobile_manipulation/robots_world/models/Kallax/meshes/Kallax_tex_0.jpg similarity index 100% rename 
from projects/control/mobile_manipulation/robots_world/models/Kallax/meshes/Kallax_tex_0.jpg rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax/meshes/Kallax_tex_0.jpg diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax/model.config b/projects/python/control/mobile_manipulation/robots_world/models/Kallax/model.config similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/Kallax/model.config rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax/model.config diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax/model.sdf b/projects/python/control/mobile_manipulation/robots_world/models/Kallax/model.sdf similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/Kallax/model.sdf rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax/model.sdf diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax.dae b/projects/python/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax.dae similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax.dae rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax.dae diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax_Tuer.dae b/projects/python/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax_Tuer.dae similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax_Tuer.dae rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax_Tuer.dae diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax_Tuer_tex_0.jpg b/projects/python/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax_Tuer_tex_0.jpg similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax_Tuer_tex_0.jpg rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax_Tuer_tex_0.jpg diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax_tex_0.jpg b/projects/python/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax_tex_0.jpg similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax_tex_0.jpg rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax2/meshes/Kallax_tex_0.jpg diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax2/model.config b/projects/python/control/mobile_manipulation/robots_world/models/Kallax2/model.config similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/Kallax2/model.config rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax2/model.config diff --git a/projects/control/mobile_manipulation/robots_world/models/Kallax2/model.sdf b/projects/python/control/mobile_manipulation/robots_world/models/Kallax2/model.sdf similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/Kallax2/model.sdf rename to projects/python/control/mobile_manipulation/robots_world/models/Kallax2/model.sdf diff --git a/projects/control/mobile_manipulation/robots_world/models/muesli2/meshes/muesli1_tex_0.jpg 
b/projects/python/control/mobile_manipulation/robots_world/models/muesli2/meshes/muesli1_tex_0.jpg similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/muesli2/meshes/muesli1_tex_0.jpg rename to projects/python/control/mobile_manipulation/robots_world/models/muesli2/meshes/muesli1_tex_0.jpg diff --git a/projects/control/mobile_manipulation/robots_world/models/muesli2/meshes/muesli2.dae b/projects/python/control/mobile_manipulation/robots_world/models/muesli2/meshes/muesli2.dae similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/muesli2/meshes/muesli2.dae rename to projects/python/control/mobile_manipulation/robots_world/models/muesli2/meshes/muesli2.dae diff --git a/projects/control/mobile_manipulation/robots_world/models/muesli2/model.config b/projects/python/control/mobile_manipulation/robots_world/models/muesli2/model.config similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/muesli2/model.config rename to projects/python/control/mobile_manipulation/robots_world/models/muesli2/model.config diff --git a/projects/control/mobile_manipulation/robots_world/models/muesli2/model.sdf b/projects/python/control/mobile_manipulation/robots_world/models/muesli2/model.sdf similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/muesli2/model.sdf rename to projects/python/control/mobile_manipulation/robots_world/models/muesli2/model.sdf diff --git a/projects/control/mobile_manipulation/robots_world/models/reemc_table_low/model.config b/projects/python/control/mobile_manipulation/robots_world/models/reemc_table_low/model.config similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/reemc_table_low/model.config rename to projects/python/control/mobile_manipulation/robots_world/models/reemc_table_low/model.config diff --git a/projects/control/mobile_manipulation/robots_world/models/reemc_table_low/table.sdf b/projects/python/control/mobile_manipulation/robots_world/models/reemc_table_low/table.sdf similarity index 100% rename from projects/control/mobile_manipulation/robots_world/models/reemc_table_low/table.sdf rename to projects/python/control/mobile_manipulation/robots_world/models/reemc_table_low/table.sdf diff --git a/projects/control/mobile_manipulation/rviz_config.rviz b/projects/python/control/mobile_manipulation/rviz_config.rviz similarity index 100% rename from projects/control/mobile_manipulation/rviz_config.rviz rename to projects/python/control/mobile_manipulation/rviz_config.rviz diff --git a/projects/control/single_demo_grasp/README.md b/projects/python/control/single_demo_grasp/README.md similarity index 78% rename from projects/control/single_demo_grasp/README.md rename to projects/python/control/single_demo_grasp/README.md index d28ef3d661..0486c939e0 100755 --- a/projects/control/single_demo_grasp/README.md +++ b/projects/python/control/single_demo_grasp/README.md @@ -26,7 +26,7 @@ $ make install_runtime_dependencies After installing dependencies, the user must source the workspace in the shell in order to detect the packages: ``` -$ source projects/control/single_demo_grasp/simulation_ws/devel/setup.bash +$ source projects/python/control/single_demo_grasp/simulation_ws/devel/setup.bash ``` ## Demos @@ -38,7 +38,7 @@ three different nodes must be launched consecutively in order to properly run th ``` 1. $ cd path/to/opendr/home # change accordingly 2. $ source bin/setup.bash -3. 
$ source projects/control/single_demo_grasp/simulation_ws/devel/setup.bash +3. $ source projects/python/control/single_demo_grasp/simulation_ws/devel/setup.bash 4. $ export WEBOTS_HOME=/usr/local/webots 5. $ roslaunch single_demo_grasping_demo panda_sim.launch ``` @@ -47,7 +47,7 @@ three different nodes must be launched consecutively in order to properly run th ``` 1. $ cd path/to/opendr/home # change accordingly 2. $ source bin/setup.bash -3. $ source projects/control/single_demo_grasp/simulation_ws/devel/setup.bash +3. $ source projects/python/control/single_demo_grasp/simulation_ws/devel/setup.bash 4. $ roslaunch single_demo_grasping_demo camera_stream_inference.launch ``` @@ -55,20 +55,20 @@ three different nodes must be launched consecutively in order to properly run th ``` 1. $ cd path/to/opendr/home # change accordingly 2. $ source bin/setup.bash -3. $ source projects/control/single_demo_grasp/simulation_ws/devel/setup.bash +3. $ source projects/python/control/single_demo_grasp/simulation_ws/devel/setup.bash 4. $ roslaunch single_demo_grasping_demo panda_sim_control.launch ``` ## Examples You can find an example on how to use the learner class to run inference and see the result in the following directory: ``` -$ cd projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/ +$ cd projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/ ``` simply run: ``` 1. $ cd path/to/opendr/home # change accordingly 2. $ source bin/setup.bash -3. $ source projects/control/single_demo_grasp/simulation_ws/devel/setup.bash -4. $ cd projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/ +3. $ source projects/python/control/single_demo_grasp/simulation_ws/devel/setup.bash +4. $ cd projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/ 5. 
$ ./single_demo_inference.py ``` diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/CMakeLists.txt b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/CMakeLists.txt similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/CMakeLists.txt rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/CMakeLists.txt diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/mainpage.dox b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/mainpage.dox similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/mainpage.dox rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/mainpage.dox diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/finger.dae b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/finger.dae similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/finger.dae rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/finger.dae diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/hand.dae b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/hand.dae similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/hand.dae rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/hand.dae diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link0.dae b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link0.dae similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link0.dae rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link0.dae diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link1.dae b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link1.dae similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link1.dae rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link1.dae diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link2.dae b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link2.dae similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link2.dae rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link2.dae diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link3.dae b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link3.dae similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link3.dae rename to 
projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link3.dae diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link4.dae b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link4.dae similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link4.dae rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link4.dae diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link5.dae b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link5.dae similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link5.dae rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link5.dae diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link6.dae b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link6.dae similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link6.dae rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link6.dae diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link7.dae b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link7.dae similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link7.dae rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/meshes/visual/link7.dae diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/package.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/package.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/package.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/package.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/robots/dual_panda_example.urdf.xacro b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/robots/dual_panda_example.urdf.xacro similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/robots/dual_panda_example.urdf.xacro rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/robots/dual_panda_example.urdf.xacro diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/robots/hand.urdf.xacro b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/robots/hand.urdf.xacro similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/robots/hand.urdf.xacro rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/robots/hand.urdf.xacro diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/robots/hand.xacro b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/robots/hand.xacro similarity index 100% rename from 
projects/control/single_demo_grasp/simulation_ws/src/franka_description/robots/hand.xacro rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/robots/hand.xacro diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/robots/panda_arm.urdf.xacro b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/robots/panda_arm.urdf.xacro similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/robots/panda_arm.urdf.xacro rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/robots/panda_arm.urdf.xacro diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/robots/panda_arm.xacro b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/robots/panda_arm.xacro similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/robots/panda_arm.xacro rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/robots/panda_arm.xacro diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/robots/panda_arm_hand.urdf.xacro b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/robots/panda_arm_hand.urdf.xacro similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/robots/panda_arm_hand.urdf.xacro rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/robots/panda_arm_hand.urdf.xacro diff --git a/projects/control/single_demo_grasp/simulation_ws/src/franka_description/rosdoc.yaml b/projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/rosdoc.yaml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/franka_description/rosdoc.yaml rename to projects/python/control/single_demo_grasp/simulation_ws/src/franka_description/rosdoc.yaml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/.setup_assistant b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/.setup_assistant similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/.setup_assistant rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/.setup_assistant diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/CHANGELOG.rst b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/CHANGELOG.rst similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/CHANGELOG.rst rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/CHANGELOG.rst diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/CMakeLists.txt b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/CMakeLists.txt similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/CMakeLists.txt rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/CMakeLists.txt diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/README.md b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/README.md similarity index 100% rename from 
projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/README.md rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/README.md diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/chomp_planning.yaml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/chomp_planning.yaml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/chomp_planning.yaml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/chomp_planning.yaml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/fake_controllers.yaml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/fake_controllers.yaml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/fake_controllers.yaml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/fake_controllers.yaml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/hand.xacro b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/hand.xacro similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/hand.xacro rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/hand.xacro diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/joint_limits.yaml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/joint_limits.yaml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/joint_limits.yaml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/joint_limits.yaml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/kinematics.yaml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/kinematics.yaml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/kinematics.yaml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/kinematics.yaml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/lerp_planning.yaml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/lerp_planning.yaml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/lerp_planning.yaml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/lerp_planning.yaml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/ompl_planning.yaml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/ompl_planning.yaml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/ompl_planning.yaml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/ompl_planning.yaml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_arm.srdf.xacro 
b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_arm.srdf.xacro similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_arm.srdf.xacro rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_arm.srdf.xacro diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_arm.xacro b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_arm.xacro similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_arm.xacro rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_arm.xacro diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_arm_hand.srdf.xacro b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_arm_hand.srdf.xacro similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_arm_hand.srdf.xacro rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_arm_hand.srdf.xacro diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_controllers.yaml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_controllers.yaml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_controllers.yaml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_controllers.yaml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_gripper_controllers.yaml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_gripper_controllers.yaml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_gripper_controllers.yaml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/panda_gripper_controllers.yaml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/sensors_kinect_depthmap.yaml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/sensors_kinect_depthmap.yaml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/sensors_kinect_depthmap.yaml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/sensors_kinect_depthmap.yaml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/sensors_kinect_pointcloud.yaml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/sensors_kinect_pointcloud.yaml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/sensors_kinect_pointcloud.yaml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/sensors_kinect_pointcloud.yaml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/stomp_planning.yaml 
b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/stomp_planning.yaml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/stomp_planning.yaml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/stomp_planning.yaml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/trajopt_planning.yaml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/trajopt_planning.yaml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/trajopt_planning.yaml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/config/trajopt_planning.yaml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/chomp_planning_pipeline.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/chomp_planning_pipeline.launch.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/chomp_planning_pipeline.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/chomp_planning_pipeline.launch.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/default_warehouse_db.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/default_warehouse_db.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/default_warehouse_db.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/default_warehouse_db.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/demo.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/demo.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/demo.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/demo.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/demo_chomp.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/demo_chomp.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/demo_chomp.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/demo_chomp.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/fake_moveit_controller_manager.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/fake_moveit_controller_manager.launch.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/fake_moveit_controller_manager.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/fake_moveit_controller_manager.launch.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/joystick_control.launch 
b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/joystick_control.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/joystick_control.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/joystick_control.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/lerp_planning_pipeline.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/lerp_planning_pipeline.launch.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/lerp_planning_pipeline.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/lerp_planning_pipeline.launch.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/move_group.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/move_group.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/move_group.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/move_group.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/moveit.rviz b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/moveit.rviz similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/moveit.rviz rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/moveit.rviz diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/moveit_empty.rviz b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/moveit_empty.rviz similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/moveit_empty.rviz rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/moveit_empty.rviz diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/moveit_rviz.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/moveit_rviz.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/moveit_rviz.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/moveit_rviz.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/ompl-chomp_planning_pipeline.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/ompl-chomp_planning_pipeline.launch.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/ompl-chomp_planning_pipeline.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/ompl-chomp_planning_pipeline.launch.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/ompl_planning_pipeline.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/ompl_planning_pipeline.launch.xml similarity 
index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/ompl_planning_pipeline.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/ompl_planning_pipeline.launch.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_control_moveit_rviz.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_control_moveit_rviz.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_control_moveit_rviz.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_control_moveit_rviz.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_gripper_moveit_controller_manager.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_gripper_moveit_controller_manager.launch.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_gripper_moveit_controller_manager.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_gripper_moveit_controller_manager.launch.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_moveit.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_moveit.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_moveit.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_moveit.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_moveit_controller_manager.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_moveit_controller_manager.launch.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_moveit_controller_manager.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_moveit_controller_manager.launch.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_moveit_sensor_manager.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_moveit_sensor_manager.launch.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_moveit_sensor_manager.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/panda_moveit_sensor_manager.launch.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/planning_context.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/planning_context.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/planning_context.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/planning_context.launch diff --git 
a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/planning_pipeline.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/planning_pipeline.launch.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/planning_pipeline.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/planning_pipeline.launch.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/run_benchmark_ompl.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/run_benchmark_ompl.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/run_benchmark_ompl.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/run_benchmark_ompl.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/run_benchmark_trajopt.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/run_benchmark_trajopt.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/run_benchmark_trajopt.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/run_benchmark_trajopt.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/sensor_manager.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/sensor_manager.launch.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/sensor_manager.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/sensor_manager.launch.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/setup_assistant.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/setup_assistant.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/setup_assistant.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/setup_assistant.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/stomp_planning_pipeline.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/stomp_planning_pipeline.launch.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/stomp_planning_pipeline.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/stomp_planning_pipeline.launch.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/trajectory_execution.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/trajectory_execution.launch.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/trajectory_execution.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/trajectory_execution.launch.xml diff --git 
a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/trajopt_planning_pipeline.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/trajopt_planning_pipeline.launch.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/trajopt_planning_pipeline.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/trajopt_planning_pipeline.launch.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/warehouse.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/warehouse.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/warehouse.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/warehouse.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/warehouse_settings.launch.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/warehouse_settings.launch.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/warehouse_settings.launch.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/launch/warehouse_settings.launch.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/package.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/package.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/package.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/panda_moveit_config/package.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/CMakeLists.txt b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/CMakeLists.txt similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/CMakeLists.txt rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/CMakeLists.txt diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/README.md b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/README.md similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/README.md rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/README.md diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/inference_utils.py b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/inference_utils.py similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/inference_utils.py rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/inference_utils.py diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/samples/0.jpg b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/samples/0.jpg similarity index 100% 
rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/samples/0.jpg rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/samples/0.jpg diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_grasp_camera_stream.py b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_grasp_camera_stream.py similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_grasp_camera_stream.py rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_grasp_camera_stream.py diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_inference.py b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_inference.py similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_inference.py rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/inference/single_demo_inference.py diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/camera_stream_inference.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/camera_stream_inference.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/camera_stream_inference.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/camera_stream_inference.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/panda_controller.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/panda_controller.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/panda_controller.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/panda_controller.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/panda_sim.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/panda_sim.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/panda_sim.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/panda_sim.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/panda_sim_control.launch b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/panda_sim_control.launch similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/panda_sim_control.launch rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/launch/panda_sim_control.launch diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/objects/cran_feld_pendulum.stl 
b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/objects/cran_feld_pendulum.stl similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/objects/cran_feld_pendulum.stl rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/objects/cran_feld_pendulum.stl diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/objects/d435.dae b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/objects/d435.dae similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/objects/d435.dae rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/objects/d435.dae diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/package.xml b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/package.xml similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/package.xml rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/package.xml diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/BallBearing.proto b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/BallBearing.proto similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/BallBearing.proto rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/BallBearing.proto diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/CommonLine.proto b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/CommonLine.proto similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/CommonLine.proto rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/CommonLine.proto diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/CranfieldFace.proto b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/CranfieldFace.proto similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/CranfieldFace.proto rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/CranfieldFace.proto diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/CylinderPneumatic.proto b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/CylinderPneumatic.proto similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/CylinderPneumatic.proto rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/CylinderPneumatic.proto diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/FuelLine.proto b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/FuelLine.proto similarity index 100% rename from 
projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/FuelLine.proto rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/FuelLine.proto diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/Housing.proto b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/Housing.proto similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/Housing.proto rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/Housing.proto diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/Pendulum.proto b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/Pendulum.proto similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/Pendulum.proto rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/Pendulum.proto diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/RodEnd.proto b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/RodEnd.proto similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/RodEnd.proto rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/RodEnd.proto diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/panda_arm_hand.proto b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/panda_arm_hand.proto similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/panda_arm_hand.proto rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/protos/panda_arm_hand.proto diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/camera_publisher.py b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/camera_publisher.py similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/camera_publisher.py rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/camera_publisher.py diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/constants.py b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/constants.py similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/constants.py rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/constants.py diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/gripper_command.py b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/gripper_command.py similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/gripper_command.py rename to 
projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/gripper_command.py diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/joint_state_publisher.py b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/joint_state_publisher.py similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/joint_state_publisher.py rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/joint_state_publisher.py diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/panda_ros.py b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/panda_ros.py similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/panda_ros.py rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/panda_ros.py diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/single_demo_grasp_action.py b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/single_demo_grasp_action.py similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/single_demo_grasp_action.py rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/single_demo_grasp_action.py diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/trajectory_follower.py b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/trajectory_follower.py similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/trajectory_follower.py rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/trajectory_follower.py diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/utilities.py b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/utilities.py similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/utilities.py rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/scripts/utilities.py diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/worlds/.franka_simulation.wbproj b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/worlds/.franka_simulation.wbproj similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/worlds/.franka_simulation.wbproj rename to projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/worlds/.franka_simulation.wbproj diff --git a/projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/worlds/franka_simulation.wbt b/projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/worlds/franka_simulation.wbt similarity index 100% rename from projects/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/worlds/franka_simulation.wbt rename to 
projects/python/control/single_demo_grasp/simulation_ws/src/single_demo_grasping_demo/worlds/franka_simulation.wbt diff --git a/projects/perception/.gitignore b/projects/python/perception/.gitignore similarity index 100% rename from projects/perception/.gitignore rename to projects/python/perception/.gitignore diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/__init__.py b/projects/python/perception/__init__.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/__init__.py rename to projects/python/perception/__init__.py diff --git a/projects/perception/activity_recognition/benchmark/README.md b/projects/python/perception/activity_recognition/benchmark/README.md similarity index 76% rename from projects/perception/activity_recognition/benchmark/README.md rename to projects/python/perception/activity_recognition/benchmark/README.md index 8e8dcef68e..29e38ecf76 100644 --- a/projects/perception/activity_recognition/benchmark/README.md +++ b/projects/python/perception/activity_recognition/benchmark/README.md @@ -25,4 +25,10 @@ X3D CoX3D ```bash ./benchmark_cox3d.py -``` \ No newline at end of file +``` + +CoTransEnc +```bash +./benchmark_cotransenc.py +``` +NB: The CoTransEnc module benchmarks various configurations of the Continual Transformer Encoder modules only. This doesn't include any feature-extraction that you might want to use beforehand. \ No newline at end of file diff --git a/projects/python/perception/activity_recognition/benchmark/benchmark_cotransenc.py b/projects/python/perception/activity_recognition/benchmark/benchmark_cotransenc.py new file mode 100644 index 0000000000..f5957fd021 --- /dev/null +++ b/projects/python/perception/activity_recognition/benchmark/benchmark_cotransenc.py @@ -0,0 +1,89 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import torch +import yaml +from opendr.perception.activity_recognition import CoTransEncLearner + +from pytorch_benchmark import benchmark +import logging +from typing import List, Union +from opendr.engine.target import Category +from opendr.engine.data import Image + +logger = logging.getLogger("benchmark") +logging.basicConfig() +logger.setLevel("DEBUG") + + +def benchmark_cotransenc(): + temp_dir = "./projects/python/perception/activity_recognition/benchmark/tmp" + num_runs = 100 + batch_size = 1 + + for num_layers in [1, 2]: # --------- A few plausible hparams ---------- + for (input_dims, sequence_len) in [(1024, 32), (2048, 64), (4096, 64)]: + print( + f"==== Benchmarking CoTransEncLearner (l{num_layers}-d{input_dims}-t{sequence_len}) ====" + ) + learner = CoTransEncLearner( + device="cuda" if torch.cuda.is_available() else "cpu", + temp_path=temp_dir + f"/{num_layers}_{input_dims}_{sequence_len}", + num_layers=num_layers, + input_dims=input_dims, + hidden_dims=input_dims // 2, + sequence_len=sequence_len, + num_heads=input_dims // 128, + batch_size=batch_size, + ) + learner.optimize() + + sample = torch.randn(1, input_dims) + + # Warm-up continual inference not needed for optimized version: + # for _ in range(sequence_len - 1): + # learner.infer(sample) + + def get_device_fn(*args): + nonlocal learner + return next(learner.model.parameters()).device + + def transfer_to_device_fn( + sample: Union[torch.Tensor, List[Category], List[Image]], + device: torch.device, + ): + if isinstance(sample, torch.Tensor): + return sample.to(device=device) + + assert isinstance(sample, Category) + return Category( + prediction=sample.data, + confidence=sample.confidence.to(device=device), + ) + + results1 = benchmark( + model=learner.infer, + sample=sample, + num_runs=num_runs, + get_device_fn=get_device_fn, + transfer_to_device_fn=transfer_to_device_fn, + batch_size=batch_size, + print_fn=print, + ) + print(yaml.dump({"learner.infer": results1})) + + +if __name__ == "__main__": + benchmark_cotransenc() diff --git a/projects/perception/activity_recognition/benchmark/benchmark_cox3d.py b/projects/python/perception/activity_recognition/benchmark/benchmark_cox3d.py similarity index 84% rename from projects/perception/activity_recognition/benchmark/benchmark_cox3d.py rename to projects/python/perception/activity_recognition/benchmark/benchmark_cox3d.py index fb63294bac..a9ffa468a4 100644 --- a/projects/perception/activity_recognition/benchmark/benchmark_cox3d.py +++ b/projects/python/perception/activity_recognition/benchmark/benchmark_cox3d.py @@ -29,7 +29,7 @@ def benchmark_cox3d(): - temp_dir = "./projects/perception/activity_recognition/benchmark/tmp" + temp_dir = "./projects/python/perception/activity_recognition/benchmark/tmp" num_runs = 100 @@ -75,12 +75,13 @@ def benchmark_cox3d(): temp_path=temp_dir, backbone=backbone, ) + learner.optimize() sample = torch.randn( batch_size[backbone], *input_shape[backbone] - ) # (B, C, T, H, W) - image_samples = [Image(v) for v in sample] - image_sample = [Image(sample[0])] + ) # (B, C, H, W) + # image_samples = [Image(v) for v in sample] + # image_sample = [Image(sample[0])] def get_device_fn(*args): nonlocal learner @@ -101,15 +102,18 @@ def transfer_to_device_fn( assert isinstance(sample[0], Category) return [ - Category(prediction=s.data, confidence=s.confidence.to(device=device),) + Category( + prediction=s.data, + confidence=s.confidence.to(device=device), + ) for s in sample ] print("== Benchmarking learner.infer ==") results1 = benchmark( 
model=learner.infer, - sample=image_samples, - sample_with_batch_size1=image_sample, + sample=sample, + # sample_with_batch_size1=image_sample, num_runs=num_runs, get_device_fn=get_device_fn, transfer_to_device_fn=transfer_to_device_fn, @@ -118,10 +122,6 @@ def transfer_to_device_fn( ) print(yaml.dump({"learner.infer": results1})) - print("== Benchmarking model directly ==") - results2 = benchmark(learner.model, sample, num_runs=num_runs, print_fn=print) - print(yaml.dump({"learner.model.forward": results2})) - if __name__ == "__main__": benchmark_cox3d() diff --git a/projects/perception/activity_recognition/benchmark/benchmark_x3d.py b/projects/python/perception/activity_recognition/benchmark/benchmark_x3d.py similarity index 85% rename from projects/perception/activity_recognition/benchmark/benchmark_x3d.py rename to projects/python/perception/activity_recognition/benchmark/benchmark_x3d.py index 5256cf308d..d60b5cc8f6 100644 --- a/projects/perception/activity_recognition/benchmark/benchmark_x3d.py +++ b/projects/python/perception/activity_recognition/benchmark/benchmark_x3d.py @@ -29,7 +29,7 @@ def benchmark_x3d(): - temp_dir = "./projects/perception/activity_recognition/benchmark/tmp" + temp_dir = "./projects/python/perception/activity_recognition/benchmark/tmp" num_runs = 100 @@ -74,14 +74,16 @@ def benchmark_x3d(): device="cuda" if torch.cuda.is_available() else "cpu", temp_path=temp_dir, backbone=backbone, + batch_size=batch_size[backbone], ) + learner.optimize() learner.model.eval() sample = torch.randn( batch_size[backbone], *input_shape[backbone] ) # (B, C, T, H, W) - video_samples = [Video(v) for v in sample] - video_sample = [Video(sample[0])] + # video_samples = [Video(v) for v in sample] + # video_sample = [Video(sample[0])] def get_device_fn(*args): nonlocal learner @@ -102,15 +104,18 @@ def transfer_to_device_fn( assert isinstance(sample[0], Category) return [ - Category(prediction=s.data, confidence=s.confidence.to(device=device),) + Category( + prediction=s.data, + confidence=s.confidence.to(device=device), + ) for s in sample ] print("== Benchmarking learner.infer ==") results1 = benchmark( model=learner.infer, - sample=video_samples, - sample_with_batch_size1=video_sample, + sample=sample, + # sample_with_batch_size1=sample[0].unsqueeze(0), num_runs=num_runs, get_device_fn=get_device_fn, transfer_to_device_fn=transfer_to_device_fn, @@ -119,10 +124,6 @@ def transfer_to_device_fn( ) print(yaml.dump({"learner.infer": results1})) - print("== Benchmarking model directly ==") - results2 = benchmark(learner.model, sample, num_runs=num_runs, print_fn=print) - print(yaml.dump({"learner.model.forward": results2})) - if __name__ == "__main__": benchmark_x3d() diff --git a/projects/perception/activity_recognition/benchmark/install_on_server.sh b/projects/python/perception/activity_recognition/benchmark/install_on_server.sh similarity index 100% rename from projects/perception/activity_recognition/benchmark/install_on_server.sh rename to projects/python/perception/activity_recognition/benchmark/install_on_server.sh diff --git a/projects/perception/activity_recognition/benchmark/requirements.txt b/projects/python/perception/activity_recognition/benchmark/requirements.txt similarity index 100% rename from projects/perception/activity_recognition/benchmark/requirements.txt rename to projects/python/perception/activity_recognition/benchmark/requirements.txt diff --git a/projects/python/perception/activity_recognition/demos/continual_transformer_encoder/README.md 
b/projects/python/perception/activity_recognition/demos/continual_transformer_encoder/README.md new file mode 100644 index 0000000000..e804ca345b --- /dev/null +++ b/projects/python/perception/activity_recognition/demos/continual_transformer_encoder/README.md @@ -0,0 +1,13 @@ +# Continual Transformer Encoder demo + +The file [demo.py](demo.py) is a demo of how to use the `CoTransEncLearner`, including fitting, evaluation, runtime optimization and inference. + +To fit, evaluate and perform inference, use the following command: +```bash +python demo.py --fit --eval --infer +``` + +Please use the "--help" command see further script options: +```bash +python demo.py --help +``` diff --git a/projects/python/perception/activity_recognition/demos/continual_transformer_encoder/demo.py b/projects/python/perception/activity_recognition/demos/continual_transformer_encoder/demo.py new file mode 100644 index 0000000000..b575adc2bf --- /dev/null +++ b/projects/python/perception/activity_recognition/demos/continual_transformer_encoder/demo.py @@ -0,0 +1,88 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import torch + +from opendr.perception.activity_recognition import CoTransEncLearner +from opendr.perception.activity_recognition.datasets import DummyTimeseriesDataset + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument("--fit", help="Fit the model", default=False, action="store_true") + parser.add_argument("--num_fit_steps", help="Numer of steps to fit the model", type=int, default=10) + parser.add_argument("--eval", help="Evaluate the model", default=False, action="store_true") + parser.add_argument("--optimize", help="Perform inference using the model", default=False, action="store_true") + parser.add_argument("--infer", help="Perform inference using the model", default=False, action="store_true") + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cpu") + parser.add_argument("--input_dims", help="Input dimensionality of the model and dataset", type=float, default=8) + parser.add_argument("--hidden_dims", help="The number of hidden dimensions of the model", type=float, default=32) + parser.add_argument("--sequence_len", help="The length of the time-series to consider", type=int, default=64) + parser.add_argument("--num_heads", help="Number of attention heads to employ", type=int, default=8) + parser.add_argument("--batch_size", help="The batch size of the model", type=int, default=2) + return parser.parse_args() + + +def main(args): + # Define learner + learner = CoTransEncLearner( + batch_size=args.batch_size, + device="cpu", + input_dims=args.input_dims, + hidden_dims=args.hidden_dims, + sequence_len=args.sequence_len, + num_heads=args.num_heads, + num_classes=4, + ) + + # Define datasets + train_ds = DummyTimeseriesDataset( + sequence_len=args.sequence_len, + num_sines=args.input_dims, + num_datapoints=args.sequence_len * 2, + ) + val_ds = DummyTimeseriesDataset( + 
sequence_len=args.sequence_len, + num_sines=args.input_dims, + num_datapoints=args.sequence_len * 2, + base_offset=args.sequence_len * 2, + ) + test_ds = DummyTimeseriesDataset( + sequence_len=args.sequence_len, + num_sines=args.input_dims, + num_datapoints=args.sequence_len * 2, + base_offset=args.sequence_len * 4, + ) + + # Invoke operations + if args.fit: + learner.fit(dataset=train_ds, val_dataset=val_ds, steps=args.num_fit_steps) + + if args.eval: + results = learner.eval(test_ds) + print("Evaluation results: ", results) + + if args.optimize: + learner.optimize() + + if args.infer: + dl = torch.utils.data.DataLoader(val_ds, batch_size=args.batch_size, num_workers=0) + tensor = next(iter(dl))[0][0] + category = learner.infer(tensor) + print(f"Inferred category.data = {category.data}, category.confidence = {category.confidence.detach().numpy()}") + + +if __name__ == "__main__": + main(parse_args()) diff --git a/projects/perception/activity_recognition/demos/online_recognition/README.md b/projects/python/perception/activity_recognition/demos/online_recognition/README.md similarity index 100% rename from projects/perception/activity_recognition/demos/online_recognition/README.md rename to projects/python/perception/activity_recognition/demos/online_recognition/README.md diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/__init__.py b/projects/python/perception/activity_recognition/demos/online_recognition/activity_recognition/__init__.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/__init__.py rename to projects/python/perception/activity_recognition/demos/online_recognition/activity_recognition/__init__.py diff --git a/projects/perception/activity_recognition/demos/online_recognition/activity_recognition/screenshot.png b/projects/python/perception/activity_recognition/demos/online_recognition/activity_recognition/screenshot.png similarity index 100% rename from projects/perception/activity_recognition/demos/online_recognition/activity_recognition/screenshot.png rename to projects/python/perception/activity_recognition/demos/online_recognition/activity_recognition/screenshot.png diff --git a/projects/perception/activity_recognition/demos/online_recognition/activity_recognition/video.gif b/projects/python/perception/activity_recognition/demos/online_recognition/activity_recognition/video.gif similarity index 100% rename from projects/perception/activity_recognition/demos/online_recognition/activity_recognition/video.gif rename to projects/python/perception/activity_recognition/demos/online_recognition/activity_recognition/video.gif diff --git a/projects/perception/activity_recognition/demos/online_recognition/demo.py b/projects/python/perception/activity_recognition/demos/online_recognition/demo.py similarity index 99% rename from projects/perception/activity_recognition/demos/online_recognition/demo.py rename to projects/python/perception/activity_recognition/demos/online_recognition/demo.py index 5bfd19d9ed..62cbbe364f 100644 --- a/projects/perception/activity_recognition/demos/online_recognition/demo.py +++ b/projects/python/perception/activity_recognition/demos/online_recognition/demo.py @@ -52,12 +52,12 @@ def index(): def runnig_fps(alpha=0.1): - t0 = time.time_ns() + t0 = time.perf_counter() fps_avg = 10 def wrapped(): nonlocal t0, alpha, fps_avg - t1 = time.time_ns() + t1 = time.perf_counter() delta = (t1 - t0) * 1e-9 t0 = t1 fps_avg = alpha * (1 / delta) + (1 - alpha) * fps_avg 
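The hunk above swaps the online recognition demo's timestamps from `time.time_ns()` (integer nanoseconds) to `time.perf_counter()` (float seconds). For reference, a minimal, self-contained sketch of such an exponentially smoothed FPS counter with seconds-based timestamps is shown below; the name `running_fps` and the initial estimate are illustrative rather than the demo's exact code. A `perf_counter()`-based delta is already in seconds, so the `1e-9` scaling used for `time.time_ns()` values is not needed.

```python
import time


def running_fps(alpha=0.1):
    """Return a closure that tracks an exponentially smoothed frames-per-second estimate."""
    t0 = time.perf_counter()
    fps_avg = 10.0  # initial guess, refined on every call

    def wrapped():
        nonlocal t0, fps_avg
        t1 = time.perf_counter()
        delta = t1 - t0  # perf_counter() already returns seconds; no 1e-9 factor
        t0 = t1
        fps_avg = alpha * (1.0 / delta) + (1 - alpha) * fps_avg
        return fps_avg

    return wrapped


# Usage: call the closure once per processed frame.
fps = running_fps()
for _ in range(3):
    time.sleep(0.05)  # stand-in for per-frame work
    print(f"~{fps():.1f} FPS")
```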
diff --git a/projects/perception/activity_recognition/demos/online_recognition/requirements.txt b/projects/python/perception/activity_recognition/demos/online_recognition/requirements.txt similarity index 100% rename from projects/perception/activity_recognition/demos/online_recognition/requirements.txt rename to projects/python/perception/activity_recognition/demos/online_recognition/requirements.txt diff --git a/projects/perception/activity_recognition/demos/online_recognition/setup.py b/projects/python/perception/activity_recognition/demos/online_recognition/setup.py similarity index 100% rename from projects/perception/activity_recognition/demos/online_recognition/setup.py rename to projects/python/perception/activity_recognition/demos/online_recognition/setup.py diff --git a/projects/perception/activity_recognition/demos/online_recognition/templates/index.html b/projects/python/perception/activity_recognition/demos/online_recognition/templates/index.html similarity index 100% rename from projects/perception/activity_recognition/demos/online_recognition/templates/index.html rename to projects/python/perception/activity_recognition/demos/online_recognition/templates/index.html diff --git a/projects/perception/face_recognition/README.md b/projects/python/perception/face_recognition/README.md similarity index 100% rename from projects/perception/face_recognition/README.md rename to projects/python/perception/face_recognition/README.md diff --git a/projects/perception/face_recognition/demos/benchmarking_demo.py b/projects/python/perception/face_recognition/demos/benchmarking_demo.py similarity index 100% rename from projects/perception/face_recognition/demos/benchmarking_demo.py rename to projects/python/perception/face_recognition/demos/benchmarking_demo.py diff --git a/projects/perception/face_recognition/demos/eval_demo.py b/projects/python/perception/face_recognition/demos/eval_demo.py similarity index 100% rename from projects/perception/face_recognition/demos/eval_demo.py rename to projects/python/perception/face_recognition/demos/eval_demo.py diff --git a/projects/perception/face_recognition/demos/inference_demo.py b/projects/python/perception/face_recognition/demos/inference_demo.py similarity index 100% rename from projects/perception/face_recognition/demos/inference_demo.py rename to projects/python/perception/face_recognition/demos/inference_demo.py diff --git a/projects/perception/face_recognition/demos/inference_tutorial.ipynb b/projects/python/perception/face_recognition/demos/inference_tutorial.ipynb similarity index 100% rename from projects/perception/face_recognition/demos/inference_tutorial.ipynb rename to projects/python/perception/face_recognition/demos/inference_tutorial.ipynb diff --git a/projects/perception/face_recognition/demos/webcam_demo.py b/projects/python/perception/face_recognition/demos/webcam_demo.py similarity index 100% rename from projects/perception/face_recognition/demos/webcam_demo.py rename to projects/python/perception/face_recognition/demos/webcam_demo.py diff --git a/projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation/README.md b/projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation/README.md new file mode 100644 index 0000000000..8bd720511e --- /dev/null +++ b/projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation/README.md @@ -0,0 +1,77 @@ +# Image-based Facial Expression Recognition Demo + +This folder contains an 
implemented demo of the image_based_facial_expression_recognition method proposed in [[1]](#1). +The demo framework has three main features: +- Image: recognizes facial expressions in images. +- Video: recognizes facial expressions in videos using a frame-based approach. +- Webcam: connects to a webcam and recognizes facial expressions of the closest face detected by a face detection algorithm. +The demo utilizes the OpenCV Haar Cascade face detector [[2]](https://ieeexplore.ieee.org/abstract/document/990517) for real-time face detection. + +#### Running the demo +The models pretrained on the AffectNet Categorical dataset are provided by [[1]](#1) and can be found [here](https://github.com/siqueira-hc/Efficient-Facial-Feature-Learning-with-Wide-Ensemble-based-Convolutional-Neural-Networks/tree/master/model/ml/trained_models/esr_9). +**Please note that the pretrained weights cannot be used for commercial purposes.** +To recognize a facial expression in images, run the following command: +```bash +python inference_demo.py image -i ./media/jackie.jpg -d +``` + +The argument `image` indicates that the input is an image. The location of the image is specified after `-i`, and `-d` sets the display mode to true. +If the location of the image file is not specified, the demo automatically downloads a sample image file from the FTP server. + +```bash +python inference_demo.py image -i 'image_path' -d +``` + +To recognize a facial expression in videos, run the following command: +```bash +python inference_demo.py video -i 'video_path' -d -f 5 +``` +The argument `video` indicates that the input is a video. The location of the video is specified after `-i`. `-d` sets the display mode to true, and `-f` defines the number of frames to be processed. +If the location of the video file is not specified, the demo automatically downloads a sample video file from the FTP server. + +To recognize a facial expression in images captured from a webcam, run the following command: +```bash +python inference_demo.py webcam -d +``` +The argument `webcam` instructs the framework to capture images from a webcam. `-d` sets the display mode to true. + +#### List of Arguments +Positional arguments: + +- **mode**:\ +Select the running mode of the demo: 'image', 'video' or 'webcam'. +Input values: {image, video, webcam}. + +Optional arguments: + +- **-h (--help)**:\ +Display the help message. + +- **-d (--display)**:\ +Display a window with the input data on the left and the output data on the right (i.e., detected face, emotions, and affect values). + +- **-i (--input)**:\ +Define the full path to an image or video. + +- **-c (--device)**:\ +Specify the device, which can be 'cuda' or 'cpu'. + +- **-w (--webcam)**:\ +Define the webcam to be used by its 'id' when the webcam mode is selected. The default camera is used if 'id' is not specified. + +- **-f (--frames)**:\ +Set the number of frames to be processed out of every 30 frames. The lower the number, the faster the processing. + + +## Acknowledgement +This work has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement No 871449 (OpenDR). This publication reflects the authors’ views only. The European Commission is not responsible for any use that may be made of the information it contains. + + +## References +[1] +[Siqueira, Henrique, Sven Magg, and Stefan Wermter. "Efficient facial feature learning with wide ensemble-based convolutional neural networks." 
Proceedings of the AAAI conference on artificial intelligence. Vol. 34. No. 04. 2020.]( +https://ojs.aaai.org/index.php/AAAI/article/view/6037) + +[2] +[Viola, Paul, and Michael Jones. "Rapid object detection using a boosted cascade of simple features." Proceedings of the 2001 IEEE computer society conference on computer vision and pattern recognition. CVPR 2001. Vol. 1. IEEE, 2001]( +https://ieeexplore.ieee.org/abstract/document/990517) diff --git a/projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation/benchmark_esr.py b/projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation/benchmark_esr.py new file mode 100644 index 0000000000..2e5910a794 --- /dev/null +++ b/projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation/benchmark_esr.py @@ -0,0 +1,91 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import torch +import yaml +from pytorch_benchmark import benchmark +import logging +import argparse + +# opendr imports +from opendr.perception.facial_expression_recognition import FacialEmotionLearner +from opendr.engine.data import Image + + +logger = logging.getLogger("benchmark") +logging.basicConfig() +logger.setLevel("DEBUG") + + +def benchmark_esr(args): + results_dir = "./results" + if not os.path.exists(results_dir): + os.makedirs(results_dir) + device = args.device + + print(f"==== Benchmarking {args.method} ====") + + learner = FacialEmotionLearner(device=device, ensemble_size=args.ensemble_size, diversify=True) + learner.init_model(num_branches=args.ensemble_size) + + if device == 'cuda': + learner.model.cuda() + + num_runs = 100 + batch_size = 32 + C = 3 + H = 96 + W = 96 + input_face = torch.randn(C, H, W) + input_img = Image(input_face) + input_batch = [] + for i in range(batch_size): + input_batch.append(input_img) + if type(input_batch) is list: + input_batch = torch.stack([torch.tensor(v.data) for v in input_batch]) + + def get_device_fn(*args): + # nonlocal learner + return next(learner.model.parameters()).device + + def transfer_to_device_fn(sample, device,): + return sample + + print("== Benchmarking learner.infer ==") + results1 = benchmark(model=learner.infer, + sample=input_batch, + sample_with_batch_size1=None, + num_runs=num_runs, + get_device_fn=get_device_fn, + transfer_to_device_fn=transfer_to_device_fn, + batch_size=batch_size, + print_fn=print, + ) + with open(results_dir + f"/benchmark_{args.method}_{device}.txt", "a") as f: + print("== Benchmarking learner.infer ==", file=f) + print(yaml.dump({"learner.infer": results1}), file=f) + print("\n\n", file=f) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda") + parser.add_argument('--method', type=str, default='div_esr_9', + help='facial emotion estimation method') + parser.add_argument('--ensemble_size', type=int, default=9, + help='number of 
ensemble branches') + + args = parser.parse_args() + benchmark_esr(args) diff --git a/projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation/face_detector/frontal_face.xml b/projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation/face_detector/frontal_face.xml new file mode 100644 index 0000000000..cbd1aa89e9 --- /dev/null +++ b/projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation/face_detector/frontal_face.xml @@ -0,0 +1,33314 @@ + + + +BOOST + HAAR + 24 + 24 + + 211 + + 0 + 25 + + <_> + 9 + -5.0425500869750977e+00 + + <_> + + 0 -1 0 -3.1511999666690826e-02 + + 2.0875380039215088e+00 -2.2172100543975830e+00 + <_> + + 0 -1 1 1.2396000325679779e-02 + + -1.8633940219879150e+00 1.3272049427032471e+00 + <_> + + 0 -1 2 2.1927999332547188e-02 + + -1.5105249881744385e+00 1.0625729560852051e+00 + <_> + + 0 -1 3 5.7529998011887074e-03 + + -8.7463897466659546e-01 1.1760339736938477e+00 + <_> + + 0 -1 4 1.5014000236988068e-02 + + -7.7945697307586670e-01 1.2608419656753540e+00 + <_> + + 0 -1 5 9.9371001124382019e-02 + + 5.5751299858093262e-01 -1.8743000030517578e+00 + <_> + + 0 -1 6 2.7340000960975885e-03 + + -1.6911929845809937e+00 4.4009700417518616e-01 + <_> + + 0 -1 7 -1.8859000876545906e-02 + + -1.4769539833068848e+00 4.4350099563598633e-01 + <_> + + 0 -1 8 5.9739998541772366e-03 + + -8.5909199714660645e-01 8.5255599021911621e-01 + <_> + 16 + -4.9842400550842285e+00 + + <_> + + 0 -1 9 -2.1110000088810921e-02 + + 1.2435649633407593e+00 -1.5713009834289551e+00 + <_> + + 0 -1 10 2.0355999469757080e-02 + + -1.6204780340194702e+00 1.1817760467529297e+00 + <_> + + 0 -1 11 2.1308999508619308e-02 + + -1.9415930509567261e+00 7.0069098472595215e-01 + <_> + + 0 -1 12 9.1660000383853912e-02 + + -5.5670100450515747e-01 1.7284419536590576e+00 + <_> + + 0 -1 13 3.6288000643253326e-02 + + 2.6763799786567688e-01 -2.1831810474395752e+00 + <_> + + 0 -1 14 -1.9109999760985374e-02 + + -2.6730210781097412e+00 4.5670801401138306e-01 + <_> + + 0 -1 15 8.2539999857544899e-03 + + -1.0852910280227661e+00 5.3564202785491943e-01 + <_> + + 0 -1 16 1.8355000764131546e-02 + + -3.5200199484825134e-01 9.3339198827743530e-01 + <_> + + 0 -1 17 -7.0569999516010284e-03 + + 9.2782098054885864e-01 -6.6349899768829346e-01 + <_> + + 0 -1 18 -9.8770000040531158e-03 + + 1.1577470302581787e+00 -2.9774799942970276e-01 + <_> + + 0 -1 19 1.5814000740647316e-02 + + -4.1960600018501282e-01 1.3576040267944336e+00 + <_> + + 0 -1 20 -2.0700000226497650e-02 + + 1.4590020179748535e+00 -1.9739399850368500e-01 + <_> + + 0 -1 21 -1.3760800659656525e-01 + + 1.1186759471893311e+00 -5.2915501594543457e-01 + <_> + + 0 -1 22 1.4318999834358692e-02 + + -3.5127198696136475e-01 1.1440860033035278e+00 + <_> + + 0 -1 23 1.0253000073134899e-02 + + -6.0850602388381958e-01 7.7098500728607178e-01 + <_> + + 0 -1 24 9.1508001089096069e-02 + + 3.8817799091339111e-01 -1.5122940540313721e+00 + <_> + 27 + -4.6551899909973145e+00 + + <_> + + 0 -1 25 6.9747000932693481e-02 + + -1.0130879878997803e+00 1.4687349796295166e+00 + <_> + + 0 -1 26 3.1502999365329742e-02 + + -1.6463639736175537e+00 1.0000629425048828e+00 + <_> + + 0 -1 27 1.4260999858379364e-02 + + 4.6480301022529602e-01 -1.5959889888763428e+00 + <_> + + 0 -1 28 1.4453000389039516e-02 + + -6.5511900186538696e-01 8.3021801710128784e-01 + <_> + + 0 -1 29 -3.0509999487549067e-03 + + -1.3982310295104980e+00 4.2550599575042725e-01 + <_> + + 0 -1 30 3.2722998410463333e-02 + + -5.0702601671218872e-01 
1.0526109933853149e+00 + <_> + + 0 -1 31 -7.2960001416504383e-03 + + 3.6356899142265320e-01 -1.3464889526367188e+00 + <_> + + 0 -1 32 5.0425000488758087e-02 + + -3.0461400747299194e-01 1.4504129886627197e+00 + <_> + + 0 -1 33 4.6879000961780548e-02 + + -4.0286201238632202e-01 1.2145609855651855e+00 + <_> + + 0 -1 34 -6.9358997046947479e-02 + + 1.0539360046386719e+00 -4.5719701051712036e-01 + <_> + + 0 -1 35 -4.9033999443054199e-02 + + -1.6253089904785156e+00 1.5378999710083008e-01 + <_> + + 0 -1 36 8.4827996790409088e-02 + + 2.8402999043464661e-01 -1.5662059783935547e+00 + <_> + + 0 -1 37 -1.7229999648407102e-03 + + -1.0147459506988525e+00 2.3294800519943237e-01 + <_> + + 0 -1 38 1.1562199890613556e-01 + + -1.6732899844646454e-01 1.2804069519042969e+00 + <_> + + 0 -1 39 -5.1279999315738678e-02 + + 1.5162390470504761e+00 -3.0271100997924805e-01 + <_> + + 0 -1 40 -4.2706999927759171e-02 + + 1.7631920576095581e+00 -5.1832001656293869e-02 + <_> + + 0 -1 41 3.7178099155426025e-01 + + -3.1389200687408447e-01 1.5357979536056519e+00 + <_> + + 0 -1 42 1.9412999972701073e-02 + + -1.0017599910497665e-01 9.3655401468276978e-01 + <_> + + 0 -1 43 1.7439000308513641e-02 + + -4.0379899740219116e-01 9.6293002367019653e-01 + <_> + + 0 -1 44 3.9638999849557877e-02 + + 1.7039099335670471e-01 -2.9602990150451660e+00 + <_> + + 0 -1 45 -9.1469995677471161e-03 + + 8.8786798715591431e-01 -4.3818700313568115e-01 + <_> + + 0 -1 46 1.7219999572262168e-03 + + -3.7218600511550903e-01 4.0018901228904724e-01 + <_> + + 0 -1 47 3.0231000855565071e-02 + + 6.5924003720283508e-02 -2.6469180583953857e+00 + <_> + + 0 -1 48 -7.8795999288558960e-02 + + -1.7491459846496582e+00 2.8475299477577209e-01 + <_> + + 0 -1 49 2.1110000088810921e-03 + + -9.3908101320266724e-01 2.3205199837684631e-01 + <_> + + 0 -1 50 2.7091000229120255e-02 + + -5.2664000540971756e-02 1.0756820440292358e+00 + <_> + + 0 -1 51 -4.4964998960494995e-02 + + -1.8294479846954346e+00 9.9561996757984161e-02 + <_> + 32 + -4.4531588554382324e+00 + + <_> + + 0 -1 52 -6.5701000392436981e-02 + + 1.1558510065078735e+00 -1.0716359615325928e+00 + <_> + + 0 -1 53 1.5839999541640282e-02 + + -1.5634720325469971e+00 7.6877099275588989e-01 + <_> + + 0 -1 54 1.4570899307727814e-01 + + -5.7450097799301147e-01 1.3808720111846924e+00 + <_> + + 0 -1 55 6.1389999464154243e-03 + + -1.4570560455322266e+00 5.1610302925109863e-01 + <_> + + 0 -1 56 6.7179999314248562e-03 + + -8.3533602952957153e-01 5.8522200584411621e-01 + <_> + + 0 -1 57 1.8518000841140747e-02 + + -3.1312099099159241e-01 1.1696679592132568e+00 + <_> + + 0 -1 58 1.9958000630140305e-02 + + -4.3442600965499878e-01 9.5446902513504028e-01 + <_> + + 0 -1 59 -2.7755001187324524e-01 + + 1.4906179904937744e+00 -1.3815900683403015e-01 + <_> + + 0 -1 60 9.1859996318817139e-03 + + -9.6361500024795532e-01 2.7665498852729797e-01 + <_> + + 0 -1 61 -3.7737999111413956e-02 + + -2.4464108943939209e+00 2.3619599640369415e-01 + <_> + + 0 -1 62 1.8463000655174255e-02 + + 1.7539200186729431e-01 -1.3423130512237549e+00 + <_> + + 0 -1 63 -1.1114999651908875e-02 + + 4.8710799217224121e-01 -8.9851897954940796e-01 + <_> + + 0 -1 64 3.3927999436855316e-02 + + 1.7874200642108917e-01 -1.6342279911041260e+00 + <_> + + 0 -1 65 -3.5649001598358154e-02 + + -1.9607399702072144e+00 1.8102499842643738e-01 + <_> + + 0 -1 66 -1.1438000015914440e-02 + + 9.9010699987411499e-01 -3.8103199005126953e-01 + <_> + + 0 -1 67 -6.5236002206802368e-02 + + -2.5794160366058350e+00 2.4753600358963013e-01 + <_> + + 0 -1 68 -4.2272001504898071e-02 + + 1.4411840438842773e+00 
-2.9508298635482788e-01 + <_> + + 0 -1 69 1.9219999667257071e-03 + + -4.9608600139617920e-01 6.3173598051071167e-01 + <_> + + 0 -1 70 -1.2921799719333649e-01 + + -2.3314270973205566e+00 5.4496999830007553e-02 + <_> + + 0 -1 71 2.2931000217795372e-02 + + -8.4447097778320312e-01 3.8738098740577698e-01 + <_> + + 0 -1 72 -3.4120000898838043e-02 + + -1.4431500434875488e+00 9.8422996699810028e-02 + <_> + + 0 -1 73 2.6223000138998032e-02 + + 1.8223099410533905e-01 -1.2586519718170166e+00 + <_> + + 0 -1 74 2.2236999124288559e-02 + + 6.9807998836040497e-02 -2.3820950984954834e+00 + <_> + + 0 -1 75 -5.8240001089870930e-03 + + 3.9332500100135803e-01 -2.7542799711227417e-01 + <_> + + 0 -1 76 4.3653000146150589e-02 + + 1.4832699298858643e-01 -1.1368780136108398e+00 + <_> + + 0 -1 77 5.7266999036073685e-02 + + 2.4628099799156189e-01 -1.2687400579452515e+00 + <_> + + 0 -1 78 2.3409998975694180e-03 + + -7.5448900461196899e-01 2.7163800597190857e-01 + <_> + + 0 -1 79 1.2996000237762928e-02 + + -3.6394900083541870e-01 7.0959198474884033e-01 + <_> + + 0 -1 80 -2.6517000049352646e-02 + + -2.3221859931945801e+00 3.5744000226259232e-02 + <_> + + 0 -1 81 -5.8400002308189869e-03 + + 4.2194300889968872e-01 -4.8184998333454132e-02 + <_> + + 0 -1 82 -1.6568999737501144e-02 + + 1.1099940538406372e+00 -3.4849700331687927e-01 + <_> + + 0 -1 83 -6.8157002329826355e-02 + + -3.3269989490509033e+00 2.1299000084400177e-01 + <_> + 52 + -4.3864588737487793e+00 + + <_> + + 0 -1 84 3.9974000304937363e-02 + + -1.2173449993133545e+00 1.0826710462570190e+00 + <_> + + 0 -1 85 1.8819500505924225e-01 + + -4.8289400339126587e-01 1.4045250415802002e+00 + <_> + + 0 -1 86 7.8027002513408661e-02 + + -1.0782150030136108e+00 7.4040299654006958e-01 + <_> + + 0 -1 87 1.1899999663000926e-04 + + -1.2019979953765869e+00 3.7749201059341431e-01 + <_> + + 0 -1 88 8.5056997835636139e-02 + + -4.3939098715782166e-01 1.2647340297698975e+00 + <_> + + 0 -1 89 8.9720003306865692e-03 + + -1.8440499901771545e-01 4.5726400613784790e-01 + <_> + + 0 -1 90 8.8120000436902046e-03 + + 3.0396699905395508e-01 -9.5991098880767822e-01 + <_> + + 0 -1 91 -2.3507999256253242e-02 + + 1.2487529516220093e+00 4.6227999031543732e-02 + <_> + + 0 -1 92 7.0039997808635235e-03 + + -5.9442102909088135e-01 5.3963297605514526e-01 + <_> + + 0 -1 93 3.3851999789476395e-02 + + 2.8496098518371582e-01 -1.4895249605178833e+00 + <_> + + 0 -1 94 -3.2530000898987055e-03 + + 4.8120799660682678e-01 -5.2712398767471313e-01 + <_> + + 0 -1 95 2.9097000136971474e-02 + + 2.6743900775909424e-01 -1.6007850170135498e+00 + <_> + + 0 -1 96 -8.4790000692009926e-03 + + -1.3107639551162720e+00 1.5243099629878998e-01 + <_> + + 0 -1 97 -1.0795000009238720e-02 + + 4.5613598823547363e-01 -7.2050899267196655e-01 + <_> + + 0 -1 98 -2.4620000272989273e-02 + + -1.7320619821548462e+00 6.8363003432750702e-02 + <_> + + 0 -1 99 3.7380000576376915e-03 + + -1.9303299486637115e-01 6.8243497610092163e-01 + <_> + + 0 -1 100 -1.2264000251889229e-02 + + -1.6095290184020996e+00 7.5268000364303589e-02 + <_> + + 0 -1 101 -4.8670000396668911e-03 + + 7.4286502599716187e-01 -2.1510200202465057e-01 + <_> + + 0 -1 102 7.6725997030735016e-02 + + -2.6835098862648010e-01 1.3094140291213989e+00 + <_> + + 0 -1 103 2.8578000143170357e-02 + + -5.8793000876903534e-02 1.2196329832077026e+00 + <_> + + 0 -1 104 1.9694000482559204e-02 + + -3.5142898559570312e-01 8.4926998615264893e-01 + <_> + + 0 -1 105 -2.9093999415636063e-02 + + -1.0507299900054932e+00 2.9806300997734070e-01 + <_> + + 0 -1 106 -2.9144000262022018e-02 + + 
8.2547801733016968e-01 -3.2687199115753174e-01 + <_> + + 0 -1 107 1.9741000607609749e-02 + + 2.0452600717544556e-01 -8.3760201930999756e-01 + <_> + + 0 -1 108 4.3299999088048935e-03 + + 2.0577900111675262e-01 -6.6829800605773926e-01 + <_> + + 0 -1 109 -3.5500999540090561e-02 + + -1.2969900369644165e+00 1.3897499442100525e-01 + <_> + + 0 -1 110 -1.6172999516129494e-02 + + -1.3110569715499878e+00 7.5751997530460358e-02 + <_> + + 0 -1 111 -2.2151000797748566e-02 + + -1.0524389743804932e+00 1.9241100549697876e-01 + <_> + + 0 -1 112 -2.2707000374794006e-02 + + -1.3735309839248657e+00 6.6780999302864075e-02 + <_> + + 0 -1 113 1.6607999801635742e-02 + + -3.7135999649763107e-02 7.7846401929855347e-01 + <_> + + 0 -1 114 -1.3309000059962273e-02 + + -9.9850702285766602e-01 1.2248100340366364e-01 + <_> + + 0 -1 115 -3.3732000738382339e-02 + + 1.4461359977722168e+00 1.3151999562978745e-02 + <_> + + 0 -1 116 1.6935000196099281e-02 + + -3.7121298909187317e-01 5.2842199802398682e-01 + <_> + + 0 -1 117 3.3259999472647905e-03 + + -5.7568502426147461e-01 3.9261901378631592e-01 + <_> + + 0 -1 118 8.3644002676010132e-02 + + 1.6116000711917877e-02 -2.1173279285430908e+00 + <_> + + 0 -1 119 2.5785198807716370e-01 + + -8.1609003245830536e-02 9.8782497644424438e-01 + <_> + + 0 -1 120 -3.6566998809576035e-02 + + -1.1512110233306885e+00 9.6459001302719116e-02 + <_> + + 0 -1 121 -1.6445999965071678e-02 + + 3.7315499782562256e-01 -1.4585399627685547e-01 + <_> + + 0 -1 122 -3.7519999314099550e-03 + + 2.6179298758506775e-01 -5.8156698942184448e-01 + <_> + + 0 -1 123 -6.3660000450909138e-03 + + 7.5477397441864014e-01 -1.7055200040340424e-01 + <_> + + 0 -1 124 -3.8499999791383743e-03 + + 2.2653999924659729e-01 -6.3876402378082275e-01 + <_> + + 0 -1 125 -4.5494001358747482e-02 + + -1.2640299797058105e+00 2.5260698795318604e-01 + <_> + + 0 -1 126 -2.3941000923514366e-02 + + 8.7068402767181396e-01 -2.7104699611663818e-01 + <_> + + 0 -1 127 -7.7558003365993500e-02 + + -1.3901610374450684e+00 2.3612299561500549e-01 + <_> + + 0 -1 128 2.3614000529050827e-02 + + 6.6140003502368927e-02 -1.2645419836044312e+00 + <_> + + 0 -1 129 -2.5750000495463610e-03 + + -5.3841698169708252e-01 3.0379098653793335e-01 + <_> + + 0 -1 130 1.2010800093412399e-01 + + -3.5343000292778015e-01 5.2866202592849731e-01 + <_> + + 0 -1 131 2.2899999748915434e-03 + + -5.8701997995376587e-01 2.4061000347137451e-01 + <_> + + 0 -1 132 6.9716997444629669e-02 + + -3.3348900079727173e-01 5.1916301250457764e-01 + <_> + + 0 -1 133 -4.6670001000165939e-02 + + 6.9795399904251099e-01 -1.4895999804139137e-02 + <_> + + 0 -1 134 -5.0129000097513199e-02 + + 8.6146199703216553e-01 -2.5986000895500183e-01 + <_> + + 0 -1 135 3.0147999525070190e-02 + + 1.9332799315452576e-01 -5.9131097793579102e-01 + <_> + 53 + -4.1299300193786621e+00 + + <_> + + 0 -1 136 9.1085001826286316e-02 + + -8.9233100414276123e-01 1.0434230566024780e+00 + <_> + + 0 -1 137 1.2818999588489532e-02 + + -1.2597670555114746e+00 5.5317097902297974e-01 + <_> + + 0 -1 138 1.5931999310851097e-02 + + -8.6254400014877319e-01 6.3731801509857178e-01 + <_> + + 0 -1 139 2.2780001163482666e-03 + + -7.4639201164245605e-01 5.3155601024627686e-01 + <_> + + 0 -1 140 3.1840998679399490e-02 + + -1.2650489807128906e+00 3.6153900623321533e-01 + <_> + + 0 -1 141 2.6960000395774841e-03 + + -9.8290401697158813e-01 3.6013001203536987e-01 + <_> + + 0 -1 142 -1.2055000290274620e-02 + + 6.4068400859832764e-01 -5.0125002861022949e-01 + <_> + + 0 -1 143 2.1324999630451202e-02 + + -2.4034999310970306e-01 8.5448002815246582e-01 + <_> + + 
0 -1 144 3.0486000701785088e-02 + + -3.4273600578308105e-01 1.1428849697113037e+00 + <_> + + 0 -1 145 -4.5079998672008514e-02 + + 1.0976949930191040e+00 -1.7974600195884705e-01 + <_> + + 0 -1 146 -7.1700997650623322e-02 + + 1.5735000371932983e+00 -3.1433498859405518e-01 + <_> + + 0 -1 147 5.9218000620603561e-02 + + -2.7582401037216187e-01 1.0448570251464844e+00 + <_> + + 0 -1 148 6.7010000348091125e-03 + + -1.0974019765853882e+00 1.9801199436187744e-01 + <_> + + 0 -1 149 4.1046999394893646e-02 + + 3.0547699332237244e-01 -1.3287999629974365e+00 + <_> + + 0 -1 150 -8.5499999113380909e-04 + + 2.5807100534439087e-01 -7.0052897930145264e-01 + <_> + + 0 -1 151 -3.0360000208020210e-02 + + -1.2306419610977173e+00 2.2609399259090424e-01 + <_> + + 0 -1 152 -1.2930000200867653e-02 + + 4.0758600831031799e-01 -5.1234501600265503e-01 + <_> + + 0 -1 153 3.7367999553680420e-02 + + -9.4755001366138458e-02 6.1765098571777344e-01 + <_> + + 0 -1 154 2.4434000253677368e-02 + + -4.1100600361824036e-01 4.7630500793457031e-01 + <_> + + 0 -1 155 5.7007998228073120e-02 + + 2.5249299407005310e-01 -6.8669801950454712e-01 + <_> + + 0 -1 156 -1.6313999891281128e-02 + + -9.3928402662277222e-01 1.1448100209236145e-01 + <_> + + 0 -1 157 -1.7648899555206299e-01 + + 1.2451089620590210e+00 -5.6519001722335815e-02 + <_> + + 0 -1 158 1.7614600062370300e-01 + + -3.2528200745582581e-01 8.2791501283645630e-01 + <_> + + 0 -1 159 -7.3910001665353775e-03 + + 3.4783700108528137e-01 -1.7929099500179291e-01 + <_> + + 0 -1 160 6.0890998691320419e-02 + + 5.5098000913858414e-02 -1.5480779409408569e+00 + <_> + + 0 -1 161 -2.9123000800609589e-02 + + -1.0255639553070068e+00 2.4106900393962860e-01 + <_> + + 0 -1 162 -4.5648999512195587e-02 + + 1.0301599502563477e+00 -3.1672099232673645e-01 + <_> + + 0 -1 163 3.7333000451326370e-02 + + 2.1620599925518036e-01 -8.2589900493621826e-01 + <_> + + 0 -1 164 -2.4411000311374664e-02 + + -1.5957959890365601e+00 5.1139000803232193e-02 + <_> + + 0 -1 165 -5.9806998819112778e-02 + + -1.0312290191650391e+00 1.3092300295829773e-01 + <_> + + 0 -1 166 -3.0106000602245331e-02 + + -1.4781630039215088e+00 3.7211999297142029e-02 + <_> + + 0 -1 167 7.4209999293088913e-03 + + -2.4024100601673126e-01 4.9333998560905457e-01 + <_> + + 0 -1 168 -2.1909999195486307e-03 + + 2.8941500186920166e-01 -5.7259601354598999e-01 + <_> + + 0 -1 169 2.0860999822616577e-02 + + -2.3148399591445923e-01 6.3765901327133179e-01 + <_> + + 0 -1 170 -6.6990000195801258e-03 + + -1.2107750177383423e+00 6.4018003642559052e-02 + <_> + + 0 -1 171 1.8758000805974007e-02 + + 2.4461300671100616e-01 -9.9786698818206787e-01 + <_> + + 0 -1 172 -4.4323001056909561e-02 + + -1.3699189424514771e+00 3.6051999777555466e-02 + <_> + + 0 -1 173 2.2859999909996986e-02 + + 2.1288399398326874e-01 -1.0397620201110840e+00 + <_> + + 0 -1 174 -9.8600005730986595e-04 + + 3.2443600893020630e-01 -5.4291802644729614e-01 + <_> + + 0 -1 175 1.7239000648260117e-02 + + -2.8323900699615479e-01 4.4468200206756592e-01 + <_> + + 0 -1 176 -3.4531001001596451e-02 + + -2.3107020854949951e+00 -3.1399999279528856e-03 + <_> + + 0 -1 177 6.7006997764110565e-02 + + 2.8715699911117554e-01 -6.4481002092361450e-01 + <_> + + 0 -1 178 2.3776899278163910e-01 + + -2.7174800634384155e-01 8.0219101905822754e-01 + <_> + + 0 -1 179 -1.2903000228106976e-02 + + -1.5317620038986206e+00 2.1423600614070892e-01 + <_> + + 0 -1 180 1.0514999739825726e-02 + + 7.7037997543811798e-02 -1.0581140518188477e+00 + <_> + + 0 -1 181 1.6969000920653343e-02 + + 1.4306700229644775e-01 -8.5828399658203125e-01 + <_> + + 
0 -1 182 -7.2460002265870571e-03 + + -1.1020129919052124e+00 6.4906999468803406e-02 + <_> + + 0 -1 183 1.0556999593973160e-02 + + 1.3964000158011913e-02 6.3601499795913696e-01 + <_> + + 0 -1 184 6.1380001716315746e-03 + + -3.4545901417732239e-01 5.6296801567077637e-01 + <_> + + 0 -1 185 1.3158000074326992e-02 + + 1.9927300512790680e-01 -1.5040320158004761e+00 + <_> + + 0 -1 186 3.1310000922530890e-03 + + -4.0903699398040771e-01 3.7796398997306824e-01 + <_> + + 0 -1 187 -1.0920699685811996e-01 + + -2.2227079868316650e+00 1.2178199738264084e-01 + <_> + + 0 -1 188 8.1820003688335419e-03 + + -2.8652000427246094e-01 6.7890799045562744e-01 + <_> + 62 + -4.0218091011047363e+00 + + <_> + + 0 -1 189 3.1346999108791351e-02 + + -8.8884598016738892e-01 9.4936800003051758e-01 + <_> + + 0 -1 190 3.1918000429868698e-02 + + -1.1146880388259888e+00 4.8888999223709106e-01 + <_> + + 0 -1 191 6.5939999185502529e-03 + + -1.0097689628601074e+00 4.9723801016807556e-01 + <_> + + 0 -1 192 2.6148000732064247e-02 + + 2.5991299748420715e-01 -1.2537480592727661e+00 + <_> + + 0 -1 193 1.2845000252127647e-02 + + -5.7138597965240479e-01 5.9659498929977417e-01 + <_> + + 0 -1 194 2.6344999670982361e-02 + + -5.5203199386596680e-01 3.0217400193214417e-01 + <_> + + 0 -1 195 -1.5083000063896179e-02 + + -1.2871240377426147e+00 2.2354200482368469e-01 + <_> + + 0 -1 196 -3.8887001574039459e-02 + + 1.7425049543380737e+00 -9.9747002124786377e-02 + <_> + + 0 -1 197 -5.7029998861253262e-03 + + -1.0523240566253662e+00 1.8362599611282349e-01 + <_> + + 0 -1 198 -1.4860000228509307e-03 + + 5.6784200668334961e-01 -4.6742001175880432e-01 + <_> + + 0 -1 199 -2.8486000373959541e-02 + + 1.3082909584045410e+00 -2.6460900902748108e-01 + <_> + + 0 -1 200 6.6224999725818634e-02 + + -4.6210700273513794e-01 4.1749599575996399e-01 + <_> + + 0 -1 201 8.8569996878504753e-03 + + -4.1474899649620056e-01 5.9204798936843872e-01 + <_> + + 0 -1 202 1.1355999857187271e-02 + + 3.6103099584579468e-01 -4.5781201124191284e-01 + <_> + + 0 -1 203 -2.7679998893290758e-03 + + -8.9238899946212769e-01 1.4199000597000122e-01 + <_> + + 0 -1 204 1.1246999725699425e-02 + + 2.9353401064872742e-01 -9.7330600023269653e-01 + <_> + + 0 -1 205 7.1970000863075256e-03 + + -7.9334902763366699e-01 1.8313400447368622e-01 + <_> + + 0 -1 206 3.1768999993801117e-02 + + 1.5523099899291992e-01 -1.3245639801025391e+00 + <_> + + 0 -1 207 2.5173999369144440e-02 + + 3.4214999526739120e-02 -2.0948131084442139e+00 + <_> + + 0 -1 208 7.5360001064836979e-03 + + -3.9450600743293762e-01 5.1333999633789062e-01 + <_> + + 0 -1 209 3.2873000949621201e-02 + + 8.8372997939586639e-02 -1.2814120054244995e+00 + <_> + + 0 -1 210 -2.7379998937249184e-03 + + 5.5286502838134766e-01 -4.6384999155998230e-01 + <_> + + 0 -1 211 -3.8075000047683716e-02 + + -1.8497270345687866e+00 4.5944001525640488e-02 + <_> + + 0 -1 212 -3.8984000682830811e-02 + + -4.8223701119422913e-01 3.4760600328445435e-01 + <_> + + 0 -1 213 2.8029999230057001e-03 + + -4.5154699683189392e-01 4.2806300520896912e-01 + <_> + + 0 -1 214 -5.4145999252796173e-02 + + -8.4520798921585083e-01 1.6674900054931641e-01 + <_> + + 0 -1 215 -8.3280000835657120e-03 + + 3.5348299145698547e-01 -4.7163200378417969e-01 + <_> + + 0 -1 216 3.3778000622987747e-02 + + 1.8463100492954254e-01 -1.6686669588088989e+00 + <_> + + 0 -1 217 -1.1238099634647369e-01 + + -1.2521569728851318e+00 3.5992000252008438e-02 + <_> + + 0 -1 218 -1.0408000089228153e-02 + + -8.1620401144027710e-01 2.3428599536418915e-01 + <_> + + 0 -1 219 -4.9439999274909496e-03 + + -9.2584699392318726e-01 
1.0034800320863724e-01 + <_> + + 0 -1 220 -9.3029998242855072e-03 + + 5.6499302387237549e-01 -1.8881900608539581e-01 + <_> + + 0 -1 221 -1.1749999597668648e-02 + + 8.0302399396896362e-01 -3.8277000188827515e-01 + <_> + + 0 -1 222 -2.3217000067234039e-02 + + -8.4926998615264893e-01 1.9671200215816498e-01 + <_> + + 0 -1 223 1.6866000369191170e-02 + + -4.0591898560523987e-01 5.0695300102233887e-01 + <_> + + 0 -1 224 -2.4031000211834908e-02 + + -1.5297520160675049e+00 2.3344999551773071e-01 + <_> + + 0 -1 225 -3.6945998668670654e-02 + + 6.3007700443267822e-01 -3.1780400872230530e-01 + <_> + + 0 -1 226 -6.1563998460769653e-02 + + 5.8627897500991821e-01 -1.2107999995350838e-02 + <_> + + 0 -1 227 2.1661000326275826e-02 + + -2.5623700022697449e-01 1.0409849882125854e+00 + <_> + + 0 -1 228 -3.6710000131279230e-03 + + 2.9171100258827209e-01 -8.3287298679351807e-01 + <_> + + 0 -1 229 4.4849000871181488e-02 + + -3.9633199572563171e-01 4.5662000775337219e-01 + <_> + + 0 -1 230 5.7195000350475311e-02 + + 2.1023899316787720e-01 -1.5004800558090210e+00 + <_> + + 0 -1 231 -1.1342000216245651e-02 + + 4.4071298837661743e-01 -3.8653799891471863e-01 + <_> + + 0 -1 232 -1.2004000134766102e-02 + + 9.3954598903656006e-01 -1.0589499771595001e-01 + <_> + + 0 -1 233 2.2515999153256416e-02 + + 9.4480002298951149e-03 -1.6799509525299072e+00 + <_> + + 0 -1 234 -1.9809000194072723e-02 + + -1.0133639574050903e+00 2.4146600067615509e-01 + <_> + + 0 -1 235 1.5891000628471375e-02 + + -3.7507599592208862e-01 4.6614098548889160e-01 + <_> + + 0 -1 236 -9.1420002281665802e-03 + + -8.0484098196029663e-01 1.7816999554634094e-01 + <_> + + 0 -1 237 -4.4740000739693642e-03 + + -1.0562069416046143e+00 7.3305003345012665e-02 + <_> + + 0 -1 238 1.2742500007152557e-01 + + 2.0165599882602692e-01 -1.5467929840087891e+00 + <_> + + 0 -1 239 4.7703001648187637e-02 + + -3.7937799096107483e-01 3.7885999679565430e-01 + <_> + + 0 -1 240 5.3608000278472900e-02 + + 2.1220499277114868e-01 -1.2399710416793823e+00 + <_> + + 0 -1 241 -3.9680998772382736e-02 + + -1.0257550477981567e+00 5.1282998174428940e-02 + <_> + + 0 -1 242 -6.7327000200748444e-02 + + -1.0304750204086304e+00 2.3005299270153046e-01 + <_> + + 0 -1 243 1.3337600231170654e-01 + + -2.0869000256061554e-01 1.2272510528564453e+00 + <_> + + 0 -1 244 -2.0919300615787506e-01 + + 8.7929898500442505e-01 -4.4254999607801437e-02 + <_> + + 0 -1 245 -6.5589003264904022e-02 + + 1.0443429946899414e+00 -2.1682099997997284e-01 + <_> + + 0 -1 246 6.1882998794317245e-02 + + 1.3798199594020844e-01 -1.9009059667587280e+00 + <_> + + 0 -1 247 -2.5578999891877174e-02 + + -1.6607600450515747e+00 5.8439997956156731e-03 + <_> + + 0 -1 248 -3.4827001392841339e-02 + + 7.9940402507781982e-01 -8.2406997680664062e-02 + <_> + + 0 -1 249 -1.8209999427199364e-02 + + -9.6073997020721436e-01 6.6320002079010010e-02 + <_> + + 0 -1 250 1.5070999972522259e-02 + + 1.9899399578571320e-01 -7.6433002948760986e-01 + <_> + 72 + -3.8832089900970459e+00 + + <_> + + 0 -1 251 4.6324998140335083e-02 + + -1.0362670421600342e+00 8.2201498746871948e-01 + <_> + + 0 -1 252 1.5406999737024307e-02 + + -1.2327589988708496e+00 2.9647698998451233e-01 + <_> + + 0 -1 253 1.2808999978005886e-02 + + -7.5852298736572266e-01 5.7985502481460571e-01 + <_> + + 0 -1 254 4.9150999635457993e-02 + + -3.8983899354934692e-01 8.9680302143096924e-01 + <_> + + 0 -1 255 1.2621000409126282e-02 + + -7.1799302101135254e-01 5.0440901517868042e-01 + <_> + + 0 -1 256 -1.8768999725580215e-02 + + 5.5147600173950195e-01 -7.0555400848388672e-01 + <_> + + 0 -1 257 
4.1965000331401825e-02 + + -4.4782099127769470e-01 7.0985502004623413e-01 + <_> + + 0 -1 258 -5.1401998847723007e-02 + + -1.0932120084762573e+00 2.6701900362968445e-01 + <_> + + 0 -1 259 -7.0960998535156250e-02 + + 8.3618402481079102e-01 -3.8318100571632385e-01 + <_> + + 0 -1 260 1.6745999455451965e-02 + + -2.5733101367950439e-01 2.5966501235961914e-01 + <_> + + 0 -1 261 -6.2400000169873238e-03 + + 3.1631499528884888e-01 -5.8796900510787964e-01 + <_> + + 0 -1 262 -3.9397999644279480e-02 + + -1.0491210222244263e+00 1.6822400689125061e-01 + <_> + + 0 -1 263 0. + + 1.6144199669361115e-01 -8.7876898050308228e-01 + <_> + + 0 -1 264 -2.2307999432086945e-02 + + -6.9053500890731812e-01 2.3607000708580017e-01 + <_> + + 0 -1 265 1.8919999711215496e-03 + + 2.4989199638366699e-01 -5.6583297252655029e-01 + <_> + + 0 -1 266 1.0730000212788582e-03 + + -5.0415802001953125e-01 3.8374501466751099e-01 + <_> + + 0 -1 267 3.9230998605489731e-02 + + 4.2619001120328903e-02 -1.3875889778137207e+00 + <_> + + 0 -1 268 6.2238000333309174e-02 + + 1.4119400084018707e-01 -1.0688860416412354e+00 + <_> + + 0 -1 269 2.1399999968707561e-03 + + -8.9622402191162109e-01 1.9796399772167206e-01 + <_> + + 0 -1 270 9.1800000518560410e-04 + + -4.5337298512458801e-01 4.3532699346542358e-01 + <_> + + 0 -1 271 -6.9169998168945312e-03 + + 3.3822798728942871e-01 -4.4793000817298889e-01 + <_> + + 0 -1 272 -2.3866999894380569e-02 + + -7.8908598423004150e-01 2.2511799633502960e-01 + <_> + + 0 -1 273 -1.0262800008058548e-01 + + -2.2831439971923828e+00 -5.3960001096129417e-03 + <_> + + 0 -1 274 -9.5239998772740364e-03 + + 3.9346700906753540e-01 -5.2242201566696167e-01 + <_> + + 0 -1 275 3.9877001196146011e-02 + + 3.2799001783132553e-02 -1.5079489946365356e+00 + <_> + + 0 -1 276 -1.3144999742507935e-02 + + -1.0839990377426147e+00 1.8482400476932526e-01 + <_> + + 0 -1 277 -5.0590999424457550e-02 + + -1.8822289705276489e+00 -2.2199999075382948e-03 + <_> + + 0 -1 278 2.4917000904679298e-02 + + 1.4593400061130524e-01 -2.2196519374847412e+00 + <_> + + 0 -1 279 -7.6370001770555973e-03 + + -1.0164569616317749e+00 5.8797001838684082e-02 + <_> + + 0 -1 280 4.2911998927593231e-02 + + 1.5443000197410583e-01 -1.1843889951705933e+00 + <_> + + 0 -1 281 2.3000000510364771e-04 + + -7.7305799722671509e-01 1.2189900130033493e-01 + <_> + + 0 -1 282 9.0929996222257614e-03 + + -1.1450099945068359e-01 7.1091300249099731e-01 + <_> + + 0 -1 283 1.1145000346004963e-02 + + 7.0000998675823212e-02 -1.0534820556640625e+00 + <_> + + 0 -1 284 -5.2453000098466873e-02 + + -1.7594360113143921e+00 1.9523799419403076e-01 + <_> + + 0 -1 285 -2.3020699620246887e-01 + + 9.5840299129486084e-01 -2.5045698881149292e-01 + <_> + + 0 -1 286 -1.6365999355912209e-02 + + 4.6731901168823242e-01 -2.1108399331569672e-01 + <_> + + 0 -1 287 -1.7208000645041466e-02 + + 7.0835697650909424e-01 -2.8018298745155334e-01 + <_> + + 0 -1 288 -3.6648001521825790e-02 + + -1.1013339757919312e+00 2.4341100454330444e-01 + <_> + + 0 -1 289 -1.0304999537765980e-02 + + -1.0933129787445068e+00 5.6258998811244965e-02 + <_> + + 0 -1 290 -1.3713000342249870e-02 + + -2.6438099145889282e-01 1.9821000099182129e-01 + <_> + + 0 -1 291 2.9308000579476357e-02 + + -2.2142399847507477e-01 1.0525950193405151e+00 + <_> + + 0 -1 292 2.4077000096440315e-02 + + 1.8485699594020844e-01 -1.7203969955444336e+00 + <_> + + 0 -1 293 6.1280000954866409e-03 + + -9.2721498012542725e-01 5.8752998709678650e-02 + <_> + + 0 -1 294 -2.2377999499440193e-02 + + 1.9646559953689575e+00 2.7785999700427055e-02 + <_> + + 0 -1 295 
-7.0440000854432583e-03 + + 2.1427600085735321e-01 -4.8407599329948425e-01 + <_> + + 0 -1 296 -4.0603000670671463e-02 + + -1.1754349470138550e+00 1.6061200201511383e-01 + <_> + + 0 -1 297 -2.4466000497341156e-02 + + -1.1239900588989258e+00 4.1110001504421234e-02 + <_> + + 0 -1 298 2.5309999473392963e-03 + + -1.7169700562953949e-01 3.2178801298141479e-01 + <_> + + 0 -1 299 -1.9588999450206757e-02 + + 8.2720202207565308e-01 -2.6376700401306152e-01 + <_> + + 0 -1 300 -2.9635999351739883e-02 + + -1.1524770259857178e+00 1.4999300241470337e-01 + <_> + + 0 -1 301 -1.5030000358819962e-02 + + -1.0491830110549927e+00 4.0160998702049255e-02 + <_> + + 0 -1 302 -6.0715001076459885e-02 + + -1.0903840065002441e+00 1.5330800414085388e-01 + <_> + + 0 -1 303 -1.2790000066161156e-02 + + 4.2248600721359253e-01 -4.2399200797080994e-01 + <_> + + 0 -1 304 -2.0247999578714371e-02 + + -9.1866999864578247e-01 1.8485699594020844e-01 + <_> + + 0 -1 305 -3.0683999881148338e-02 + + -1.5958670377731323e+00 2.5760000571608543e-03 + <_> + + 0 -1 306 -2.0718000829219818e-02 + + -6.6299998760223389e-01 3.1037199497222900e-01 + <_> + + 0 -1 307 -1.7290000105276704e-03 + + 1.9183400273323059e-01 -6.5084999799728394e-01 + <_> + + 0 -1 308 -3.1394001096487045e-02 + + -6.3643002510070801e-01 1.5408399701118469e-01 + <_> + + 0 -1 309 1.9003000110387802e-02 + + -1.8919399380683899e-01 1.5294510126113892e+00 + <_> + + 0 -1 310 6.1769997701048851e-03 + + -1.0597900301218033e-01 6.4859598875045776e-01 + <_> + + 0 -1 311 -1.0165999643504620e-02 + + -1.0802700519561768e+00 3.7176001816987991e-02 + <_> + + 0 -1 312 -1.4169999631121755e-03 + + 3.4157499670982361e-01 -9.7737997770309448e-02 + <_> + + 0 -1 313 -4.0799998678267002e-03 + + 4.7624599933624268e-01 -3.4366300702095032e-01 + <_> + + 0 -1 314 -4.4096998870372772e-02 + + 9.7634297609329224e-01 -1.9173000007867813e-02 + <_> + + 0 -1 315 -6.0669999569654465e-02 + + -2.1752851009368896e+00 -2.8925999999046326e-02 + <_> + + 0 -1 316 -3.2931998372077942e-02 + + -6.4383101463317871e-01 1.6494099795818329e-01 + <_> + + 0 -1 317 -1.4722800254821777e-01 + + -1.4745830297470093e+00 2.5839998852461576e-03 + <_> + + 0 -1 318 -1.1930000036954880e-02 + + 4.2441400885581970e-01 -1.7712600529193878e-01 + <_> + + 0 -1 319 1.4517900347709656e-01 + + 2.5444999337196350e-02 -1.2779400348663330e+00 + <_> + + 0 -1 320 5.1447998732328415e-02 + + 1.5678399801254272e-01 -1.5188430547714233e+00 + <_> + + 0 -1 321 3.1479999888688326e-03 + + -4.0424400568008423e-01 3.2429701089859009e-01 + <_> + + 0 -1 322 -4.3600000441074371e-02 + + -1.9932260513305664e+00 1.5018600225448608e-01 + <_> + 83 + -3.8424909114837646e+00 + + <_> + + 0 -1 323 1.2899599969387054e-01 + + -6.2161999940872192e-01 1.1116520166397095e+00 + <_> + + 0 -1 324 -9.1261997818946838e-02 + + 1.0143059492111206e+00 -6.1335200071334839e-01 + <_> + + 0 -1 325 1.4271999709308147e-02 + + -1.0261659622192383e+00 3.9779999852180481e-01 + <_> + + 0 -1 326 3.2889999449253082e-02 + + -1.1386079788208008e+00 2.8690800070762634e-01 + <_> + + 0 -1 327 1.2590000405907631e-02 + + -5.6645601987838745e-01 4.5172399282455444e-01 + <_> + + 0 -1 328 1.4661000110208988e-02 + + 3.0505999922752380e-01 -6.8129599094390869e-01 + <_> + + 0 -1 329 -3.3555999398231506e-02 + + -1.7208939790725708e+00 6.1439000070095062e-02 + <_> + + 0 -1 330 1.4252699911594391e-01 + + 2.3192200064659119e-01 -1.7297149896621704e+00 + <_> + + 0 -1 331 -6.2079997733235359e-03 + + -1.2163300514221191e+00 1.2160199880599976e-01 + <_> + + 0 -1 332 1.8178999423980713e-02 + + 
3.2553699612617493e-01 -8.1003999710083008e-01 + <_> + + 0 -1 333 2.5036999955773354e-02 + + -3.1698799133300781e-01 6.7361402511596680e-01 + <_> + + 0 -1 334 4.6560999006032944e-02 + + -1.1089800298213959e-01 8.4082502126693726e-01 + <_> + + 0 -1 335 -8.9999996125698090e-03 + + 3.9574500918388367e-01 -4.7624599933624268e-01 + <_> + + 0 -1 336 4.0805999189615250e-02 + + -1.8000000272877514e-04 9.4570702314376831e-01 + <_> + + 0 -1 337 -3.4221999347209930e-02 + + 7.5206297636032104e-01 -3.1531500816345215e-01 + <_> + + 0 -1 338 -3.9716001600027084e-02 + + -8.3139598369598389e-01 1.7744399607181549e-01 + <_> + + 0 -1 339 2.5170000735670328e-03 + + -5.9377998113632202e-01 2.4657000601291656e-01 + <_> + + 0 -1 340 2.7428999543190002e-02 + + 1.5998399257659912e-01 -4.2781999707221985e-01 + <_> + + 0 -1 341 3.4986000508069992e-02 + + 3.5055998712778091e-02 -1.5988600254058838e+00 + <_> + + 0 -1 342 4.4970000162720680e-03 + + -5.2034300565719604e-01 3.7828299403190613e-01 + <_> + + 0 -1 343 2.7699999045580626e-03 + + -5.3182601928710938e-01 2.4951000511646271e-01 + <_> + + 0 -1 344 3.5174001008272171e-02 + + 1.9983400404453278e-01 -1.4446129798889160e+00 + <_> + + 0 -1 345 2.5970999151468277e-02 + + 4.4426999986171722e-02 -1.3622980117797852e+00 + <_> + + 0 -1 346 -1.5783999115228653e-02 + + -9.1020399332046509e-01 2.7190300822257996e-01 + <_> + + 0 -1 347 -7.5880000367760658e-03 + + 9.2064999043941498e-02 -8.1628900766372681e-01 + <_> + + 0 -1 348 2.0754000172019005e-02 + + 2.1185700595378876e-01 -7.4729001522064209e-01 + <_> + + 0 -1 349 5.9829000383615494e-02 + + -2.7301099896430969e-01 8.0923300981521606e-01 + <_> + + 0 -1 350 3.9039000868797302e-02 + + -1.0432299971580505e-01 8.6226201057434082e-01 + <_> + + 0 -1 351 2.1665999665856361e-02 + + 6.2709003686904907e-02 -9.8894298076629639e-01 + <_> + + 0 -1 352 -2.7496999129652977e-02 + + -9.2690998315811157e-01 1.5586300194263458e-01 + <_> + + 0 -1 353 1.0462000034749508e-02 + + 1.3418099284172058e-01 -7.0386397838592529e-01 + <_> + + 0 -1 354 2.4870999157428741e-02 + + 1.9706700742244720e-01 -4.0263301134109497e-01 + <_> + + 0 -1 355 -1.6036000102758408e-02 + + -1.1409829854965210e+00 7.3997996747493744e-02 + <_> + + 0 -1 356 4.8627000302076340e-02 + + 1.6990399360656738e-01 -7.2152197360992432e-01 + <_> + + 0 -1 357 1.2619999470189214e-03 + + -4.7389799356460571e-01 2.6254999637603760e-01 + <_> + + 0 -1 358 -8.8035002350807190e-02 + + -2.1606519222259521e+00 1.4554800093173981e-01 + <_> + + 0 -1 359 1.8356999382376671e-02 + + 4.4750999659299850e-02 -1.0766370296478271e+00 + <_> + + 0 -1 360 3.5275001078844070e-02 + + -3.2919000834226608e-02 1.2153890132904053e+00 + <_> + + 0 -1 361 -2.0392900705337524e-01 + + -1.3187999725341797e+00 1.5503999777138233e-02 + <_> + + 0 -1 362 -1.6619000583887100e-02 + + 3.6850199103355408e-01 -1.5283699333667755e-01 + <_> + + 0 -1 363 3.7739001214504242e-02 + + -2.5727799534797668e-01 7.0655298233032227e-01 + <_> + + 0 -1 364 2.2720000706613064e-03 + + -7.7602997422218323e-02 3.3367800712585449e-01 + <_> + + 0 -1 365 -1.4802999794483185e-02 + + -7.8524798154830933e-01 7.6934002339839935e-02 + <_> + + 0 -1 366 -4.8319000750780106e-02 + + 1.7022320032119751e+00 4.9722000956535339e-02 + <_> + + 0 -1 367 -2.9539000242948532e-02 + + 7.7670699357986450e-01 -2.4534299969673157e-01 + <_> + + 0 -1 368 -4.6169001609086990e-02 + + -1.4922779798507690e+00 1.2340000271797180e-01 + <_> + + 0 -1 369 -2.8064999729394913e-02 + + -2.1345369815826416e+00 -2.5797000154852867e-02 + <_> + + 0 -1 370 -5.7339998893439770e-03 + + 
5.6982600688934326e-01 -1.2056600302457809e-01 + <_> + + 0 -1 371 -1.0111000388860703e-02 + + 6.7911398410797119e-01 -2.6638001203536987e-01 + <_> + + 0 -1 372 1.1359999887645245e-02 + + 2.4789799749851227e-01 -6.4493000507354736e-01 + <_> + + 0 -1 373 5.1809001713991165e-02 + + 1.4716000296175480e-02 -1.2395579814910889e+00 + <_> + + 0 -1 374 3.3291999250650406e-02 + + -8.2559995353221893e-03 1.0168470144271851e+00 + <_> + + 0 -1 375 -1.4494000002741814e-02 + + 4.5066800713539124e-01 -3.6250999569892883e-01 + <_> + + 0 -1 376 -3.4221999347209930e-02 + + -9.5292502641677856e-01 2.0684599876403809e-01 + <_> + + 0 -1 377 -8.0654002726078033e-02 + + -2.0139501094818115e+00 -2.3084999993443489e-02 + <_> + + 0 -1 378 -8.9399999706074595e-04 + + 3.9572000503540039e-01 -2.9351300001144409e-01 + <_> + + 0 -1 379 9.7162000834941864e-02 + + -2.4980300664901733e-01 1.0859220027923584e+00 + <_> + + 0 -1 380 3.6614000797271729e-02 + + -5.7844001799821854e-02 1.2162159681320190e+00 + <_> + + 0 -1 381 5.1693998277187347e-02 + + 4.3062999844551086e-02 -1.0636160373687744e+00 + <_> + + 0 -1 382 -2.4557000026106834e-02 + + -4.8946800827980042e-01 1.7182900011539459e-01 + <_> + + 0 -1 383 3.2736799120903015e-01 + + -2.9688599705696106e-01 5.1798301935195923e-01 + <_> + + 0 -1 384 7.6959999278187752e-03 + + -5.9805899858474731e-01 2.4803200364112854e-01 + <_> + + 0 -1 385 1.6172200441360474e-01 + + -2.9613999649882317e-02 -2.3162529468536377e+00 + <_> + + 0 -1 386 -4.7889999113976955e-03 + + 3.7457901239395142e-01 -3.2779198884963989e-01 + <_> + + 0 -1 387 -1.8402999266982079e-02 + + -9.9692702293395996e-01 7.2948001325130463e-02 + <_> + + 0 -1 388 7.7665001153945923e-02 + + 1.4175699651241302e-01 -1.7238730192184448e+00 + <_> + + 0 -1 389 1.8921000882983208e-02 + + -2.1273100376129150e-01 1.0165189504623413e+00 + <_> + + 0 -1 390 -7.9397998750209808e-02 + + -1.3164349794387817e+00 1.4981999993324280e-01 + <_> + + 0 -1 391 -6.8037003278732300e-02 + + 4.9421998858451843e-01 -2.9091000556945801e-01 + <_> + + 0 -1 392 -6.1010001227259636e-03 + + 4.2430499196052551e-01 -3.3899301290512085e-01 + <_> + + 0 -1 393 3.1927000731229782e-02 + + -3.1046999618411064e-02 -2.3459999561309814e+00 + <_> + + 0 -1 394 -2.9843999072909355e-02 + + -7.8989601135253906e-01 1.5417699515819550e-01 + <_> + + 0 -1 395 -8.0541998147964478e-02 + + -2.2509229183197021e+00 -3.0906999483704567e-02 + <_> + + 0 -1 396 3.8109999150037766e-03 + + -2.5577300786972046e-01 2.3785500228404999e-01 + <_> + + 0 -1 397 3.3647000789642334e-02 + + -2.2541399300098419e-01 9.2307400703430176e-01 + <_> + + 0 -1 398 8.2809999585151672e-03 + + -2.8896200656890869e-01 3.1046199798583984e-01 + <_> + + 0 -1 399 1.0104399919509888e-01 + + -3.4864000976085663e-02 -2.7102620601654053e+00 + <_> + + 0 -1 400 -1.0009000077843666e-02 + + 5.9715402126312256e-01 -3.3831000328063965e-02 + <_> + + 0 -1 401 7.1919998154044151e-03 + + -4.7738000750541687e-01 2.2686000168323517e-01 + <_> + + 0 -1 402 2.4969000369310379e-02 + + 2.2877700626850128e-01 -1.0435529947280884e+00 + <_> + + 0 -1 403 2.7908000349998474e-01 + + -2.5818100571632385e-01 7.6780498027801514e-01 + <_> + + 0 -1 404 -4.4213000684976578e-02 + + -5.9798002243041992e-01 2.8039899468421936e-01 + <_> + + 0 -1 405 -1.4136999845504761e-02 + + 7.0987302064895630e-01 -2.5645199418067932e-01 + <_> + 91 + -3.6478610038757324e+00 + + <_> + + 0 -1 406 1.3771200180053711e-01 + + -5.5870598554611206e-01 1.0953769683837891e+00 + <_> + + 0 -1 407 3.4460999071598053e-02 + + -7.1171897649765015e-01 5.2899599075317383e-01 + <_> 
+        [... OpenCV cascade-classifier data (added file, hunk continues): repeated
+        decision-stump entries delimited by <_> tags, each giving two child/leaf
+        indices, a feature index and a node threshold on one line and the two leaf
+        values on the next, grouped into stages whose headers give the
+        weak-classifier count and the stage threshold. This span covers feature
+        indices 408–1194 and stage headers with counts 99, 115, 127, 135, 136 and
+        137 (stage thresholds roughly -3.87, -3.72, -3.56, -3.70, -3.43 and
+        -3.51) ...]
5.8695900440216064e-01 + <_> + + 0 -1 1195 -4.7180999070405960e-02 + + 6.9430100917816162e-01 -2.1816100180149078e-01 + <_> + + 0 -1 1196 -2.4893999099731445e-02 + + -6.4599299430847168e-01 1.5611599385738373e-01 + <_> + + 0 -1 1197 2.1944999694824219e-02 + + -2.7742000296711922e-02 -1.1346880197525024e+00 + <_> + + 0 -1 1198 1.8809899687767029e-01 + + -1.0076000355184078e-02 1.2429029941558838e+00 + <_> + + 0 -1 1199 -7.7872000634670258e-02 + + 8.5008001327514648e-01 -1.9015499949455261e-01 + <_> + + 0 -1 1200 -4.8769000917673111e-02 + + -2.0763080120086670e+00 1.2179400026798248e-01 + <_> + + 0 -1 1201 -1.7115000635385513e-02 + + -8.5687297582626343e-01 7.8760003671050072e-03 + <_> + + 0 -1 1202 -2.7499999850988388e-03 + + 3.8645499944686890e-01 -1.1391499638557434e-01 + <_> + + 0 -1 1203 -9.8793998360633850e-02 + + -1.7233899831771851e+00 -5.6063000112771988e-02 + <_> + + 0 -1 1204 -2.1936999633908272e-02 + + 5.4749399423599243e-01 -4.2481999844312668e-02 + <_> + + 0 -1 1205 6.1096999794244766e-02 + + -3.8945000618696213e-02 -1.0807880163192749e+00 + <_> + + 0 -1 1206 -2.4563999846577644e-02 + + 5.8311098814010620e-01 -9.7599998116493225e-04 + <_> + + 0 -1 1207 3.3752001821994781e-02 + + -1.3795999810099602e-02 -8.4730297327041626e-01 + <_> + + 0 -1 1208 3.8199000060558319e-02 + + 1.5114299952983856e-01 -7.9473400115966797e-01 + <_> + + 0 -1 1209 -2.0117999985814095e-02 + + 5.1579099893569946e-01 -2.1445399522781372e-01 + <_> + + 0 -1 1210 2.4734999984502792e-02 + + -2.2105000913143158e-02 4.2917698621749878e-01 + <_> + + 0 -1 1211 -2.4357000365853310e-02 + + -8.6201298236846924e-01 -3.6760000512003899e-03 + <_> + + 0 -1 1212 -2.6442000642418861e-02 + + -4.5397499203681946e-01 2.2462800145149231e-01 + <_> + + 0 -1 1213 -3.4429999068379402e-03 + + 1.3073000311851501e-01 -3.8622701168060303e-01 + <_> + + 0 -1 1214 1.0701700299978256e-01 + + 1.3158600032329559e-01 -7.9306900501251221e-01 + <_> + + 0 -1 1215 4.5152999460697174e-02 + + -2.5296801328659058e-01 4.0672400593757629e-01 + <_> + + 0 -1 1216 4.4349998235702515e-02 + + 2.2613000124692917e-02 7.9618102312088013e-01 + <_> + + 0 -1 1217 1.0839999886229634e-03 + + -3.9158400893211365e-01 1.1639100313186646e-01 + <_> + + 0 -1 1218 7.1433000266551971e-02 + + 8.2466997206211090e-02 1.2530590295791626e+00 + <_> + + 0 -1 1219 3.5838000476360321e-02 + + -1.8203300237655640e-01 7.7078700065612793e-01 + <_> + + 0 -1 1220 -2.0839000120759010e-02 + + -6.1744397878646851e-01 1.5891399979591370e-01 + <_> + + 0 -1 1221 4.2525801062583923e-01 + + -4.8978000879287720e-02 -1.8422030210494995e+00 + <_> + + 0 -1 1222 1.1408000253140926e-02 + + 1.7918199300765991e-01 -1.5383499860763550e-01 + <_> + + 0 -1 1223 -1.5364999882876873e-02 + + -8.4016501903533936e-01 -1.0280000278726220e-03 + <_> + + 0 -1 1224 -1.5212000347673893e-02 + + -1.8995699286460876e-01 1.7130999267101288e-01 + <_> + + 0 -1 1225 -1.8972000107169151e-02 + + -7.9541999101638794e-01 6.6800001077353954e-03 + <_> + + 0 -1 1226 -3.3330000005662441e-03 + + -2.3530800640583038e-01 2.4730099737644196e-01 + <_> + + 0 -1 1227 9.3248002231121063e-02 + + -5.4758001118898392e-02 -1.8324300050735474e+00 + <_> + + 0 -1 1228 -1.2555000372231007e-02 + + 2.6385200023651123e-01 -3.8526400923728943e-01 + <_> + + 0 -1 1229 -2.7070000767707825e-02 + + -6.6929799318313599e-01 2.0340999588370323e-02 + <_> + + 0 -1 1230 -2.3677000775933266e-02 + + 6.7265301942825317e-01 -1.4344000257551670e-02 + <_> + + 0 -1 1231 -1.4275000430643559e-02 + + 3.0186399817466736e-01 -2.8514400124549866e-01 + <_> + + 0 -1 1232 
2.8096999973058701e-02 + + 1.4766000211238861e-01 -1.4078520536422729e+00 + <_> + + 0 -1 1233 5.0840001553297043e-02 + + -1.8613600730895996e-01 7.9953002929687500e-01 + <_> + + 0 -1 1234 1.1505999602377415e-02 + + 1.9118399918079376e-01 -8.5035003721714020e-02 + <_> + + 0 -1 1235 -1.4661000110208988e-02 + + 4.5239299535751343e-01 -2.2205199301242828e-01 + <_> + + 0 -1 1236 2.2842499613761902e-01 + + 1.3488399982452393e-01 -1.2894610166549683e+00 + <_> + + 0 -1 1237 1.1106900125741959e-01 + + -2.0753799378871918e-01 5.4561597108840942e-01 + <_> + + 0 -1 1238 3.2450000289827585e-03 + + 3.2053700089454651e-01 -1.6403500735759735e-01 + <_> + + 0 -1 1239 8.5309997200965881e-02 + + -2.0210500061511993e-01 5.3296798467636108e-01 + <_> + + 0 -1 1240 2.2048000246286392e-02 + + 1.5698599815368652e-01 -1.7014099657535553e-01 + <_> + + 0 -1 1241 -1.5676999464631081e-02 + + -6.2863498926162720e-01 4.0761999785900116e-02 + <_> + + 0 -1 1242 3.3112901449203491e-01 + + 1.6609300673007965e-01 -1.0326379537582397e+00 + <_> + + 0 -1 1243 8.8470000773668289e-03 + + -2.5076198577880859e-01 3.1660598516464233e-01 + <_> + + 0 -1 1244 4.6080000698566437e-02 + + 1.5352100133895874e-01 -1.6333500146865845e+00 + <_> + + 0 -1 1245 -3.7703000009059906e-02 + + 5.6873798370361328e-01 -2.0102599263191223e-01 + <_> + 159 + -3.5939640998840332e+00 + + <_> + + 0 -1 1246 -8.1808999180793762e-02 + + 5.7124799489974976e-01 -6.7438799142837524e-01 + <_> + + 0 -1 1247 2.1761199831962585e-01 + + -3.8610199093818665e-01 9.0343999862670898e-01 + <_> + + 0 -1 1248 1.4878000132739544e-02 + + 2.2241599857807159e-01 -1.2779350280761719e+00 + <_> + + 0 -1 1249 5.2434999495744705e-02 + + -2.8690400719642639e-01 7.5742298364639282e-01 + <_> + + 0 -1 1250 9.1429995372891426e-03 + + -6.4880400896072388e-01 2.2268800437450409e-01 + <_> + + 0 -1 1251 7.9169999808073044e-03 + + -2.9253599047660828e-01 3.1030198931694031e-01 + <_> + + 0 -1 1252 -2.6084000244736671e-02 + + 4.5532700419425964e-01 -3.8500601053237915e-01 + <_> + + 0 -1 1253 -2.9400000348687172e-03 + + -5.1264399290084839e-01 2.7432298660278320e-01 + <_> + + 0 -1 1254 5.7130001485347748e-02 + + 1.5788000077009201e-02 -1.2133100032806396e+00 + <_> + + 0 -1 1255 -6.1309998854994774e-03 + + 3.9174601435661316e-01 -3.0866798758506775e-01 + <_> + + 0 -1 1256 -4.0405001491308212e-02 + + 1.1901949644088745e+00 -2.0347100496292114e-01 + <_> + + 0 -1 1257 -2.0297000184655190e-02 + + -6.8239498138427734e-01 2.0458699762821198e-01 + <_> + + 0 -1 1258 -1.7188999801874161e-02 + + -8.4939897060394287e-01 3.8433000445365906e-02 + <_> + + 0 -1 1259 -2.4215999990701675e-02 + + -1.1039420366287231e+00 1.5975099802017212e-01 + <_> + + 0 -1 1260 5.6869000196456909e-02 + + -1.9595299661159515e-01 1.1806850433349609e+00 + <_> + + 0 -1 1261 3.6199999158270657e-04 + + -4.0847799181938171e-01 3.2938599586486816e-01 + <_> + + 0 -1 1262 9.9790003150701523e-03 + + -2.9673001170158386e-01 4.1547900438308716e-01 + <_> + + 0 -1 1263 -5.2625000476837158e-02 + + -1.3069299459457397e+00 1.7862600088119507e-01 + <_> + + 0 -1 1264 -1.3748999685049057e-02 + + 2.3665800690650940e-01 -4.4536599516868591e-01 + <_> + + 0 -1 1265 -3.0517000705003738e-02 + + 2.9018300771713257e-01 -1.1210100352764130e-01 + <_> + + 0 -1 1266 -3.0037501454353333e-01 + + -2.4237680435180664e+00 -4.2830999940633774e-02 + <_> + + 0 -1 1267 -3.5990998148918152e-02 + + 8.8206499814987183e-01 -4.7012999653816223e-02 + <_> + + 0 -1 1268 -5.5112000554800034e-02 + + 8.0119001865386963e-01 -2.0490999519824982e-01 + <_> + + 0 -1 1269 
3.3762000501155853e-02 + + 1.4617599546909332e-01 -1.1349489688873291e+00 + <_> + + 0 -1 1270 -8.2710003480315208e-03 + + -8.1604897975921631e-01 1.8988000229001045e-02 + <_> + + 0 -1 1271 -5.4399999789893627e-03 + + -7.0980900526046753e-01 2.2343699634075165e-01 + <_> + + 0 -1 1272 3.1059999018907547e-03 + + -7.2808599472045898e-01 4.0224999189376831e-02 + <_> + + 0 -1 1273 5.3651999682188034e-02 + + 1.7170900106430054e-01 -1.1163710355758667e+00 + <_> + + 0 -1 1274 -1.2541399896144867e-01 + + 2.7680370807647705e+00 -1.4611500501632690e-01 + <_> + + 0 -1 1275 9.2542000114917755e-02 + + 1.1609800159931183e-01 -3.9635529518127441e+00 + <_> + + 0 -1 1276 3.8513999432325363e-02 + + -7.6399999670684338e-03 -9.8780900239944458e-01 + <_> + + 0 -1 1277 -2.0200000144541264e-03 + + 2.3059999942779541e-01 -7.4970299005508423e-01 + <_> + + 0 -1 1278 9.7599998116493225e-03 + + -3.1137999892234802e-01 3.0287799239158630e-01 + <_> + + 0 -1 1279 2.4095000699162483e-02 + + -4.9529999494552612e-02 5.2690100669860840e-01 + <_> + + 0 -1 1280 -1.7982000485062599e-02 + + -1.1610640287399292e+00 -5.7000000961124897e-03 + <_> + + 0 -1 1281 -1.0555000044405460e-02 + + -2.7189099788665771e-01 2.3597699403762817e-01 + <_> + + 0 -1 1282 -7.2889998555183411e-03 + + -5.4219102859497070e-01 8.1914000213146210e-02 + <_> + + 0 -1 1283 2.3939000442624092e-02 + + 1.7975799739360809e-01 -6.7049497365951538e-01 + <_> + + 0 -1 1284 -1.8365999683737755e-02 + + 6.2664300203323364e-01 -2.0970100164413452e-01 + <_> + + 0 -1 1285 1.5715999528765678e-02 + + 2.4193699657917023e-01 -1.0444309711456299e+00 + <_> + + 0 -1 1286 -4.8804000020027161e-02 + + -9.4060599803924561e-01 -3.7519999314099550e-03 + <_> + + 0 -1 1287 6.7130001261830330e-03 + + -7.5432002544403076e-02 6.1575299501419067e-01 + <_> + + 0 -1 1288 9.7770001739263535e-03 + + 3.9285000413656235e-02 -8.4810298681259155e-01 + <_> + + 0 -1 1289 1.4744999818503857e-02 + + 1.6968999803066254e-01 -5.0906401872634888e-01 + <_> + + 0 -1 1290 9.7079001367092133e-02 + + -3.3103000372648239e-02 -1.2706379890441895e+00 + <_> + + 0 -1 1291 4.8285998404026031e-02 + + 9.4329997897148132e-02 2.7203190326690674e+00 + <_> + + 0 -1 1292 9.7810002043843269e-03 + + -3.9533400535583496e-01 1.5363800525665283e-01 + <_> + + 0 -1 1293 -3.9893999695777893e-02 + + -2.2767400741577148e-01 1.3913999497890472e-01 + <_> + + 0 -1 1294 2.2848000749945641e-02 + + -2.7391999959945679e-01 3.4199500083923340e-01 + <_> + + 0 -1 1295 6.7179999314248562e-03 + + -1.0874299705028534e-01 4.8125401139259338e-01 + <_> + + 0 -1 1296 5.9599999338388443e-02 + + -4.9522001296281815e-02 -2.0117089748382568e+00 + <_> + + 0 -1 1297 6.9340001791715622e-03 + + 1.5037499368190765e-01 -1.1271899938583374e-01 + <_> + + 0 -1 1298 1.5757000073790550e-02 + + -2.0885000005364418e-02 -1.1651979684829712e+00 + <_> + + 0 -1 1299 -4.9690000712871552e-02 + + -8.0213499069213867e-01 1.4372299611568451e-01 + <_> + + 0 -1 1300 5.2347000688314438e-02 + + -2.0836700499057770e-01 6.1677598953247070e-01 + <_> + + 0 -1 1301 2.2430999204516411e-02 + + 2.0305900275707245e-01 -7.5326198339462280e-01 + <_> + + 0 -1 1302 4.1142001748085022e-02 + + -1.8118199706077576e-01 1.0033359527587891e+00 + <_> + + 0 -1 1303 -2.1632000803947449e-02 + + 4.9998998641967773e-01 -3.4662999212741852e-02 + <_> + + 0 -1 1304 -8.2808002829551697e-02 + + 1.1711900234222412e+00 -1.8433600664138794e-01 + <_> + + 0 -1 1305 8.5060000419616699e-03 + + -6.3225001096725464e-02 2.9024899005889893e-01 + <_> + + 0 -1 1306 7.8905001282691956e-02 + + -2.3274500668048859e-01 
5.9695798158645630e-01 + <_> + + 0 -1 1307 -9.0207003057003021e-02 + + -8.2211899757385254e-01 1.7772200703620911e-01 + <_> + + 0 -1 1308 -2.9269000515341759e-02 + + 6.0860699415206909e-01 -2.1468900144100189e-01 + <_> + + 0 -1 1309 6.9499998353421688e-03 + + -4.2665999382734299e-02 6.0512101650238037e-01 + <_> + + 0 -1 1310 -8.0629996955394745e-03 + + -1.1508270502090454e+00 -2.7286000549793243e-02 + <_> + + 0 -1 1311 1.9595999270677567e-02 + + -9.1880001127719879e-03 5.6857800483703613e-01 + <_> + + 0 -1 1312 -1.4884999953210354e-02 + + 3.7658798694610596e-01 -2.7149501442909241e-01 + <_> + + 0 -1 1313 2.5217000395059586e-02 + + -9.9991001188755035e-02 2.4664700031280518e-01 + <_> + + 0 -1 1314 -1.5855999663472176e-02 + + 6.6826701164245605e-01 -2.0614700019359589e-01 + <_> + + 0 -1 1315 2.9441000893712044e-02 + + 1.5832200646400452e-01 -7.6060897111892700e-01 + <_> + + 0 -1 1316 -8.5279997438192368e-03 + + 3.8212299346923828e-01 -2.5407800078392029e-01 + <_> + + 0 -1 1317 2.4421999230980873e-02 + + 1.5105099976062775e-01 -2.8752899169921875e-01 + <_> + + 0 -1 1318 -3.3886998891830444e-02 + + -6.8002802133560181e-01 3.4327000379562378e-02 + <_> + + 0 -1 1319 -2.0810000132769346e-03 + + 2.5413900613784790e-01 -2.6859098672866821e-01 + <_> + + 0 -1 1320 3.0358999967575073e-02 + + -3.0842000618577003e-02 -1.1476809978485107e+00 + <_> + + 0 -1 1321 4.0210001170635223e-03 + + -3.5253798961639404e-01 2.9868099093437195e-01 + <_> + + 0 -1 1322 2.7681000530719757e-02 + + -3.8148999214172363e-02 -1.3262039422988892e+00 + <_> + + 0 -1 1323 7.9039996489882469e-03 + + -2.3737000301480293e-02 7.0503002405166626e-01 + <_> + + 0 -1 1324 4.4031001627445221e-02 + + 1.0674899816513062e-01 -4.5261201262474060e-01 + <_> + + 0 -1 1325 -3.2370999455451965e-02 + + 4.6674901247024536e-01 -6.1546999961137772e-02 + <_> + + 0 -1 1326 2.0933000370860100e-02 + + -2.8447899222373962e-01 4.3845599889755249e-01 + <_> + + 0 -1 1327 2.5227999314665794e-02 + + -2.2537000477313995e-02 7.0389097929000854e-01 + <_> + + 0 -1 1328 6.5520000644028187e-03 + + -3.2554900646209717e-01 2.4023699760437012e-01 + <_> + + 0 -1 1329 -5.8557998389005661e-02 + + -1.2227720022201538e+00 1.1668799817562103e-01 + <_> + + 0 -1 1330 3.1899999827146530e-02 + + -1.9305000081658363e-02 -1.0973169803619385e+00 + <_> + + 0 -1 1331 -3.0445000156760216e-02 + + 6.5582501888275146e-01 7.5090996921062469e-02 + <_> + + 0 -1 1332 1.4933000318706036e-02 + + -5.2155798673629761e-01 1.1523099988698959e-01 + <_> + + 0 -1 1333 -4.9008000642061234e-02 + + -7.8303998708724976e-01 1.6657200455665588e-01 + <_> + + 0 -1 1334 8.3158999681472778e-02 + + -2.6879999786615372e-03 -8.5282301902770996e-01 + <_> + + 0 -1 1335 2.3902999237179756e-02 + + -5.1010999828577042e-02 4.1999098658561707e-01 + <_> + + 0 -1 1336 1.6428999602794647e-02 + + 1.9232999533414841e-02 -6.5049099922180176e-01 + <_> + + 0 -1 1337 -1.1838000267744064e-02 + + -6.2409800291061401e-01 1.5411199629306793e-01 + <_> + + 0 -1 1338 -1.6799999866634607e-04 + + 1.7589199542999268e-01 -3.4338700771331787e-01 + <_> + + 0 -1 1339 1.9193999469280243e-02 + + 4.3418999761343002e-02 7.9069197177886963e-01 + <_> + + 0 -1 1340 -1.0032000020146370e-02 + + 4.5648899674415588e-01 -2.2494800388813019e-01 + <_> + + 0 -1 1341 -1.4004000462591648e-02 + + 3.3570998907089233e-01 -4.8799999058246613e-03 + <_> + + 0 -1 1342 -1.0319899767637253e-01 + + -2.3378000259399414e+00 -5.8933001011610031e-02 + <_> + + 0 -1 1343 -9.5697000622749329e-02 + + -6.6153901815414429e-01 2.0098599791526794e-01 + <_> + + 0 -1 1344 
-4.1480999439954758e-02 + + 4.5939201116561890e-01 -2.2314099967479706e-01 + <_> + + 0 -1 1345 2.4099999573081732e-03 + + -2.6898598670959473e-01 2.4922999739646912e-01 + <_> + + 0 -1 1346 1.0724999755620956e-01 + + -1.8640199303627014e-01 7.2769802808761597e-01 + <_> + + 0 -1 1347 3.1870000530034304e-03 + + -2.4608999490737915e-02 2.8643900156021118e-01 + <_> + + 0 -1 1348 2.9167000204324722e-02 + + -3.4683000296354294e-02 -1.1162580251693726e+00 + <_> + + 0 -1 1349 1.1287000030279160e-02 + + 6.3760001212358475e-03 6.6632097959518433e-01 + <_> + + 0 -1 1350 -1.2001000344753265e-02 + + 4.2420101165771484e-01 -2.6279801130294800e-01 + <_> + + 0 -1 1351 -1.2695999816060066e-02 + + -2.1957000717520714e-02 1.8936799466609955e-01 + <_> + + 0 -1 1352 2.4597000330686569e-02 + + -3.4963998943567276e-02 -1.0989320278167725e+00 + <_> + + 0 -1 1353 4.5953001827001572e-02 + + 1.1109799891710281e-01 -2.9306049346923828e+00 + <_> + + 0 -1 1354 -2.7241000905632973e-02 + + 2.9101699590682983e-01 -2.7407899498939514e-01 + <_> + + 0 -1 1355 4.0063999593257904e-02 + + 1.1877900362014771e-01 -6.2801802158355713e-01 + <_> + + 0 -1 1356 2.3055000230669975e-02 + + 1.4813800156116486e-01 -3.7007498741149902e-01 + <_> + + 0 -1 1357 -2.3737000301480293e-02 + + -5.3724801540374756e-01 1.9358199834823608e-01 + <_> + + 0 -1 1358 7.7522002160549164e-02 + + -6.0194000601768494e-02 -1.9489669799804688e+00 + <_> + + 0 -1 1359 -1.3345000334084034e-02 + + -4.5229598879814148e-01 1.8741500377655029e-01 + <_> + + 0 -1 1360 -2.1719999611377716e-02 + + 1.2144249677658081e+00 -1.5365800261497498e-01 + <_> + + 0 -1 1361 -7.1474999189376831e-02 + + -2.3047130107879639e+00 1.0999900102615356e-01 + <_> + + 0 -1 1362 -5.4999999701976776e-03 + + -7.1855199337005615e-01 2.0100999623537064e-02 + <_> + + 0 -1 1363 2.6740999892354012e-02 + + 7.3545001447200775e-02 9.8786002397537231e-01 + <_> + + 0 -1 1364 -3.9407998323440552e-02 + + -1.2227380275726318e+00 -4.3506998568773270e-02 + <_> + + 0 -1 1365 2.5888999924063683e-02 + + 1.3409300148487091e-01 -1.1770780086517334e+00 + <_> + + 0 -1 1366 4.8925001174211502e-02 + + -3.0810000374913216e-02 -9.3479502201080322e-01 + <_> + + 0 -1 1367 3.6892998963594437e-02 + + 1.3333700597286224e-01 -1.4998290538787842e+00 + <_> + + 0 -1 1368 7.8929997980594635e-02 + + -1.4538800716400146e-01 1.5631790161132812e+00 + <_> + + 0 -1 1369 2.9006000608205795e-02 + + 1.9383700191974640e-01 -6.7642802000045776e-01 + <_> + + 0 -1 1370 6.3089998438954353e-03 + + -3.7465399503707886e-01 1.0857500135898590e-01 + <_> + + 0 -1 1371 -6.5830998122692108e-02 + + 8.1059402227401733e-01 3.0201999470591545e-02 + <_> + + 0 -1 1372 -6.8965002894401550e-02 + + 8.3772599697113037e-01 -1.7140999436378479e-01 + <_> + + 0 -1 1373 -1.1669100075960159e-01 + + -9.4647198915481567e-01 1.3123199343681335e-01 + <_> + + 0 -1 1374 -1.3060000492259860e-03 + + 4.6007998287677765e-02 -5.2011597156524658e-01 + <_> + + 0 -1 1375 -4.4558998197317123e-02 + + -1.9423669576644897e+00 1.3200700283050537e-01 + <_> + + 0 -1 1376 5.1033001393079758e-02 + + -2.1480999886989594e-01 4.8673900961875916e-01 + <_> + + 0 -1 1377 -3.1578000634908676e-02 + + 5.9989798069000244e-01 7.9159997403621674e-03 + <_> + + 0 -1 1378 2.1020000800490379e-02 + + -2.2069500386714935e-01 5.4046201705932617e-01 + <_> + + 0 -1 1379 -1.3824200630187988e-01 + + 6.2957501411437988e-01 -2.1712999790906906e-02 + <_> + + 0 -1 1380 5.2228998392820358e-02 + + -2.3360900580883026e-01 4.9760800600051880e-01 + <_> + + 0 -1 1381 2.5884000584483147e-02 + + 1.8041999638080597e-01 
-2.2039200365543365e-01 + <_> + + 0 -1 1382 -1.2138999998569489e-02 + + -6.9731897115707397e-01 1.5712000429630280e-02 + <_> + + 0 -1 1383 -2.4237999692559242e-02 + + 3.4593299031257629e-01 7.1469999849796295e-02 + <_> + + 0 -1 1384 -2.5272000581026077e-02 + + -8.7583297491073608e-01 -9.8240002989768982e-03 + <_> + + 0 -1 1385 1.2597000226378441e-02 + + 2.3649999499320984e-01 -2.8731200098991394e-01 + <_> + + 0 -1 1386 5.7330999523401260e-02 + + -6.1530999839305878e-02 -2.2326040267944336e+00 + <_> + + 0 -1 1387 1.6671000048518181e-02 + + -1.9850100576877594e-01 4.0810701251029968e-01 + <_> + + 0 -1 1388 -2.2818999364972115e-02 + + 9.6487599611282349e-01 -2.0245699584484100e-01 + <_> + + 0 -1 1389 3.7000001611886546e-05 + + -5.8908998966217041e-02 2.7055400609970093e-01 + <_> + + 0 -1 1390 -7.6700001955032349e-03 + + -4.5317101478576660e-01 8.9628003537654877e-02 + <_> + + 0 -1 1391 9.4085998833179474e-02 + + 1.1604599654674530e-01 -1.0951169729232788e+00 + <_> + + 0 -1 1392 -6.2267001718282700e-02 + + 1.8096530437469482e+00 -1.4773200452327728e-01 + <_> + + 0 -1 1393 1.7416000366210938e-02 + + 2.3068200051784515e-01 -4.2417600750923157e-01 + <_> + + 0 -1 1394 -2.2066000849008560e-02 + + 4.9270299077033997e-01 -2.0630900561809540e-01 + <_> + + 0 -1 1395 -1.0404000058770180e-02 + + 6.0924297571182251e-01 2.8130000457167625e-02 + <_> + + 0 -1 1396 -9.3670003116130829e-03 + + 4.0171200037002563e-01 -2.1681700646877289e-01 + <_> + + 0 -1 1397 -2.9039999470114708e-02 + + -8.4876501560211182e-01 1.4246800541877747e-01 + <_> + + 0 -1 1398 -2.1061999723315239e-02 + + -7.9198300838470459e-01 -1.2595999985933304e-02 + <_> + + 0 -1 1399 -3.7000998854637146e-02 + + -6.7488902807235718e-01 1.2830400466918945e-01 + <_> + + 0 -1 1400 1.0735999792814255e-02 + + 3.6779999732971191e-02 -6.3393002748489380e-01 + <_> + + 0 -1 1401 1.6367599368095398e-01 + + 1.3803899288177490e-01 -4.7189000248908997e-01 + <_> + + 0 -1 1402 9.4917997717857361e-02 + + -1.3855700194835663e-01 1.9492419958114624e+00 + <_> + + 0 -1 1403 3.5261999815702438e-02 + + 1.3721899688243866e-01 -2.1186530590057373e+00 + <_> + + 0 -1 1404 1.2811000458896160e-02 + + -2.0008100569248199e-01 4.9507799744606018e-01 + <_> + 155 + -3.3933560848236084e+00 + + <_> + + 0 -1 1405 1.3904400169849396e-01 + + -4.6581199765205383e-01 7.6431602239608765e-01 + <_> + + 0 -1 1406 1.1916999705135822e-02 + + -9.4398999214172363e-01 3.9726299047470093e-01 + <_> + + 0 -1 1407 -1.0006999596953392e-02 + + 3.2718798518180847e-01 -6.3367402553558350e-01 + <_> + + 0 -1 1408 -6.0479999519884586e-03 + + 2.7427899837493896e-01 -5.7446998357772827e-01 + <_> + + 0 -1 1409 -1.2489999644458294e-03 + + 2.3629300296306610e-01 -6.8593502044677734e-01 + <_> + + 0 -1 1410 3.2382000237703323e-02 + + -5.7630199193954468e-01 2.7492699027061462e-01 + <_> + + 0 -1 1411 -1.3957999646663666e-02 + + -6.1061501502990723e-01 2.4541600048542023e-01 + <_> + + 0 -1 1412 1.1159999994561076e-03 + + -5.6539100408554077e-01 2.7179300785064697e-01 + <_> + + 0 -1 1413 2.7000000045518391e-05 + + -8.0235999822616577e-01 1.1509100347757339e-01 + <_> + + 0 -1 1414 -2.5700000696815550e-04 + + -8.1205898523330688e-01 2.3844699561595917e-01 + <_> + + 0 -1 1415 4.0460000745952129e-03 + + 1.3909600675106049e-01 -6.6163200139999390e-01 + <_> + + 0 -1 1416 1.4356000348925591e-02 + + -1.6485199332237244e-01 4.1901698708534241e-01 + <_> + + 0 -1 1417 -5.5374998599290848e-02 + + 1.4425870180130005e+00 -1.8820199370384216e-01 + <_> + + 0 -1 1418 9.3594998121261597e-02 + + 1.3548299670219421e-01 
-9.1636097431182861e-01 + <_> + + 0 -1 1419 2.6624999940395355e-02 + + -3.3748298883438110e-01 3.9233601093292236e-01 + <_> + + 0 -1 1420 3.7469998933374882e-03 + + -1.1615400016307831e-01 4.4399300217628479e-01 + <_> + + 0 -1 1421 -3.1886000186204910e-02 + + -9.9498301744461060e-01 1.6120000509545207e-03 + <_> + + 0 -1 1422 -2.2600000724196434e-02 + + -4.8067399859428406e-01 1.7007300257682800e-01 + <_> + + 0 -1 1423 2.5202000513672829e-02 + + 3.5580001771450043e-02 -8.0215400457382202e-01 + <_> + + 0 -1 1424 -3.1036999076604843e-02 + + -1.0895340442657471e+00 1.8081900477409363e-01 + <_> + + 0 -1 1425 -2.6475999504327774e-02 + + 9.5671200752258301e-01 -2.1049399673938751e-01 + <_> + + 0 -1 1426 -1.3853999786078930e-02 + + -1.0370320081710815e+00 2.2166700661182404e-01 + <_> + + 0 -1 1427 -6.2925003468990326e-02 + + 9.0199398994445801e-01 -1.9085299968719482e-01 + <_> + + 0 -1 1428 -4.4750999659299850e-02 + + -1.0119110345840454e+00 1.4691199362277985e-01 + <_> + + 0 -1 1429 -2.0428000018000603e-02 + + 6.1624497175216675e-01 -2.3552699387073517e-01 + <_> + + 0 -1 1430 -8.0329999327659607e-03 + + -8.3279997110366821e-02 2.1728700399398804e-01 + <_> + + 0 -1 1431 8.7280003353953362e-03 + + 6.5458998084068298e-02 -6.0318702459335327e-01 + <_> + + 0 -1 1432 -2.7202000841498375e-02 + + -9.3447399139404297e-01 1.5270000696182251e-01 + <_> + + 0 -1 1433 -1.6471000388264656e-02 + + -8.4177100658416748e-01 1.3332000002264977e-02 + <_> + + 0 -1 1434 -1.3744000345468521e-02 + + 6.0567200183868408e-01 -9.2021003365516663e-02 + <_> + + 0 -1 1435 2.9164999723434448e-02 + + -2.8114000335335732e-02 -1.4014569520950317e+00 + <_> + + 0 -1 1436 3.7457000464200974e-02 + + 1.3080599904060364e-01 -4.9382498860359192e-01 + <_> + + 0 -1 1437 -2.5070000439882278e-02 + + -1.1289390325546265e+00 -1.4600000344216824e-02 + <_> + + 0 -1 1438 -6.3812002539634705e-02 + + 7.5871598720550537e-01 -1.8200000049546361e-03 + <_> + + 0 -1 1439 -9.3900002539157867e-03 + + 2.9936400055885315e-01 -2.9487800598144531e-01 + <_> + + 0 -1 1440 -7.6000002445653081e-04 + + 1.9725000485777855e-02 1.9993899762630463e-01 + <_> + + 0 -1 1441 -2.1740999072790146e-02 + + -8.5247898101806641e-01 4.9169998615980148e-02 + <_> + + 0 -1 1442 -1.7869999632239342e-02 + + -5.9985999017953873e-02 1.5222500264644623e-01 + <_> + + 0 -1 1443 -2.4831000715494156e-02 + + 3.5603401064872742e-01 -2.6259899139404297e-01 + <_> + + 0 -1 1444 1.5715500712394714e-01 + + 1.5599999460391700e-04 1.0428730249404907e+00 + <_> + + 0 -1 1445 6.9026999175548553e-02 + + -3.3006999641656876e-02 -1.1796669960021973e+00 + <_> + + 0 -1 1446 -1.1021999642252922e-02 + + 5.8987700939178467e-01 -5.7647999376058578e-02 + <_> + + 0 -1 1447 -1.3834999874234200e-02 + + 5.9502798318862915e-01 -2.4418599903583527e-01 + <_> + + 0 -1 1448 -3.0941000208258629e-02 + + -1.1723799705505371e+00 1.6907000541687012e-01 + <_> + + 0 -1 1449 2.1258000284433365e-02 + + -1.8900999799370766e-02 -1.0684759616851807e+00 + <_> + + 0 -1 1450 9.3079999089241028e-02 + + 1.6305600106716156e-01 -1.3375270366668701e+00 + <_> + + 0 -1 1451 2.9635999351739883e-02 + + -2.2524799406528473e-01 4.5400100946426392e-01 + <_> + + 0 -1 1452 -1.2199999764561653e-04 + + 2.7409100532531738e-01 -3.7371399998664856e-01 + <_> + + 0 -1 1453 -4.2098000645637512e-02 + + -7.5828802585601807e-01 1.7137000337243080e-02 + <_> + + 0 -1 1454 -2.2505000233650208e-02 + + -2.2759300470352173e-01 2.3698699474334717e-01 + <_> + + 0 -1 1455 -1.2862999923527241e-02 + + 1.9252400100231171e-01 -3.2127100229263306e-01 + <_> + + 0 -1 1456 
2.7860000729560852e-02 + + 1.6723699867725372e-01 -1.0209059715270996e+00 + <_> + + 0 -1 1457 -2.7807999402284622e-02 + + 1.2824759483337402e+00 -1.7225299775600433e-01 + <_> + + 0 -1 1458 -6.1630001291632652e-03 + + -5.4072898626327515e-01 2.3885700106620789e-01 + <_> + + 0 -1 1459 -2.0436000078916550e-02 + + 6.3355398178100586e-01 -2.1090599894523621e-01 + <_> + + 0 -1 1460 -1.2307999655604362e-02 + + -4.9778199195861816e-01 1.7402599751949310e-01 + <_> + + 0 -1 1461 -4.0493998676538467e-02 + + -1.1848740577697754e+00 -3.3890999853610992e-02 + <_> + + 0 -1 1462 2.9657000675797462e-02 + + 2.1740999072790146e-02 1.0069919824600220e+00 + <_> + + 0 -1 1463 6.8379999138414860e-03 + + 2.9217999428510666e-02 -5.9906297922134399e-01 + <_> + + 0 -1 1464 1.6164999455213547e-02 + + -2.1000799536705017e-01 3.7637299299240112e-01 + <_> + + 0 -1 1465 5.0193000584840775e-02 + + 2.5319999549537897e-03 -7.1668201684951782e-01 + <_> + + 0 -1 1466 1.9680000841617584e-03 + + -2.1921400725841522e-01 3.2298699021339417e-01 + <_> + + 0 -1 1467 2.4979999288916588e-02 + + -9.6840001642704010e-03 -7.7572900056838989e-01 + <_> + + 0 -1 1468 -1.5809999778866768e-02 + + 4.4637501239776611e-01 -6.1760000884532928e-02 + <_> + + 0 -1 1469 3.7206999957561493e-02 + + -2.0495399832725525e-01 5.7722198963165283e-01 + <_> + + 0 -1 1470 -7.9264998435974121e-02 + + -7.6745402812957764e-01 1.2550400197505951e-01 + <_> + + 0 -1 1471 -1.7152000218629837e-02 + + -1.4121830463409424e+00 -5.1704000681638718e-02 + <_> + + 0 -1 1472 3.2740000635385513e-02 + + 1.9334000349044800e-01 -6.3633698225021362e-01 + <_> + + 0 -1 1473 -1.1756999790668488e-01 + + 8.4325402975082397e-01 -1.8018600344657898e-01 + <_> + + 0 -1 1474 1.2057200074195862e-01 + + 1.2530000507831573e-01 -2.1213600635528564e+00 + <_> + + 0 -1 1475 4.2779999785125256e-03 + + -4.6604400873184204e-01 8.9643999934196472e-02 + <_> + + 0 -1 1476 -7.2544999420642853e-02 + + 5.1826500892639160e-01 1.6823999583721161e-02 + <_> + + 0 -1 1477 1.7710599303245544e-01 + + -3.0910000205039978e-02 -1.1046639680862427e+00 + <_> + + 0 -1 1478 8.4229996427893639e-03 + + 2.4445800483226776e-01 -3.8613098859786987e-01 + <_> + + 0 -1 1479 -1.3035000301897526e-02 + + 9.8004400730133057e-01 -1.7016500234603882e-01 + <_> + + 0 -1 1480 1.8912000581622124e-02 + + 2.0248499512672424e-01 -3.8545900583267212e-01 + <_> + + 0 -1 1481 2.1447999402880669e-02 + + -2.5717198848724365e-01 3.5181200504302979e-01 + <_> + + 0 -1 1482 6.3357003033161163e-02 + + 1.6994799673557281e-01 -9.1383802890777588e-01 + <_> + + 0 -1 1483 -3.2435998320579529e-02 + + -8.5681599378585815e-01 -2.1680999547243118e-02 + <_> + + 0 -1 1484 -2.3564999923110008e-02 + + 5.6115597486495972e-01 -2.2400000307243317e-04 + <_> + + 0 -1 1485 1.8789000809192657e-02 + + -2.5459799170494080e-01 3.4512901306152344e-01 + <_> + + 0 -1 1486 3.1042000278830528e-02 + + 7.5719999149441719e-03 3.4800198674201965e-01 + <_> + + 0 -1 1487 -1.1226999573409557e-02 + + -6.0219800472259521e-01 4.2814999818801880e-02 + <_> + + 0 -1 1488 -1.2845999561250210e-02 + + 4.2020401358604431e-01 -5.3801000118255615e-02 + <_> + + 0 -1 1489 -1.2791999615728855e-02 + + 2.2724500298500061e-01 -3.2398000359535217e-01 + <_> + + 0 -1 1490 6.8651996552944183e-02 + + 9.3532003462314606e-02 10. 
+ <_> + + 0 -1 1491 5.2789999172091484e-03 + + -2.6926299929618835e-01 3.3303201198577881e-01 + <_> + + 0 -1 1492 -3.8779001682996750e-02 + + -7.2365301847457886e-01 1.7806500196456909e-01 + <_> + + 0 -1 1493 6.1820000410079956e-03 + + -3.5119399428367615e-01 1.6586300730705261e-01 + <_> + + 0 -1 1494 1.7515200376510620e-01 + + 1.1623100191354752e-01 -1.5419290065765381e+00 + <_> + + 0 -1 1495 1.1627999693155289e-01 + + -9.1479998081922531e-03 -9.9842602014541626e-01 + <_> + + 0 -1 1496 -2.2964000701904297e-02 + + 2.0565399527549744e-01 1.5432000160217285e-02 + <_> + + 0 -1 1497 -5.1410000771284103e-02 + + 5.8072400093078613e-01 -2.0118400454521179e-01 + <_> + + 0 -1 1498 2.2474199533462524e-01 + + 1.8728999421000481e-02 1.0829299688339233e+00 + <_> + + 0 -1 1499 9.4860000535845757e-03 + + -3.3171299099922180e-01 1.9902999699115753e-01 + <_> + + 0 -1 1500 -1.1846300214529037e-01 + + 1.3711010217666626e+00 6.8926997482776642e-02 + <_> + + 0 -1 1501 3.7810999900102615e-02 + + -9.3600002583116293e-04 -8.3996999263763428e-01 + <_> + + 0 -1 1502 2.2202000021934509e-02 + + -1.1963999830186367e-02 3.6673998832702637e-01 + <_> + + 0 -1 1503 -3.6366000771522522e-02 + + 3.7866500020027161e-01 -2.7714800834655762e-01 + <_> + + 0 -1 1504 -1.3184699416160583e-01 + + -2.7481179237365723e+00 1.0666900128126144e-01 + <_> + + 0 -1 1505 -4.1655998677015305e-02 + + 4.7524300217628479e-01 -2.3249800503253937e-01 + <_> + + 0 -1 1506 -3.3151999115943909e-02 + + -5.7929402589797974e-01 1.7434400320053101e-01 + <_> + + 0 -1 1507 1.5769999474287033e-02 + + -1.1284000240266323e-02 -8.3701401948928833e-01 + <_> + + 0 -1 1508 -3.9363000541925430e-02 + + 3.4821599721908569e-01 -1.7455400526523590e-01 + <_> + + 0 -1 1509 -6.7849002778530121e-02 + + 1.4225699901580811e+00 -1.4765599370002747e-01 + <_> + + 0 -1 1510 -2.6775000616908073e-02 + + 2.3947000503540039e-01 1.3271999545395374e-02 + <_> + + 0 -1 1511 3.9919000118970871e-02 + + -8.9999996125698090e-03 -7.5938898324966431e-01 + <_> + + 0 -1 1512 1.0065600275993347e-01 + + -1.8685000017285347e-02 7.6245301961898804e-01 + <_> + + 0 -1 1513 -8.1022001802921295e-02 + + -9.0439099073410034e-01 -8.5880002006888390e-03 + <_> + + 0 -1 1514 -2.1258000284433365e-02 + + -2.1319599449634552e-01 2.1919700503349304e-01 + <_> + + 0 -1 1515 -1.0630999691784382e-02 + + 1.9598099589347839e-01 -3.5768100619316101e-01 + <_> + + 0 -1 1516 8.1300002057105303e-04 + + -9.2794999480247498e-02 2.6145899295806885e-01 + <_> + + 0 -1 1517 3.4650000743567944e-03 + + -5.5336099863052368e-01 2.7386000379920006e-02 + <_> + + 0 -1 1518 1.8835999071598053e-02 + + 1.8446099758148193e-01 -6.6934299468994141e-01 + <_> + + 0 -1 1519 -2.5631999596953392e-02 + + 1.9382879734039307e+00 -1.4708900451660156e-01 + <_> + + 0 -1 1520 -4.0939999744296074e-03 + + -2.6451599597930908e-01 2.0733200013637543e-01 + <_> + + 0 -1 1521 -8.9199998183175921e-04 + + -5.5031597614288330e-01 5.0374999642372131e-02 + <_> + + 0 -1 1522 -4.9518000334501266e-02 + + -2.5615389347076416e+00 1.3141700625419617e-01 + <_> + + 0 -1 1523 1.1680999770760536e-02 + + -2.4819800257682800e-01 3.9982700347900391e-01 + <_> + + 0 -1 1524 3.4563999623060226e-02 + + 1.6178800165653229e-01 -7.1418899297714233e-01 + <_> + + 0 -1 1525 -8.2909995689988136e-03 + + 2.2180099785327911e-01 -2.9181700944900513e-01 + <_> + + 0 -1 1526 -2.2358000278472900e-02 + + 3.1044098734855652e-01 -2.7280000504106283e-03 + <_> + + 0 -1 1527 -3.0801000073552132e-02 + + -9.5672702789306641e-01 -8.3400001749396324e-03 + <_> + + 0 -1 1528 4.3779000639915466e-02 + + 
1.2556900084018707e-01 -1.1759619712829590e+00 + <_> + + 0 -1 1529 4.3046001344919205e-02 + + -5.8876998722553253e-02 -1.8568470478057861e+00 + <_> + + 0 -1 1530 2.7188999578356743e-02 + + 4.2858000844717026e-02 3.9036700129508972e-01 + <_> + + 0 -1 1531 9.4149997457861900e-03 + + -4.3567001819610596e-02 -1.1094470024108887e+00 + <_> + + 0 -1 1532 9.4311997294425964e-02 + + 4.0256999433040619e-02 9.8442298173904419e-01 + <_> + + 0 -1 1533 1.7025099694728851e-01 + + 2.9510000720620155e-02 -6.9509297609329224e-01 + <_> + + 0 -1 1534 -4.7148000448942184e-02 + + 1.0338569879531860e+00 6.7602001130580902e-02 + <_> + + 0 -1 1535 1.1186300218105316e-01 + + -6.8682998418807983e-02 -2.4985830783843994e+00 + <_> + + 0 -1 1536 -1.4353999868035316e-02 + + -5.9481900930404663e-01 1.5001699328422546e-01 + <_> + + 0 -1 1537 3.4024000167846680e-02 + + -6.4823001623153687e-02 -2.1382639408111572e+00 + <_> + + 0 -1 1538 2.1601999178528786e-02 + + 5.5309999734163284e-02 7.8292900323867798e-01 + <_> + + 0 -1 1539 2.1771999076008797e-02 + + -7.1279997937381268e-03 -7.2148102521896362e-01 + <_> + + 0 -1 1540 8.2416996359825134e-02 + + 1.4609499275684357e-01 -1.3636670112609863e+00 + <_> + + 0 -1 1541 8.4671996533870697e-02 + + -1.7784699797630310e-01 7.2857701778411865e-01 + <_> + + 0 -1 1542 -5.5128000676631927e-02 + + -5.9402400255203247e-01 1.9357800483703613e-01 + <_> + + 0 -1 1543 -6.4823001623153687e-02 + + -1.0783840417861938e+00 -4.0734000504016876e-02 + <_> + + 0 -1 1544 -2.2769000381231308e-02 + + 7.7900201082229614e-01 3.4960000775754452e-03 + <_> + + 0 -1 1545 5.4756000638008118e-02 + + -6.5683998167514801e-02 -1.8188409805297852e+00 + <_> + + 0 -1 1546 -8.9000001025851816e-05 + + -1.7891999334096909e-02 2.0768299698829651e-01 + <_> + + 0 -1 1547 9.8361998796463013e-02 + + -5.5946998298168182e-02 -1.4153920412063599e+00 + <_> + + 0 -1 1548 -7.0930002257227898e-03 + + 3.4135299921035767e-01 -1.2089899927377701e-01 + <_> + + 0 -1 1549 5.0278000533580780e-02 + + -2.6286700367927551e-01 2.5797298550605774e-01 + <_> + + 0 -1 1550 -5.7870000600814819e-03 + + -1.3178600370883942e-01 1.7350199818611145e-01 + <_> + + 0 -1 1551 1.3973999768495560e-02 + + 2.8518000617623329e-02 -6.1152201890945435e-01 + <_> + + 0 -1 1552 2.1449999883770943e-02 + + 2.6181999593973160e-02 3.0306598544120789e-01 + <_> + + 0 -1 1553 -2.9214000329375267e-02 + + 4.4940599799156189e-01 -2.2803099453449249e-01 + <_> + + 0 -1 1554 4.8099999548867345e-04 + + -1.9879999756813049e-01 2.0744499564170837e-01 + <_> + + 0 -1 1555 1.7109999898821115e-03 + + -5.4037201404571533e-01 6.7865997552871704e-02 + <_> + + 0 -1 1556 8.6660003289580345e-03 + + -1.3128000311553478e-02 5.2297902107238770e-01 + <_> + + 0 -1 1557 6.3657999038696289e-02 + + 6.8299002945423126e-02 -4.9235099554061890e-01 + <_> + + 0 -1 1558 -2.7968000620603561e-02 + + 6.8183898925781250e-01 7.8781001269817352e-02 + <_> + + 0 -1 1559 4.8953998833894730e-02 + + -2.0622399449348450e-01 5.0388097763061523e-01 + <_> + 169 + -3.2396929264068604e+00 + + <_> + + 0 -1 1560 -2.9312999919056892e-02 + + 7.1284699440002441e-01 -5.8230698108673096e-01 + <_> + + 0 -1 1561 1.2415099889039993e-01 + + -3.6863499879837036e-01 6.0067200660705566e-01 + <_> + + 0 -1 1562 7.9349996522068977e-03 + + -8.6008298397064209e-01 2.1724699437618256e-01 + <_> + + 0 -1 1563 3.0365999788045883e-02 + + -2.7186998724937439e-01 6.1247897148132324e-01 + <_> + + 0 -1 1564 2.5218000635504723e-02 + + -3.4748300909996033e-01 5.0427699089050293e-01 + <_> + + 0 -1 1565 1.0014000348746777e-02 + + -3.1898999214172363e-01 
4.1376799345016479e-01 + <_> + + 0 -1 1566 -1.6775000840425491e-02 + + -6.9048100709915161e-01 9.4830997288227081e-02 + <_> + + 0 -1 1567 -2.6950000319629908e-03 + + -2.0829799771308899e-01 2.3737199604511261e-01 + <_> + + 0 -1 1568 4.2257998138666153e-02 + + -4.9366700649261475e-01 1.8170599639415741e-01 + <_> + + 0 -1 1569 -4.8505000770092010e-02 + + 1.3429640531539917e+00 3.9769001305103302e-02 + <_> + + 0 -1 1570 2.8992999345064163e-02 + + 4.6496000140905380e-02 -8.1643497943878174e-01 + <_> + + 0 -1 1571 -4.0089000016450882e-02 + + -7.1197801828384399e-01 2.2553899884223938e-01 + <_> + + 0 -1 1572 -4.1021998971700668e-02 + + 1.0057929754257202e+00 -1.9690200686454773e-01 + <_> + + 0 -1 1573 1.1838000267744064e-02 + + -1.2600000016391277e-02 8.0767101049423218e-01 + <_> + + 0 -1 1574 -2.1328000351786613e-02 + + -8.2023900747299194e-01 2.0524999126791954e-02 + <_> + + 0 -1 1575 -2.3904999718070030e-02 + + 5.4210501909255981e-01 -7.4767000973224640e-02 + <_> + + 0 -1 1576 1.8008999526500702e-02 + + -3.3827701210975647e-01 4.2358601093292236e-01 + <_> + + 0 -1 1577 -4.3614000082015991e-02 + + -1.1983489990234375e+00 1.5566200017929077e-01 + <_> + + 0 -1 1578 -9.2449998483061790e-03 + + -8.9029997587203979e-01 1.1003999970853329e-02 + <_> + + 0 -1 1579 4.7485001385211945e-02 + + 1.6664099693298340e-01 -9.0764498710632324e-01 + <_> + + 0 -1 1580 -1.4233999885618687e-02 + + 6.2695199251174927e-01 -2.5791200995445251e-01 + <_> + + 0 -1 1581 3.8010000716894865e-03 + + -2.8229999542236328e-01 2.6624599099159241e-01 + <_> + + 0 -1 1582 3.4330000635236502e-03 + + -6.3771998882293701e-01 9.8422996699810028e-02 + <_> + + 0 -1 1583 -2.9221000149846077e-02 + + -7.6769900321960449e-01 2.2634500265121460e-01 + <_> + + 0 -1 1584 -6.4949998632073402e-03 + + 4.5600101351737976e-01 -2.6528900861740112e-01 + <_> + + 0 -1 1585 -3.0034000054001808e-02 + + -7.6551097631454468e-01 1.4009299874305725e-01 + <_> + + 0 -1 1586 7.8360000625252724e-03 + + 4.6755999326705933e-02 -7.2356200218200684e-01 + <_> + + 0 -1 1587 8.8550001382827759e-03 + + -4.9141999334096909e-02 5.1472699642181396e-01 + <_> + + 0 -1 1588 9.5973998308181763e-02 + + -2.0068999379873276e-02 -1.0850950479507446e+00 + <_> + + 0 -1 1589 -3.2876998186111450e-02 + + -9.5875298976898193e-01 1.4543600380420685e-01 + <_> + + 0 -1 1590 -1.3384000398218632e-02 + + -7.0013600587844849e-01 2.9157999902963638e-02 + <_> + + 0 -1 1591 1.5235999599099159e-02 + + -2.8235700726509094e-01 2.5367999076843262e-01 + <_> + + 0 -1 1592 1.2054000049829483e-02 + + -2.5303399562835693e-01 4.6526700258255005e-01 + <_> + + 0 -1 1593 -7.6295003294944763e-02 + + -6.9915801286697388e-01 1.3217200338840485e-01 + <_> + + 0 -1 1594 -1.2040000408887863e-02 + + 4.5894598960876465e-01 -2.3856499791145325e-01 + <_> + + 0 -1 1595 2.1916000172495842e-02 + + 1.8268600106239319e-01 -6.1629700660705566e-01 + <_> + + 0 -1 1596 -2.7330000884830952e-03 + + -6.3257902860641479e-01 3.4219000488519669e-02 + <_> + + 0 -1 1597 -4.8652000725269318e-02 + + -1.0297729969024658e+00 1.7386500537395477e-01 + <_> + + 0 -1 1598 -1.0463999584317207e-02 + + 3.4757301211357117e-01 -2.7464100718498230e-01 + <_> + + 0 -1 1599 -6.6550001502037048e-03 + + -2.8980299830436707e-01 2.4037900567054749e-01 + <_> + + 0 -1 1600 8.5469996556639671e-03 + + -4.4340500235557556e-01 1.4267399907112122e-01 + <_> + + 0 -1 1601 1.9913999363780022e-02 + + 1.7740400135517120e-01 -2.4096299707889557e-01 + <_> + + 0 -1 1602 2.2012999281287193e-02 + + -1.0812000371515751e-02 -9.4690799713134766e-01 + <_> + + 0 -1 1603 
-5.2179001271724701e-02 + + 1.6547499895095825e+00 9.6487000584602356e-02 + <_> + + 0 -1 1604 1.9698999822139740e-02 + + -6.7560002207756042e-03 -8.6311501264572144e-01 + <_> + + 0 -1 1605 2.3040000349283218e-02 + + -2.3519999813288450e-03 3.8531300425529480e-01 + <_> + + 0 -1 1606 -1.5038000419735909e-02 + + -6.1905699968338013e-01 3.1077999621629715e-02 + <_> + + 0 -1 1607 -4.9956001341342926e-02 + + 7.0657497644424438e-01 4.7880999743938446e-02 + <_> + + 0 -1 1608 -6.9269999861717224e-02 + + 3.9212900400161743e-01 -2.3848000168800354e-01 + <_> + + 0 -1 1609 4.7399997711181641e-03 + + -2.4309000000357628e-02 2.5386300683021545e-01 + <_> + + 0 -1 1610 -3.3923998475074768e-02 + + 4.6930399537086487e-01 -2.3321899771690369e-01 + <_> + + 0 -1 1611 -1.6231000423431396e-02 + + 3.2319200038909912e-01 -2.0545600354671478e-01 + <_> + + 0 -1 1612 -5.0193000584840775e-02 + + -1.2277870178222656e+00 -4.0798000991344452e-02 + <_> + + 0 -1 1613 5.6944001466035843e-02 + + 4.5184001326560974e-02 6.0197502374649048e-01 + <_> + + 0 -1 1614 4.0936999022960663e-02 + + -1.6772800683975220e-01 8.9819300174713135e-01 + <_> + + 0 -1 1615 -3.0839999672025442e-03 + + 3.3716198801994324e-01 -2.7240800857543945e-01 + <_> + + 0 -1 1616 -3.2600000500679016e-02 + + -8.5446500778198242e-01 1.9664999097585678e-02 + <_> + + 0 -1 1617 9.8480999469757080e-02 + + 5.4742000997066498e-02 6.3827300071716309e-01 + <_> + + 0 -1 1618 -3.8185000419616699e-02 + + 5.2274698019027710e-01 -2.3384800553321838e-01 + <_> + + 0 -1 1619 -4.5917000621557236e-02 + + 6.2829202413558960e-01 3.2859001308679581e-02 + <_> + + 0 -1 1620 -1.1955499649047852e-01 + + -6.1572700738906860e-01 3.4680001437664032e-02 + <_> + + 0 -1 1621 -1.2044399976730347e-01 + + -8.4380000829696655e-01 1.6530700027942657e-01 + <_> + + 0 -1 1622 7.0619001984596252e-02 + + -6.3261002302169800e-02 -1.9863929748535156e+00 + <_> + + 0 -1 1623 8.4889996796846390e-03 + + -1.7663399875164032e-01 3.8011199235916138e-01 + <_> + + 0 -1 1624 2.2710999473929405e-02 + + -2.7605999261140823e-02 -9.1921401023864746e-01 + <_> + + 0 -1 1625 4.9700000090524554e-04 + + -2.4293200671672821e-01 2.2878900170326233e-01 + <_> + + 0 -1 1626 3.4651998430490494e-02 + + -2.3705999553203583e-01 5.4010999202728271e-01 + <_> + + 0 -1 1627 -4.4700000435113907e-03 + + 3.9078998565673828e-01 -1.2693800032138824e-01 + <_> + + 0 -1 1628 2.3643000051379204e-02 + + -2.6663699746131897e-01 3.2312598824501038e-01 + <_> + + 0 -1 1629 1.2813000008463860e-02 + + 1.7540800571441650e-01 -6.0787999629974365e-01 + <_> + + 0 -1 1630 -1.1250999756157398e-02 + + -1.0852589607238770e+00 -2.8046000748872757e-02 + <_> + + 0 -1 1631 -4.1535001248121262e-02 + + 7.1887397766113281e-01 2.7982000261545181e-02 + <_> + + 0 -1 1632 -9.3470998108386993e-02 + + -1.1906319856643677e+00 -4.4810999184846878e-02 + <_> + + 0 -1 1633 -2.7249999344348907e-02 + + 6.2942498922348022e-01 9.5039997249841690e-03 + <_> + + 0 -1 1634 -2.1759999915957451e-02 + + 1.3233649730682373e+00 -1.5027000010013580e-01 + <_> + + 0 -1 1635 -9.6890004351735115e-03 + + -3.3947101235389709e-01 1.7085799574851990e-01 + <_> + + 0 -1 1636 6.9395996630191803e-02 + + -2.5657799839973450e-01 4.7652098536491394e-01 + <_> + + 0 -1 1637 3.1208999454975128e-02 + + 1.4154000580310822e-01 -3.4942001104354858e-01 + <_> + + 0 -1 1638 -4.9727000296115875e-02 + + -1.1675560474395752e+00 -4.0757998824119568e-02 + <_> + + 0 -1 1639 -2.0301999524235725e-02 + + -3.9486399292945862e-01 1.5814900398254395e-01 + <_> + + 0 -1 1640 -1.5367000363767147e-02 + + 4.9300000071525574e-01 
-2.0092099905014038e-01 + <_> + + 0 -1 1641 -5.0735000520944595e-02 + + 1.8736059665679932e+00 8.6730003356933594e-02 + <_> + + 0 -1 1642 -2.0726000890135765e-02 + + -8.8938397169113159e-01 -7.3199998587369919e-03 + <_> + + 0 -1 1643 -3.0993999913334846e-02 + + -1.1664899587631226e+00 1.4274600148200989e-01 + <_> + + 0 -1 1644 -4.4269999489188194e-03 + + -6.6815102100372314e-01 4.4120000675320625e-03 + <_> + + 0 -1 1645 -4.5743998140096664e-02 + + -4.7955200076103210e-01 1.5121999382972717e-01 + <_> + + 0 -1 1646 1.6698999330401421e-02 + + 1.2048599869012833e-01 -4.5235899090766907e-01 + <_> + + 0 -1 1647 3.2210000790655613e-03 + + -7.7615000307559967e-02 2.7846598625183105e-01 + <_> + + 0 -1 1648 2.4434000253677368e-02 + + -1.9987100362777710e-01 6.7253702878952026e-01 + <_> + + 0 -1 1649 -7.9677999019622803e-02 + + 9.2222398519515991e-01 9.2557996511459351e-02 + <_> + + 0 -1 1650 4.4530000537633896e-02 + + -2.6690500974655151e-01 3.3320501446723938e-01 + <_> + + 0 -1 1651 -1.2528300285339355e-01 + + -5.4253101348876953e-01 1.3976299762725830e-01 + <_> + + 0 -1 1652 1.7971999943256378e-02 + + 1.8219999969005585e-02 -6.8048501014709473e-01 + <_> + + 0 -1 1653 1.9184000790119171e-02 + + -1.2583999894559383e-02 5.4126697778701782e-01 + <_> + + 0 -1 1654 4.0024001151323318e-02 + + -1.7638799548149109e-01 7.8810399770736694e-01 + <_> + + 0 -1 1655 1.3558999635279179e-02 + + 2.0737600326538086e-01 -4.7744300961494446e-01 + <_> + + 0 -1 1656 1.6220999881625175e-02 + + 2.3076999932527542e-02 -6.1182099580764771e-01 + <_> + + 0 -1 1657 1.1229000054299831e-02 + + -1.7728000879287720e-02 4.1764199733734131e-01 + <_> + + 0 -1 1658 3.9193000644445419e-02 + + -1.8948499858379364e-01 7.4019300937652588e-01 + <_> + + 0 -1 1659 -9.5539996400475502e-03 + + 4.0947100520133972e-01 -1.3508899509906769e-01 + <_> + + 0 -1 1660 2.7878999710083008e-02 + + -2.0350700616836548e-01 6.1625397205352783e-01 + <_> + + 0 -1 1661 -2.3600999265909195e-02 + + -1.6967060565948486e+00 1.4633199572563171e-01 + <_> + + 0 -1 1662 2.6930000633001328e-02 + + -3.0401999130845070e-02 -1.0909470319747925e+00 + <_> + + 0 -1 1663 2.8999999631196260e-04 + + -2.0076000690460205e-01 2.2314099967479706e-01 + <_> + + 0 -1 1664 -4.1124999523162842e-02 + + -4.5242199301719666e-01 5.7392001152038574e-02 + <_> + + 0 -1 1665 6.6789998672902584e-03 + + 2.3824900388717651e-01 -2.1262100338935852e-01 + <_> + + 0 -1 1666 4.7864999622106552e-02 + + -1.8194800615310669e-01 6.1918401718139648e-01 + <_> + + 0 -1 1667 -3.1679999083280563e-03 + + -2.7393200993537903e-01 2.5017300248146057e-01 + <_> + + 0 -1 1668 -8.6230002343654633e-03 + + -4.6280300617218018e-01 4.2397998273372650e-02 + <_> + + 0 -1 1669 -7.4350000359117985e-03 + + 4.1796800494194031e-01 -1.7079999670386314e-03 + <_> + + 0 -1 1670 -1.8769999733194709e-03 + + 1.4602300524711609e-01 -3.3721101284027100e-01 + <_> + + 0 -1 1671 -8.6226001381874084e-02 + + 7.5143402814865112e-01 1.0711999610066414e-02 + <_> + + 0 -1 1672 4.6833999454975128e-02 + + -1.9119599461555481e-01 4.8414900898933411e-01 + <_> + + 0 -1 1673 -9.2000002041459084e-05 + + 3.5220399498939514e-01 -1.7333300411701202e-01 + <_> + + 0 -1 1674 -1.6343999654054642e-02 + + -6.4397698640823364e-01 9.0680001303553581e-03 + <_> + + 0 -1 1675 4.5703999698162079e-02 + + 1.8216000869870186e-02 3.1970798969268799e-01 + <_> + + 0 -1 1676 -2.7382999658584595e-02 + + 1.0564049482345581e+00 -1.7276400327682495e-01 + <_> + + 0 -1 1677 -2.7602000162005424e-02 + + 2.9715499281883240e-01 -9.4600003212690353e-03 + <_> + + 0 -1 1678 
7.6939999125897884e-03 + + -2.1660299599170685e-01 4.7385200858116150e-01 + <_> + + 0 -1 1679 -7.0500001311302185e-04 + + 2.4048799276351929e-01 -2.6776000857353210e-01 + <_> + + 0 -1 1680 1.1054199934005737e-01 + + -3.3539000898599625e-02 -1.0233880281448364e+00 + <_> + + 0 -1 1681 6.8765997886657715e-02 + + -4.3239998631179333e-03 5.7153397798538208e-01 + <_> + + 0 -1 1682 1.7999999690800905e-03 + + 7.7574998140335083e-02 -4.2092698812484741e-01 + <_> + + 0 -1 1683 1.9232000410556793e-01 + + 8.2021996378898621e-02 2.8810169696807861e+00 + <_> + + 0 -1 1684 1.5742099285125732e-01 + + -1.3708199560642242e-01 2.0890059471130371e+00 + <_> + + 0 -1 1685 -4.9387000501155853e-02 + + -1.8610910177230835e+00 1.4332099258899689e-01 + <_> + + 0 -1 1686 5.1929000765085220e-02 + + -1.8737000226974487e-01 5.4231601953506470e-01 + <_> + + 0 -1 1687 4.9965001642704010e-02 + + 1.4175300300121307e-01 -1.5625779628753662e+00 + <_> + + 0 -1 1688 -4.2633000761270523e-02 + + 1.6059479713439941e+00 -1.4712899923324585e-01 + <_> + + 0 -1 1689 -3.7553999572992325e-02 + + -8.0974900722503662e-01 1.3256999850273132e-01 + <_> + + 0 -1 1690 -3.7174999713897705e-02 + + -1.3945020437240601e+00 -5.7055000215768814e-02 + <_> + + 0 -1 1691 1.3945999555289745e-02 + + 3.3427000045776367e-02 5.7474797964096069e-01 + <_> + + 0 -1 1692 -4.4800000614486635e-04 + + -5.5327498912811279e-01 2.1952999755740166e-02 + <_> + + 0 -1 1693 3.1993001699447632e-02 + + 2.0340999588370323e-02 3.7459200620651245e-01 + <_> + + 0 -1 1694 -4.2799999937415123e-03 + + 4.4428700208663940e-01 -2.2999699413776398e-01 + <_> + + 0 -1 1695 9.8550003021955490e-03 + + 1.8315799534320831e-01 -4.0964999794960022e-01 + <_> + + 0 -1 1696 9.3356996774673462e-02 + + -6.3661001622676849e-02 -1.6929290294647217e+00 + <_> + + 0 -1 1697 1.7209999263286591e-02 + + 2.0153899490833282e-01 -4.6061098575592041e-01 + <_> + + 0 -1 1698 8.4319999441504478e-03 + + -3.2003998756408691e-01 1.5312199294567108e-01 + <_> + + 0 -1 1699 -1.4054999686777592e-02 + + 8.6882400512695312e-01 3.2575000077486038e-02 + <_> + + 0 -1 1700 -7.7180000953376293e-03 + + 6.3686698675155640e-01 -1.8425500392913818e-01 + <_> + + 0 -1 1701 2.8005000203847885e-02 + + 1.7357499897480011e-01 -4.7883599996566772e-01 + <_> + + 0 -1 1702 -1.8884999677538872e-02 + + 2.4101600050926208e-01 -2.6547598838806152e-01 + <_> + + 0 -1 1703 -1.8585000187158585e-02 + + 5.4232501983642578e-01 5.3633000701665878e-02 + <_> + + 0 -1 1704 -3.6437001079320908e-02 + + 2.3908898830413818e+00 -1.3634699583053589e-01 + <_> + + 0 -1 1705 3.2455001026391983e-02 + + 1.5910699963569641e-01 -6.7581498622894287e-01 + <_> + + 0 -1 1706 5.9781998395919800e-02 + + -2.3479999508708715e-03 -7.3053699731826782e-01 + <_> + + 0 -1 1707 9.8209995776414871e-03 + + -1.1444099992513657e-01 3.0570301413536072e-01 + <_> + + 0 -1 1708 -3.5163998603820801e-02 + + -1.0511469841003418e+00 -3.3103000372648239e-02 + <_> + + 0 -1 1709 2.7429999317973852e-03 + + -2.0135399699211121e-01 3.2754099369049072e-01 + <_> + + 0 -1 1710 8.1059997901320457e-03 + + -2.1383500099182129e-01 4.3362098932266235e-01 + <_> + + 0 -1 1711 8.8942997157573700e-02 + + 1.0940899699926376e-01 -4.7609338760375977e+00 + <_> + + 0 -1 1712 -3.0054999515414238e-02 + + -1.7169300317764282e+00 -6.0919001698493958e-02 + <_> + + 0 -1 1713 -2.1734999492764473e-02 + + 6.4778900146484375e-01 -3.2830998301506042e-02 + <_> + + 0 -1 1714 3.7648998200893402e-02 + + -1.0060000233352184e-02 -7.6569098234176636e-01 + <_> + + 0 -1 1715 2.7189999818801880e-03 + + 1.9888900220394135e-01 
-8.2479000091552734e-02 + <_> + + 0 -1 1716 -1.0548000223934650e-02 + + -8.6613601446151733e-01 -2.5986000895500183e-02 + <_> + + 0 -1 1717 1.2966300547122955e-01 + + 1.3911999762058258e-01 -2.2271950244903564e+00 + <_> + + 0 -1 1718 -1.7676999792456627e-02 + + 3.3967700600624084e-01 -2.3989599943161011e-01 + <_> + + 0 -1 1719 -7.7051997184753418e-02 + + -2.5017969608306885e+00 1.2841999530792236e-01 + <_> + + 0 -1 1720 -1.9230000674724579e-02 + + 5.0641202926635742e-01 -1.9751599431037903e-01 + <_> + + 0 -1 1721 -5.1222998648881912e-02 + + -2.9333369731903076e+00 1.3858500123023987e-01 + <_> + + 0 -1 1722 2.0830000285059214e-03 + + -6.0043597221374512e-01 2.9718000441789627e-02 + <_> + + 0 -1 1723 2.5418000295758247e-02 + + 3.3915799856185913e-01 -1.4392000436782837e-01 + <_> + + 0 -1 1724 -2.3905999958515167e-02 + + -1.1082680225372314e+00 -4.7377001494169235e-02 + <_> + + 0 -1 1725 -6.3740001060068607e-03 + + 4.4533699750900269e-01 -6.7052997648715973e-02 + <_> + + 0 -1 1726 -3.7698999047279358e-02 + + -1.0406579971313477e+00 -4.1790001094341278e-02 + <_> + + 0 -1 1727 2.1655100584030151e-01 + + 3.3863000571727753e-02 8.2017302513122559e-01 + <_> + + 0 -1 1728 -1.3400999829173088e-02 + + 5.2903497219085693e-01 -1.9133000075817108e-01 + <_> + 196 + -3.2103500366210938e+00 + + <_> + + 0 -1 1729 7.1268998086452484e-02 + + -5.3631198406219482e-01 6.0715299844741821e-01 + <_> + + 0 -1 1730 5.6111000478267670e-02 + + -5.0141602754592896e-01 4.3976101279258728e-01 + <_> + + 0 -1 1731 4.0463998913764954e-02 + + -3.2922199368476868e-01 5.4834699630737305e-01 + <_> + + 0 -1 1732 6.3155002892017365e-02 + + -3.1701698899269104e-01 4.6152999997138977e-01 + <_> + + 0 -1 1733 1.0320999659597874e-02 + + 1.0694999992847443e-01 -9.8243898153305054e-01 + <_> + + 0 -1 1734 6.2606997787952423e-02 + + -1.4329700171947479e-01 7.1095001697540283e-01 + <_> + + 0 -1 1735 -3.9416000247001648e-02 + + 9.4380199909210205e-01 -2.1572099626064301e-01 + <_> + + 0 -1 1736 -5.3960001096129417e-03 + + -5.4611998796463013e-01 2.5303798913955688e-01 + <_> + + 0 -1 1737 1.0773199796676636e-01 + + 1.2496000155806541e-02 -1.0809199810028076e+00 + <_> + + 0 -1 1738 1.6982000321149826e-02 + + -3.1536400318145752e-01 5.1239997148513794e-01 + <_> + + 0 -1 1739 3.1216999515891075e-02 + + -4.5199999585747719e-03 -1.2443480491638184e+00 + <_> + + 0 -1 1740 -2.3106999695301056e-02 + + -7.6492899656295776e-01 2.0640599727630615e-01 + <_> + + 0 -1 1741 -1.1203999631106853e-02 + + 2.4092699587345123e-01 -3.5142099857330322e-01 + <_> + + 0 -1 1742 -4.7479998320341110e-03 + + -9.7007997334003448e-02 2.0638099312782288e-01 + <_> + + 0 -1 1743 -1.7358999699354172e-02 + + -7.9020297527313232e-01 2.1852999925613403e-02 + <_> + + 0 -1 1744 1.8851999193429947e-02 + + -1.0394600033760071e-01 5.4844200611114502e-01 + <_> + + 0 -1 1745 7.2249998338520527e-03 + + -4.0409401059150696e-01 2.6763799786567688e-01 + <_> + + 0 -1 1746 1.8915999680757523e-02 + + 2.0508000254631042e-01 -1.0206340551376343e+00 + <_> + + 0 -1 1747 3.1156999990344048e-02 + + 1.2400000123307109e-03 -8.7293499708175659e-01 + <_> + + 0 -1 1748 2.0951999351382256e-02 + + -5.5559999309480190e-03 8.0356198549270630e-01 + <_> + + 0 -1 1749 1.1291000060737133e-02 + + -3.6478400230407715e-01 2.2767899930477142e-01 + <_> + + 0 -1 1750 -5.7011000812053680e-02 + + -1.4295619726181030e+00 1.4322000741958618e-01 + <_> + + 0 -1 1751 7.2194002568721771e-02 + + -4.1850000619888306e-02 -1.9111829996109009e+00 + <_> + + 0 -1 1752 -1.9874000921845436e-02 + + 2.6425498723983765e-01 
-3.2617700099945068e-01 + <_> + + 0 -1 1753 -1.6692999750375748e-02 + + -8.3907800912857056e-01 4.0799999260343611e-04 + <_> + + 0 -1 1754 -3.9834998548030853e-02 + + -4.8858499526977539e-01 1.6436100006103516e-01 + <_> + + 0 -1 1755 2.7009999379515648e-02 + + -1.8862499296665192e-01 8.3419400453567505e-01 + <_> + + 0 -1 1756 -3.9420002140104771e-03 + + 2.3231500387191772e-01 -7.2360001504421234e-02 + <_> + + 0 -1 1757 2.2833000868558884e-02 + + -3.5884000360965729e-02 -1.1549400091171265e+00 + <_> + + 0 -1 1758 -6.8888001143932343e-02 + + -1.7837309837341309e+00 1.5159000456333160e-01 + <_> + + 0 -1 1759 4.3097000569105148e-02 + + -2.1608099341392517e-01 5.0624102354049683e-01 + <_> + + 0 -1 1760 8.6239995434880257e-03 + + -1.7795599997043610e-01 2.8957900404930115e-01 + <_> + + 0 -1 1761 1.4561000280082226e-02 + + -1.1408000253140926e-02 -8.9402002096176147e-01 + <_> + + 0 -1 1762 -1.1501000262796879e-02 + + 3.0171999335289001e-01 -4.3659001588821411e-02 + <_> + + 0 -1 1763 -1.0971499979496002e-01 + + -9.5147097110748291e-01 -1.9973000511527061e-02 + <_> + + 0 -1 1764 4.5228000730276108e-02 + + 3.3110998570919037e-02 9.6619802713394165e-01 + <_> + + 0 -1 1765 -2.7047999203205109e-02 + + 9.7963601350784302e-01 -1.7261900007724762e-01 + <_> + + 0 -1 1766 1.8030999228358269e-02 + + -2.0801000297069550e-02 2.7385899424552917e-01 + <_> + + 0 -1 1767 5.0524998456239700e-02 + + -5.6802999228239059e-02 -1.7775089740753174e+00 + <_> + + 0 -1 1768 -2.9923999682068825e-02 + + 6.5329200029373169e-01 -2.3537000641226768e-02 + <_> + + 0 -1 1769 3.8058001548051834e-02 + + 2.6317000389099121e-02 -7.0665699243545532e-01 + <_> + + 0 -1 1770 1.8563899397850037e-01 + + -5.6039998307824135e-03 3.2873699069023132e-01 + <_> + + 0 -1 1771 -4.0670000016689301e-03 + + 3.4204798936843872e-01 -3.0171599984169006e-01 + <_> + + 0 -1 1772 1.0108999907970428e-02 + + -7.3600001633167267e-03 5.7981598377227783e-01 + <_> + + 0 -1 1773 -1.1567000299692154e-02 + + -5.2722197771072388e-01 4.6447999775409698e-02 + <_> + + 0 -1 1774 -6.5649999305605888e-03 + + -5.8529102802276611e-01 1.9101899862289429e-01 + <_> + + 0 -1 1775 1.0582000017166138e-02 + + 2.1073000505566597e-02 -6.8892598152160645e-01 + <_> + + 0 -1 1776 -2.0304000005125999e-02 + + -3.6400699615478516e-01 1.5338799357414246e-01 + <_> + + 0 -1 1777 2.3529999889433384e-03 + + 3.6164000630378723e-02 -5.9825098514556885e-01 + <_> + + 0 -1 1778 -1.4690000098198652e-03 + + -1.4707699418067932e-01 3.7507998943328857e-01 + <_> + + 0 -1 1779 8.6449999362230301e-03 + + -2.1708500385284424e-01 5.1936799287796021e-01 + <_> + + 0 -1 1780 -2.4326000362634659e-02 + + -1.0846769809722900e+00 1.4084799587726593e-01 + <_> + + 0 -1 1781 7.4418999254703522e-02 + + -1.5513800084590912e-01 1.1822769641876221e+00 + <_> + + 0 -1 1782 1.7077999189496040e-02 + + 4.4231001287698746e-02 9.1561102867126465e-01 + <_> + + 0 -1 1783 -2.4577999487519264e-02 + + -1.5504100322723389e+00 -5.4745998233556747e-02 + <_> + + 0 -1 1784 3.0205000191926956e-02 + + 1.6662800312042236e-01 -1.0001239776611328e+00 + <_> + + 0 -1 1785 1.2136000208556652e-02 + + -7.7079099416732788e-01 -4.8639997839927673e-03 + <_> + + 0 -1 1786 8.6717002093791962e-02 + + 1.1061699688434601e-01 -1.6857999563217163e+00 + <_> + + 0 -1 1787 -4.2309001088142395e-02 + + 1.1075930595397949e+00 -1.5438599884510040e-01 + <_> + + 0 -1 1788 -2.6420000940561295e-03 + + 2.7451899647712708e-01 -1.8456199765205383e-01 + <_> + + 0 -1 1789 -5.6662000715732574e-02 + + -8.0625599622726440e-01 -1.6928000375628471e-02 + <_> + + 0 -1 1790 
2.3475000634789467e-02 + + 1.4187699556350708e-01 -2.5500899553298950e-01 + <_> + + 0 -1 1791 -2.0803000777959824e-02 + + 1.9826300442218781e-01 -3.1171199679374695e-01 + <_> + + 0 -1 1792 7.2599998675286770e-03 + + -5.0590999424457550e-02 4.1923800110816956e-01 + <_> + + 0 -1 1793 3.4160000085830688e-01 + + -1.6674900054931641e-01 9.2748600244522095e-01 + <_> + + 0 -1 1794 6.2029999680817127e-03 + + -1.2625899910926819e-01 4.0445300936698914e-01 + <_> + + 0 -1 1795 3.2692000269889832e-02 + + -3.2634999603033066e-02 -9.8939800262451172e-01 + <_> + + 0 -1 1796 2.1100000594742596e-04 + + -6.4534001052379608e-02 2.5473698973655701e-01 + <_> + + 0 -1 1797 7.2100001852959394e-04 + + -3.6618599295616150e-01 1.1973100155591965e-01 + <_> + + 0 -1 1798 5.4490998387336731e-02 + + 1.2073499709367752e-01 -1.0291390419006348e+00 + <_> + + 0 -1 1799 -1.0141000151634216e-02 + + -5.2177202701568604e-01 3.3734999597072601e-02 + <_> + + 0 -1 1800 -1.8815999850630760e-02 + + 6.5181797742843628e-01 1.3399999588727951e-03 + <_> + + 0 -1 1801 -5.3480002097785473e-03 + + 1.7370699346065521e-01 -3.4132000803947449e-01 + <_> + + 0 -1 1802 -1.0847000405192375e-02 + + -1.9699899852275848e-01 1.5045499801635742e-01 + <_> + + 0 -1 1803 -4.9926001578569412e-02 + + -5.0888502597808838e-01 3.0762000009417534e-02 + <_> + + 0 -1 1804 1.2160000391304493e-02 + + -6.9251999258995056e-02 1.8745499849319458e-01 + <_> + + 0 -1 1805 -2.2189998999238014e-03 + + -4.0849098563194275e-01 7.9954996705055237e-02 + <_> + + 0 -1 1806 3.1580000650137663e-03 + + -2.1124599874019623e-01 2.2366400063037872e-01 + <_> + + 0 -1 1807 4.1439998894929886e-03 + + -4.9900299310684204e-01 6.2917001545429230e-02 + <_> + + 0 -1 1808 -7.3730000294744968e-03 + + -2.0553299784660339e-01 2.2096699476242065e-01 + <_> + + 0 -1 1809 5.1812000572681427e-02 + + 1.8096800148487091e-01 -4.3495801091194153e-01 + <_> + + 0 -1 1810 1.8340000882744789e-02 + + 1.5200000256299973e-02 3.7991699576377869e-01 + <_> + + 0 -1 1811 1.7490799725055695e-01 + + -2.0920799672603607e-01 4.0013000369071960e-01 + <_> + + 0 -1 1812 5.3993999958038330e-02 + + 2.4751600623130798e-01 -2.6712900400161743e-01 + <_> + + 0 -1 1813 -3.2033199071884155e-01 + + -1.9094380140304565e+00 -6.6960997879505157e-02 + <_> + + 0 -1 1814 -2.7060000225901604e-02 + + -7.1371299028396606e-01 1.5904599428176880e-01 + <_> + + 0 -1 1815 7.7463999390602112e-02 + + -1.6970199346542358e-01 7.7552998065948486e-01 + <_> + + 0 -1 1816 2.3771999403834343e-02 + + 1.9021899998188019e-01 -6.0162097215652466e-01 + <_> + + 0 -1 1817 1.1501000262796879e-02 + + 7.7039999887347221e-03 -6.1730301380157471e-01 + <_> + + 0 -1 1818 3.2616000622510910e-02 + + 1.7159199714660645e-01 -7.0978200435638428e-01 + <_> + + 0 -1 1819 -4.4383000582456589e-02 + + -2.2606229782104492e+00 -7.3276996612548828e-02 + <_> + + 0 -1 1820 -5.8476001024246216e-02 + + 2.4087750911712646e+00 8.3091996610164642e-02 + <_> + + 0 -1 1821 1.9303999841213226e-02 + + -2.7082300186157227e-01 2.7369999885559082e-01 + <_> + + 0 -1 1822 -4.4705998152494431e-02 + + 3.1355598568916321e-01 -6.2492001801729202e-02 + <_> + + 0 -1 1823 -6.0334999114274979e-02 + + -1.4515119791030884e+00 -5.8761000633239746e-02 + <_> + + 0 -1 1824 1.1667000129818916e-02 + + -1.8084999173879623e-02 5.0479698181152344e-01 + <_> + + 0 -1 1825 2.8009999543428421e-02 + + -2.3302899301052094e-01 3.0708700418472290e-01 + <_> + + 0 -1 1826 6.5397001802921295e-02 + + 1.4135900139808655e-01 -5.0010901689529419e-01 + <_> + + 0 -1 1827 9.6239997074007988e-03 + + -2.2054600715637207e-01 
3.9191201329231262e-01 + <_> + + 0 -1 1828 2.5510000996291637e-03 + + -1.1381500214338303e-01 2.0032300055027008e-01 + <_> + + 0 -1 1829 3.1847000122070312e-02 + + 2.5476999580860138e-02 -5.3326398134231567e-01 + <_> + + 0 -1 1830 3.3055000007152557e-02 + + 1.7807699739933014e-01 -6.2793898582458496e-01 + <_> + + 0 -1 1831 4.7600999474525452e-02 + + -1.4747899770736694e-01 1.4204180240631104e+00 + <_> + + 0 -1 1832 -1.9571999087929726e-02 + + -5.2693498134613037e-01 1.5838600695133209e-01 + <_> + + 0 -1 1833 -5.4730001837015152e-02 + + 8.8231599330902100e-01 -1.6627800464630127e-01 + <_> + + 0 -1 1834 -2.2686000913381577e-02 + + -4.8386898636817932e-01 1.5000100433826447e-01 + <_> + + 0 -1 1835 1.0713200271129608e-01 + + -2.1336199343204498e-01 4.2333900928497314e-01 + <_> + + 0 -1 1836 -3.6380000412464142e-02 + + -7.4198000133037567e-02 1.4589400589466095e-01 + <_> + + 0 -1 1837 1.3935999944806099e-02 + + -2.4911600351333618e-01 2.6771199703216553e-01 + <_> + + 0 -1 1838 2.0991999655961990e-02 + + 8.7959999218583107e-03 4.3064999580383301e-01 + <_> + + 0 -1 1839 4.9118999391794205e-02 + + -1.7591999471187592e-01 6.9282901287078857e-01 + <_> + + 0 -1 1840 3.6315999925136566e-02 + + 1.3145299255847931e-01 -3.3597299456596375e-01 + <_> + + 0 -1 1841 4.1228000074625015e-02 + + -4.5692000538110733e-02 -1.3515930175781250e+00 + <_> + + 0 -1 1842 1.5672000125050545e-02 + + 1.7544099688529968e-01 -6.0550000518560410e-02 + <_> + + 0 -1 1843 -1.6286000609397888e-02 + + -1.1308189630508423e+00 -3.9533000439405441e-02 + <_> + + 0 -1 1844 -3.0229999683797359e-03 + + -2.2454300522804260e-01 2.3628099262714386e-01 + <_> + + 0 -1 1845 -1.3786299526691437e-01 + + 4.5376899838447571e-01 -2.1098700165748596e-01 + <_> + + 0 -1 1846 -9.6760001033544540e-03 + + -1.5105099976062775e-01 2.0781700313091278e-01 + <_> + + 0 -1 1847 -2.4839999154210091e-02 + + -6.8350297212600708e-01 -8.0040004104375839e-03 + <_> + + 0 -1 1848 -1.3964399695396423e-01 + + 6.5011298656463623e-01 4.6544000506401062e-02 + <_> + + 0 -1 1849 -8.2153998315334320e-02 + + 4.4887199997901917e-01 -2.3591999709606171e-01 + <_> + + 0 -1 1850 3.8449999410659075e-03 + + -8.8173002004623413e-02 2.7346798777580261e-01 + <_> + + 0 -1 1851 -6.6579999402165413e-03 + + -4.6866598725318909e-01 7.7001996338367462e-02 + <_> + + 0 -1 1852 -1.5898000448942184e-02 + + 2.9268398880958557e-01 -2.1941000595688820e-02 + <_> + + 0 -1 1853 -5.0946000963449478e-02 + + -1.2093789577484131e+00 -4.2109999805688858e-02 + <_> + + 0 -1 1854 1.6837999224662781e-02 + + -4.5595999807119370e-02 5.0180697441101074e-01 + <_> + + 0 -1 1855 1.5918999910354614e-02 + + -2.6904299855232239e-01 2.6516300439834595e-01 + <_> + + 0 -1 1856 3.6309999413788319e-03 + + -1.3046100735664368e-01 3.1807100772857666e-01 + <_> + + 0 -1 1857 -8.6144998669624329e-02 + + 1.9443659782409668e+00 -1.3978299498558044e-01 + <_> + + 0 -1 1858 3.3140998333692551e-02 + + 1.5266799926757812e-01 -3.0866000801324844e-02 + <_> + + 0 -1 1859 -3.9679999463260174e-03 + + -7.1202301979064941e-01 -1.3844000175595284e-02 + <_> + + 0 -1 1860 -2.4008000269532204e-02 + + 9.2007797956466675e-01 4.6723999083042145e-02 + <_> + + 0 -1 1861 8.7320003658533096e-03 + + -2.2567300498485565e-01 3.1931799650192261e-01 + <_> + + 0 -1 1862 -2.7786999940872192e-02 + + -7.2337102890014648e-01 1.7018599808216095e-01 + <_> + + 0 -1 1863 -1.9455300271511078e-01 + + 1.2461860179901123e+00 -1.4736199378967285e-01 + <_> + + 0 -1 1864 -1.0869699716567993e-01 + + -1.4465179443359375e+00 1.2145300209522247e-01 + <_> + + 0 -1 1865 
-1.9494999200105667e-02 + + -7.8153097629547119e-01 -2.3732999339699745e-02 + <_> + + 0 -1 1866 3.0650000553578138e-03 + + -8.5471397638320923e-01 1.6686999797821045e-01 + <_> + + 0 -1 1867 5.9193998575210571e-02 + + -1.4853699505329132e-01 1.1273469924926758e+00 + <_> + + 0 -1 1868 -5.4207999259233475e-02 + + 5.4726999998092651e-01 3.5523999482393265e-02 + <_> + + 0 -1 1869 -3.9324998855590820e-02 + + 3.6642599105834961e-01 -2.0543999969959259e-01 + <_> + + 0 -1 1870 8.2278996706008911e-02 + + -3.5007998347282410e-02 5.3994202613830566e-01 + <_> + + 0 -1 1871 -7.4479999020695686e-03 + + -6.1537498235702515e-01 -3.5319998860359192e-03 + <_> + + 0 -1 1872 7.3770000599324703e-03 + + -6.5591000020503998e-02 4.1961398720741272e-01 + <_> + + 0 -1 1873 7.0779998786747456e-03 + + -3.4129500389099121e-01 1.2536799907684326e-01 + <_> + + 0 -1 1874 -1.5581999905407429e-02 + + -3.0240398645401001e-01 2.1511000394821167e-01 + <_> + + 0 -1 1875 -2.7399999089539051e-03 + + 7.6553001999855042e-02 -4.1060501337051392e-01 + <_> + + 0 -1 1876 -7.0600003004074097e-02 + + -9.7356200218200684e-01 1.1241800338029861e-01 + <_> + + 0 -1 1877 -1.1706000193953514e-02 + + 1.8560700118541718e-01 -2.9755198955535889e-01 + <_> + + 0 -1 1878 7.1499997284263372e-04 + + -5.9650000184774399e-02 2.4824699759483337e-01 + <_> + + 0 -1 1879 -3.6866001784801483e-02 + + 3.2751700282096863e-01 -2.3059600591659546e-01 + <_> + + 0 -1 1880 -3.2526999711990356e-02 + + -2.9320299625396729e-01 1.5427699685096741e-01 + <_> + + 0 -1 1881 -7.4813999235630035e-02 + + -1.2143570184707642e+00 -5.2244000136852264e-02 + <_> + + 0 -1 1882 4.1469998657703400e-02 + + 1.3062499463558197e-01 -2.3274369239807129e+00 + <_> + + 0 -1 1883 -2.8880000114440918e-02 + + -6.6074597835540771e-01 -9.0960003435611725e-03 + <_> + + 0 -1 1884 4.6381998807191849e-02 + + 1.6630199551582336e-01 -6.6949498653411865e-01 + <_> + + 0 -1 1885 2.5424998998641968e-01 + + -5.4641999304294586e-02 -1.2676080465316772e+00 + <_> + + 0 -1 1886 2.4000001139938831e-03 + + 2.0276799798011780e-01 1.4667999930679798e-02 + <_> + + 0 -1 1887 -8.2805998623371124e-02 + + -7.8713601827621460e-01 -2.4468999356031418e-02 + <_> + + 0 -1 1888 -1.1438000015914440e-02 + + 2.8623399138450623e-01 -3.0894000083208084e-02 + <_> + + 0 -1 1889 -1.2913399934768677e-01 + + 1.7292929887771606e+00 -1.4293900132179260e-01 + <_> + + 0 -1 1890 3.8552999496459961e-02 + + 1.9232999533414841e-02 3.7732601165771484e-01 + <_> + + 0 -1 1891 1.0191400349140167e-01 + + -7.4533998966217041e-02 -3.3868899345397949e+00 + <_> + + 0 -1 1892 -1.9068000838160515e-02 + + 3.1814101338386536e-01 1.9261000677943230e-02 + <_> + + 0 -1 1893 -6.0775000602006912e-02 + + 7.6936298608779907e-01 -1.7644000053405762e-01 + <_> + + 0 -1 1894 2.4679999798536301e-02 + + 1.8396499752998352e-01 -3.0868801474571228e-01 + <_> + + 0 -1 1895 2.6759000495076180e-02 + + -2.3454900085926056e-01 3.3056598901748657e-01 + <_> + + 0 -1 1896 1.4969999901950359e-02 + + 1.7213599383831024e-01 -1.8248899281024933e-01 + <_> + + 0 -1 1897 2.6142999529838562e-02 + + -4.6463999897241592e-02 -1.1318379640579224e+00 + <_> + + 0 -1 1898 -3.7512000650167465e-02 + + 8.0404001474380493e-01 6.9660000503063202e-02 + <_> + + 0 -1 1899 -5.3229997865855694e-03 + + -8.1884402036666870e-01 -1.8224999308586121e-02 + <_> + + 0 -1 1900 1.7813000828027725e-02 + + 1.4957800507545471e-01 -1.8667200207710266e-01 + <_> + + 0 -1 1901 -3.4010000526905060e-02 + + -7.2852301597595215e-01 -1.6615999862551689e-02 + <_> + + 0 -1 1902 -1.5953000634908676e-02 + + 5.6944000720977783e-01 
1.3832000084221363e-02 + <_> + + 0 -1 1903 1.9743999466300011e-02 + + 4.0525000542402267e-02 -4.1773399710655212e-01 + <_> + + 0 -1 1904 -1.0374800115823746e-01 + + -1.9825149774551392e+00 1.1960200220346451e-01 + <_> + + 0 -1 1905 -1.9285000860691071e-02 + + 5.0230598449707031e-01 -1.9745899736881256e-01 + <_> + + 0 -1 1906 -1.2780000455677509e-02 + + 4.0195000171661377e-01 -2.6957999914884567e-02 + <_> + + 0 -1 1907 -1.6352999955415726e-02 + + -7.6608800888061523e-01 -2.4209000170230865e-02 + <_> + + 0 -1 1908 -1.2763699889183044e-01 + + 8.6578500270843506e-01 6.4205996692180634e-02 + <_> + + 0 -1 1909 1.9068999215960503e-02 + + -5.5929797887802124e-01 -1.6880000475794077e-03 + <_> + + 0 -1 1910 3.2480999827384949e-02 + + 4.0722001343965530e-02 4.8925098776817322e-01 + <_> + + 0 -1 1911 9.4849998131394386e-03 + + -1.9231900572776794e-01 5.1139700412750244e-01 + <_> + + 0 -1 1912 5.0470000132918358e-03 + + 1.8706800043582916e-01 -1.6113600134849548e-01 + <_> + + 0 -1 1913 4.1267998516559601e-02 + + -4.8817999660968781e-02 -1.1326299905776978e+00 + <_> + + 0 -1 1914 -7.6358996331691742e-02 + + 1.4169390201568604e+00 8.7319999933242798e-02 + <_> + + 0 -1 1915 -7.2834998369216919e-02 + + 1.3189860582351685e+00 -1.4819100499153137e-01 + <_> + + 0 -1 1916 5.9576999396085739e-02 + + 4.8376999795436859e-02 8.5611802339553833e-01 + <_> + + 0 -1 1917 2.0263999700546265e-02 + + -2.1044099330902100e-01 3.3858999609947205e-01 + <_> + + 0 -1 1918 -8.0301001667976379e-02 + + -1.2464400529861450e+00 1.1857099831104279e-01 + <_> + + 0 -1 1919 -1.7835000529885292e-02 + + 2.5782299041748047e-01 -2.4564799666404724e-01 + <_> + + 0 -1 1920 1.1431000195443630e-02 + + 2.2949799895286560e-01 -2.9497599601745605e-01 + <_> + + 0 -1 1921 -2.5541000068187714e-02 + + -8.6252999305725098e-01 -7.0400000549852848e-04 + <_> + + 0 -1 1922 -7.6899997657164931e-04 + + 3.1511399149894714e-01 -1.4349000155925751e-01 + <_> + + 0 -1 1923 -1.4453999698162079e-02 + + 2.5148499011993408e-01 -2.8232899308204651e-01 + <_> + + 0 -1 1924 8.6730001494288445e-03 + + 2.6601400971412659e-01 -2.8190800547599792e-01 + <_> + 197 + -3.2772979736328125e+00 + + <_> + + 0 -1 1925 5.4708998650312424e-02 + + -5.4144299030303955e-01 6.1043000221252441e-01 + <_> + + 0 -1 1926 -1.0838799923658371e-01 + + 7.1739900112152100e-01 -4.1196098923683167e-01 + <_> + + 0 -1 1927 2.2996999323368073e-02 + + -5.8269798755645752e-01 2.9645600914955139e-01 + <_> + + 0 -1 1928 2.7540000155568123e-03 + + -7.4243897199630737e-01 1.4183300733566284e-01 + <_> + + 0 -1 1929 -2.1520000882446766e-03 + + 1.7879900336265564e-01 -6.8548601865768433e-01 + <_> + + 0 -1 1930 -2.2559000179171562e-02 + + -1.0775549411773682e+00 1.2388999760150909e-01 + <_> + + 0 -1 1931 8.3025000989437103e-02 + + 2.4500999599695206e-02 -1.0251879692077637e+00 + <_> + + 0 -1 1932 -6.6740000620484352e-03 + + -4.5283100008964539e-01 2.1230199933052063e-01 + <_> + + 0 -1 1933 7.6485000550746918e-02 + + -2.6972699165344238e-01 4.8580199480056763e-01 + <_> + + 0 -1 1934 5.4910001344978809e-03 + + -4.8871201276779175e-01 3.1616398692131042e-01 + <_> + + 0 -1 1935 -1.0414999909698963e-02 + + 4.1512900590896606e-01 -3.0044800043106079e-01 + <_> + + 0 -1 1936 2.7607999742031097e-02 + + 1.6203799843788147e-01 -9.9868500232696533e-01 + <_> + + 0 -1 1937 -2.3272000253200531e-02 + + -1.1024399995803833e+00 2.1124999970197678e-02 + <_> + + 0 -1 1938 -5.5619999766349792e-02 + + 6.5033102035522461e-01 -2.7938000857830048e-02 + <_> + + 0 -1 1939 -4.0631998330354691e-02 + + 4.2117300629615784e-01 
-2.6763799786567688e-01 + <_> + + 0 -1 1940 -7.3560001328587532e-03 + + 3.5277798771858215e-01 -3.7854000926017761e-01 + <_> + + 0 -1 1941 1.7007000744342804e-02 + + -2.9189500212669373e-01 4.1053798794746399e-01 + <_> + + 0 -1 1942 -3.7034001201391220e-02 + + -1.3216309547424316e+00 1.2966500222682953e-01 + <_> + + 0 -1 1943 -1.9633000716567039e-02 + + -8.7702298164367676e-01 1.0799999581649899e-03 + <_> + + 0 -1 1944 -2.3546999320387840e-02 + + 2.6106101274490356e-01 -2.1481400728225708e-01 + <_> + + 0 -1 1945 -4.3352998793125153e-02 + + -9.9089699983596802e-01 -9.9560003727674484e-03 + <_> + + 0 -1 1946 -2.2183999419212341e-02 + + 6.3454401493072510e-01 -5.6547001004219055e-02 + <_> + + 0 -1 1947 1.6530999913811684e-02 + + 2.4664999917149544e-02 -7.3326802253723145e-01 + <_> + + 0 -1 1948 -3.2744001597166061e-02 + + -5.6297200918197632e-01 1.6640299558639526e-01 + <_> + + 0 -1 1949 7.1415998041629791e-02 + + -3.0000001424923539e-04 -9.3286401033401489e-01 + <_> + + 0 -1 1950 8.0999999772757292e-04 + + -9.5380000770092010e-02 2.5184699892997742e-01 + <_> + + 0 -1 1951 -8.4090000018477440e-03 + + -6.5496802330017090e-01 6.7300997674465179e-02 + <_> + + 0 -1 1952 -1.7254000529646873e-02 + + -4.6492999792098999e-01 1.6070899367332458e-01 + <_> + + 0 -1 1953 -1.8641000613570213e-02 + + -1.0594010353088379e+00 -1.9617000594735146e-02 + <_> + + 0 -1 1954 -9.1979997232556343e-03 + + 5.0716197490692139e-01 -1.5339200198650360e-01 + <_> + + 0 -1 1955 1.8538000062108040e-02 + + -3.0498200654983521e-01 7.3506200313568115e-01 + <_> + + 0 -1 1956 -5.0335001200437546e-02 + + -1.1140480041503906e+00 1.8000100553035736e-01 + <_> + + 0 -1 1957 -2.3529000580310822e-02 + + -8.6907899379730225e-01 -1.2459999881684780e-02 + <_> + + 0 -1 1958 -2.7100000530481339e-02 + + 6.5942901372909546e-01 -3.5323999822139740e-02 + <_> + + 0 -1 1959 6.5879998728632927e-03 + + -2.2953400015830994e-01 4.2425099015235901e-01 + <_> + + 0 -1 1960 2.3360000923275948e-02 + + 1.8356199562549591e-01 -9.8587298393249512e-01 + <_> + + 0 -1 1961 1.2946999631822109e-02 + + -3.3147400617599487e-01 2.1323199570178986e-01 + <_> + + 0 -1 1962 -6.6559999249875546e-03 + + -1.1951400339603424e-01 2.9752799868583679e-01 + <_> + + 0 -1 1963 -2.2570999339222908e-02 + + 3.8499400019645691e-01 -2.4434499442577362e-01 + <_> + + 0 -1 1964 -6.3813999295234680e-02 + + -8.9383500814437866e-01 1.4217500388622284e-01 + <_> + + 0 -1 1965 -4.9945000559091568e-02 + + 5.3864401578903198e-01 -2.0485299825668335e-01 + <_> + + 0 -1 1966 6.8319998681545258e-03 + + -5.6678999215364456e-02 3.9970999956130981e-01 + <_> + + 0 -1 1967 -5.5835999548435211e-02 + + -1.5239470005035400e+00 -5.1183000206947327e-02 + <_> + + 0 -1 1968 3.1957000494003296e-01 + + 7.4574001133441925e-02 1.2447799444198608e+00 + <_> + + 0 -1 1969 8.0955997109413147e-02 + + -1.9665500521659851e-01 5.9889698028564453e-01 + <_> + + 0 -1 1970 -1.4911999925971031e-02 + + -6.4020597934722900e-01 1.5807600319385529e-01 + <_> + + 0 -1 1971 4.6709001064300537e-02 + + 8.5239000618457794e-02 -4.5487201213836670e-01 + <_> + + 0 -1 1972 6.0539999976754189e-03 + + -4.3184000253677368e-01 2.2452600300312042e-01 + <_> + + 0 -1 1973 -3.4375999122858047e-02 + + 4.0202501416206360e-01 -2.3903599381446838e-01 + <_> + + 0 -1 1974 -3.4924000501632690e-02 + + 5.2870100736618042e-01 3.9709001779556274e-02 + <_> + + 0 -1 1975 3.0030000489205122e-03 + + -3.8754299283027649e-01 1.4192600548267365e-01 + <_> + + 0 -1 1976 -1.4132999815046787e-02 + + 8.7528401613235474e-01 8.5507996380329132e-02 + <_> + + 0 -1 1977 
-6.7940000444650650e-03 + + -1.1649219989776611e+00 -3.3943001180887222e-02 + <_> + + 0 -1 1978 -5.2886001765727997e-02 + + 1.0930680036544800e+00 5.1187001168727875e-02 + <_> + + 0 -1 1979 -2.1079999860376120e-03 + + 1.3696199655532837e-01 -3.3849999308586121e-01 + <_> + + 0 -1 1980 1.8353000283241272e-02 + + 1.3661600649356842e-01 -4.0777799487113953e-01 + <_> + + 0 -1 1981 1.2671999633312225e-02 + + -1.4936000108718872e-02 -8.1707501411437988e-01 + <_> + + 0 -1 1982 1.2924999929964542e-02 + + 1.7625099420547485e-01 -3.2491698861122131e-01 + <_> + + 0 -1 1983 -1.7921000719070435e-02 + + -5.2745401859283447e-01 4.4443000108003616e-02 + <_> + + 0 -1 1984 1.9160000374540687e-03 + + -1.0978599637746811e-01 2.2067500650882721e-01 + <_> + + 0 -1 1985 -1.4697999693453312e-02 + + 3.9067798852920532e-01 -2.2224999964237213e-01 + <_> + + 0 -1 1986 -1.4972999691963196e-02 + + -2.5450900197029114e-01 1.7790000140666962e-01 + <_> + + 0 -1 1987 1.4636999927461147e-02 + + -2.5125000625848770e-02 -8.7121301889419556e-01 + <_> + + 0 -1 1988 -1.0974000208079815e-02 + + 7.9082798957824707e-01 2.0121000707149506e-02 + <_> + + 0 -1 1989 -9.1599998995661736e-03 + + -4.7906899452209473e-01 5.2232000976800919e-02 + <_> + + 0 -1 1990 4.6179997734725475e-03 + + -1.7244599759578705e-01 3.4527799487113953e-01 + <_> + + 0 -1 1991 2.3476999253034592e-02 + + 3.7760001141577959e-03 -6.5333700180053711e-01 + <_> + + 0 -1 1992 3.1766999512910843e-02 + + 1.6364000737667084e-02 5.8723700046539307e-01 + <_> + + 0 -1 1993 -1.8419999629259109e-02 + + 1.9993899762630463e-01 -3.2056498527526855e-01 + <_> + + 0 -1 1994 1.9543999806046486e-02 + + 1.8450200557708740e-01 -2.3793600499629974e-01 + <_> + + 0 -1 1995 4.1159498691558838e-01 + + -6.0382001101970673e-02 -1.6072119474411011e+00 + <_> + + 0 -1 1996 -4.1595999151468277e-02 + + -3.2756200432777405e-01 1.5058000385761261e-01 + <_> + + 0 -1 1997 -1.0335999540984631e-02 + + -6.2394398450851440e-01 1.3112000189721584e-02 + <_> + + 0 -1 1998 1.2392999604344368e-02 + + -3.3114999532699585e-02 5.5579900741577148e-01 + <_> + + 0 -1 1999 -8.7270000949501991e-03 + + 1.9883200526237488e-01 -3.7635600566864014e-01 + <_> + + 0 -1 2000 1.6295000910758972e-02 + + 2.0373000204563141e-01 -4.2800799012184143e-01 + <_> + + 0 -1 2001 -1.0483999736607075e-02 + + -5.6847000122070312e-01 4.4199001044034958e-02 + <_> + + 0 -1 2002 -1.2431999668478966e-02 + + 7.4641901254653931e-01 4.3678998947143555e-02 + <_> + + 0 -1 2003 -5.0374999642372131e-02 + + 8.5090100765228271e-01 -1.7773799598217010e-01 + <_> + + 0 -1 2004 4.9548000097274780e-02 + + 1.6784900426864624e-01 -2.9877498745918274e-01 + <_> + + 0 -1 2005 -4.1085001081228256e-02 + + -1.3302919864654541e+00 -4.9182001501321793e-02 + <_> + + 0 -1 2006 1.0069999843835831e-03 + + -6.0538999736309052e-02 1.8483200669288635e-01 + <_> + + 0 -1 2007 -5.0142999738454819e-02 + + 7.6447701454162598e-01 -1.8356999754905701e-01 + <_> + + 0 -1 2008 -8.7879998609423637e-03 + + 2.2655999660491943e-01 -6.3156999647617340e-02 + <_> + + 0 -1 2009 -5.0170999020338058e-02 + + -1.5899070501327515e+00 -6.1255000531673431e-02 + <_> + + 0 -1 2010 1.0216099768877029e-01 + + 1.2071800231933594e-01 -1.4120110273361206e+00 + <_> + + 0 -1 2011 -1.4372999779880047e-02 + + -1.3116970062255859e+00 -5.1936000585556030e-02 + <_> + + 0 -1 2012 1.0281999595463276e-02 + + -2.1639999467879534e-03 4.4247201085090637e-01 + <_> + + 0 -1 2013 -1.1814000084996223e-02 + + 6.5378099679946899e-01 -1.8723699450492859e-01 + <_> + + 0 -1 2014 7.2114996612071991e-02 + + 7.1846999228000641e-02 
8.1496298313140869e-01 + <_> + + 0 -1 2015 -1.9001999869942665e-02 + + -6.7427200078964233e-01 -4.3200000072829425e-04 + <_> + + 0 -1 2016 -4.6990001574158669e-03 + + 3.3311501145362854e-01 5.5794000625610352e-02 + <_> + + 0 -1 2017 -5.8157000690698624e-02 + + 4.5572298765182495e-01 -2.0305100083351135e-01 + <_> + + 0 -1 2018 1.1360000353306532e-03 + + -4.4686999171972275e-02 2.2681899368762970e-01 + <_> + + 0 -1 2019 -4.9414999783039093e-02 + + 2.6694598793983459e-01 -2.6116999983787537e-01 + <_> + + 0 -1 2020 -1.1913800239562988e-01 + + -8.3017998933792114e-01 1.3248500227928162e-01 + <_> + + 0 -1 2021 -1.8303999677300453e-02 + + -6.7499202489852905e-01 1.7092000693082809e-02 + <_> + + 0 -1 2022 -7.9199997708201408e-03 + + -7.2287000715732574e-02 1.4425800740718842e-01 + <_> + + 0 -1 2023 5.1925998181104660e-02 + + 3.0921999365091324e-02 -5.5860602855682373e-01 + <_> + + 0 -1 2024 6.6724002361297607e-02 + + 1.3666400313377380e-01 -2.9411000013351440e-01 + <_> + + 0 -1 2025 -1.3778000138700008e-02 + + -5.9443902969360352e-01 1.5300000086426735e-02 + <_> + + 0 -1 2026 -1.7760999500751495e-02 + + 4.0496501326560974e-01 -3.3559999428689480e-03 + <_> + + 0 -1 2027 -4.2234998196363449e-02 + + -1.0897940397262573e+00 -4.0224999189376831e-02 + <_> + + 0 -1 2028 -1.3524999842047691e-02 + + 2.8921899199485779e-01 -2.5194799900054932e-01 + <_> + + 0 -1 2029 -1.1106000281870365e-02 + + 6.5312802791595459e-01 -1.8053700029850006e-01 + <_> + + 0 -1 2030 -1.2284599989652634e-01 + + -1.9570649862289429e+00 1.4815400540828705e-01 + <_> + + 0 -1 2031 4.7715999186038971e-02 + + -2.2875599563121796e-01 3.4233701229095459e-01 + <_> + + 0 -1 2032 3.1817000359296799e-02 + + 1.5976299345493317e-01 -1.0091969966888428e+00 + <_> + + 0 -1 2033 4.2570000514388084e-03 + + -3.8881298899650574e-01 8.4210000932216644e-02 + <_> + + 0 -1 2034 -6.1372999101877213e-02 + + 1.7152810096740723e+00 5.9324998408555984e-02 + <_> + + 0 -1 2035 -2.7030000928789377e-03 + + -3.8161700963973999e-01 8.5127003490924835e-02 + <_> + + 0 -1 2036 -6.8544000387191772e-02 + + -3.0925889015197754e+00 1.1788000166416168e-01 + <_> + + 0 -1 2037 1.0372500121593475e-01 + + -1.3769300282001495e-01 1.9009410142898560e+00 + <_> + + 0 -1 2038 1.5799000859260559e-02 + + -6.2660001218318939e-02 2.5917699933052063e-01 + <_> + + 0 -1 2039 -9.8040001466870308e-03 + + -5.6291598081588745e-01 4.3923001736402512e-02 + <_> + + 0 -1 2040 -9.0229995548725128e-03 + + 2.5287100672721863e-01 -4.1225999593734741e-02 + <_> + + 0 -1 2041 -6.3754998147487640e-02 + + -2.6178569793701172e+00 -7.4005998671054840e-02 + <_> + + 0 -1 2042 3.8954999297857285e-02 + + 5.9032998979091644e-02 8.5945600271224976e-01 + <_> + + 0 -1 2043 -3.9802998304367065e-02 + + 9.3600499629974365e-01 -1.5639400482177734e-01 + <_> + + 0 -1 2044 5.0301998853683472e-02 + + 1.3725900650024414e-01 -2.5549728870391846e+00 + <_> + + 0 -1 2045 4.6250000596046448e-02 + + -1.3964000158011913e-02 -7.1026200056076050e-01 + <_> + + 0 -1 2046 6.2196001410484314e-02 + + 5.9526000171899796e-02 1.6509100198745728e+00 + <_> + + 0 -1 2047 -6.4776003360748291e-02 + + 7.1368998289108276e-01 -1.7270000278949738e-01 + <_> + + 0 -1 2048 2.7522999793291092e-02 + + 1.4631600677967072e-01 -8.1428997218608856e-02 + <_> + + 0 -1 2049 3.9900001138448715e-04 + + -3.7144500017166138e-01 1.0152699798345566e-01 + <_> + + 0 -1 2050 -4.3299999088048935e-03 + + -2.3756299912929535e-01 2.6798400282859802e-01 + <_> + + 0 -1 2051 4.7297000885009766e-02 + + -2.7682000771164894e-02 -8.4910297393798828e-01 + <_> + + 0 -1 2052 
1.2508999556303024e-02 + + 1.8730199337005615e-01 -5.6001102924346924e-01 + <_> + + 0 -1 2053 4.5899000018835068e-02 + + -1.5601199865341187e-01 9.7073000669479370e-01 + <_> + + 0 -1 2054 1.9853399693965912e-01 + + 1.4895500242710114e-01 -1.1015529632568359e+00 + <_> + + 0 -1 2055 1.6674999147653580e-02 + + -1.6615299880504608e-01 8.2210999727249146e-01 + <_> + + 0 -1 2056 1.9829999655485153e-03 + + -7.1249999105930328e-02 2.8810900449752808e-01 + <_> + + 0 -1 2057 2.2447999566793442e-02 + + -2.0981000736355782e-02 -7.8416502475738525e-01 + <_> + + 0 -1 2058 -1.3913000002503395e-02 + + -1.8165799975395203e-01 2.0491799712181091e-01 + <_> + + 0 -1 2059 -7.7659999951720238e-03 + + -4.5595899224281311e-01 6.3576996326446533e-02 + <_> + + 0 -1 2060 -1.3209000229835510e-02 + + 2.6632300019264221e-01 -1.7795999348163605e-01 + <_> + + 0 -1 2061 4.9052998423576355e-02 + + -1.5476800501346588e-01 1.1069979667663574e+00 + <_> + + 0 -1 2062 2.0263999700546265e-02 + + 6.8915002048015594e-02 6.9867497682571411e-01 + <_> + + 0 -1 2063 -1.6828000545501709e-02 + + 2.7607199549674988e-01 -2.5139200687408447e-01 + <_> + + 0 -1 2064 -1.6939499974250793e-01 + + -3.0767529010772705e+00 1.1617500334978104e-01 + <_> + + 0 -1 2065 -1.1336100101470947e-01 + + -1.4639229774475098e+00 -5.1447000354528427e-02 + <_> + + 0 -1 2066 -7.7685996890068054e-02 + + 8.8430202007293701e-01 4.3306998908519745e-02 + <_> + + 0 -1 2067 -1.5568000264465809e-02 + + 1.3672499358654022e-01 -3.4505501389503479e-01 + <_> + + 0 -1 2068 -6.6018998622894287e-02 + + -1.0300110578536987e+00 1.1601399630308151e-01 + <_> + + 0 -1 2069 8.3699999377131462e-03 + + 7.6429001986980438e-02 -4.4002500176429749e-01 + <_> + + 0 -1 2070 3.5402998328208923e-02 + + 1.1979500204324722e-01 -7.2668302059173584e-01 + <_> + + 0 -1 2071 -3.9051000028848648e-02 + + 6.7375302314758301e-01 -1.8196000158786774e-01 + <_> + + 0 -1 2072 -9.7899995744228363e-03 + + 2.1264599263668060e-01 3.6756001412868500e-02 + <_> + + 0 -1 2073 -2.3047000169754028e-02 + + 4.4742199778556824e-01 -2.0986700057983398e-01 + <_> + + 0 -1 2074 3.1169999856501818e-03 + + 3.7544000893831253e-02 2.7808201313018799e-01 + <_> + + 0 -1 2075 1.3136000372469425e-02 + + -1.9842399656772614e-01 5.4335701465606689e-01 + <_> + + 0 -1 2076 1.4782000333070755e-02 + + 1.3530600070953369e-01 -1.1153600364923477e-01 + <_> + + 0 -1 2077 -6.0139000415802002e-02 + + 8.4039300680160522e-01 -1.6711600124835968e-01 + <_> + + 0 -1 2078 5.1998998969793320e-02 + + 1.7372000217437744e-01 -7.8547602891921997e-01 + <_> + + 0 -1 2079 2.4792000651359558e-02 + + -1.7739200592041016e-01 6.6752600669860840e-01 + <_> + + 0 -1 2080 -1.2014999985694885e-02 + + -1.4263699948787689e-01 1.6070500016212463e-01 + <_> + + 0 -1 2081 -9.8655998706817627e-02 + + 1.0429769754409790e+00 -1.5770199894905090e-01 + <_> + + 0 -1 2082 1.1758299916982651e-01 + + 1.0955700278282166e-01 -4.4920377731323242e+00 + <_> + + 0 -1 2083 -1.8922999501228333e-02 + + -7.8543400764465332e-01 1.2984000146389008e-02 + <_> + + 0 -1 2084 -2.8390999883413315e-02 + + -6.0569900274276733e-01 1.2903499603271484e-01 + <_> + + 0 -1 2085 1.3182999566197395e-02 + + -1.4415999874472618e-02 -7.3210501670837402e-01 + <_> + + 0 -1 2086 -1.1653000116348267e-01 + + -2.0442469120025635e+00 1.4053100347518921e-01 + <_> + + 0 -1 2087 -3.8880000356584787e-03 + + -4.1861599683761597e-01 7.8704997897148132e-02 + <_> + + 0 -1 2088 3.1229000538587570e-02 + + 2.4632999673485756e-02 4.1870400309562683e-01 + <_> + + 0 -1 2089 2.5198999792337418e-02 + + -1.7557799816131592e-01 
6.4710599184036255e-01 + <_> + + 0 -1 2090 -2.8124000877141953e-02 + + -2.2005599737167358e-01 1.4121000468730927e-01 + <_> + + 0 -1 2091 3.6499001085758209e-02 + + -6.8426996469497681e-02 -2.3410849571228027e+00 + <_> + + 0 -1 2092 -7.2292998433113098e-02 + + 1.2898750305175781e+00 8.4875002503395081e-02 + <_> + + 0 -1 2093 -4.1671000421047211e-02 + + -1.1630970239639282e+00 -5.3752999752759933e-02 + <_> + + 0 -1 2094 4.7703001648187637e-02 + + 7.0101000368595123e-02 7.3676502704620361e-01 + <_> + + 0 -1 2095 6.5793000161647797e-02 + + -1.7755299806594849e-01 6.9780498743057251e-01 + <_> + + 0 -1 2096 1.3904999941587448e-02 + + 2.1936799585819244e-01 -2.0390799641609192e-01 + <_> + + 0 -1 2097 -2.7730999514460564e-02 + + 6.1867898702621460e-01 -1.7804099619388580e-01 + <_> + + 0 -1 2098 -1.5879999846220016e-02 + + -4.6484100818634033e-01 1.8828600645065308e-01 + <_> + + 0 -1 2099 7.4128001928329468e-02 + + -1.2858100235462189e-01 3.2792479991912842e+00 + <_> + + 0 -1 2100 -8.9000002481043339e-04 + + -3.0117601156234741e-01 2.3818799853324890e-01 + <_> + + 0 -1 2101 1.7965000122785568e-02 + + -2.2284999489784241e-01 2.9954001307487488e-01 + <_> + + 0 -1 2102 -2.5380000006407499e-03 + + 2.5064399838447571e-01 -1.3665600121021271e-01 + <_> + + 0 -1 2103 -9.0680001303553581e-03 + + 2.9017499089241028e-01 -2.8929701447486877e-01 + <_> + + 0 -1 2104 4.9169998615980148e-02 + + 1.9156399369239807e-01 -6.8328702449798584e-01 + <_> + + 0 -1 2105 -3.0680999159812927e-02 + + -7.5677001476287842e-01 -1.3279999606311321e-02 + <_> + + 0 -1 2106 1.0017400234937668e-01 + + 8.4453999996185303e-02 1.0888710021972656e+00 + <_> + + 0 -1 2107 3.1950001139193773e-03 + + -2.6919400691986084e-01 1.9537900388240814e-01 + <_> + + 0 -1 2108 3.5503000020980835e-02 + + 1.3632300496101379e-01 -5.6917202472686768e-01 + <_> + + 0 -1 2109 4.5900000259280205e-04 + + -4.0443998575210571e-01 1.4074799418449402e-01 + <_> + + 0 -1 2110 2.5258999317884445e-02 + + 1.6243200004100800e-01 -5.5741798877716064e-01 + <_> + + 0 -1 2111 -5.1549999043345451e-03 + + 3.1132599711418152e-01 -2.2756099700927734e-01 + <_> + + 0 -1 2112 1.5869999770075083e-03 + + -2.6867699623107910e-01 1.9565400481224060e-01 + <_> + + 0 -1 2113 -1.6204999759793282e-02 + + 1.5486499667167664e-01 -3.4057798981666565e-01 + <_> + + 0 -1 2114 -2.9624000191688538e-02 + + 1.1466799974441528e+00 9.0557999908924103e-02 + <_> + + 0 -1 2115 -1.5930000226944685e-03 + + -7.1257501840591431e-01 -7.0400000549852848e-04 + <_> + + 0 -1 2116 -5.4019000381231308e-02 + + 4.1537499427795410e-01 2.7246000245213509e-02 + <_> + + 0 -1 2117 -6.6211000084877014e-02 + + -1.3340090513229370e+00 -4.7352999448776245e-02 + <_> + + 0 -1 2118 2.7940999716520309e-02 + + 1.4446300268173218e-01 -5.1518398523330688e-01 + <_> + + 0 -1 2119 2.8957000002264977e-02 + + -4.9966000020503998e-02 -1.1929039955139160e+00 + <_> + + 0 -1 2120 -2.0424999296665192e-02 + + 6.3881301879882812e-01 3.8141001015901566e-02 + <_> + + 0 -1 2121 1.2416999787092209e-02 + + -2.1547000110149384e-01 4.9477699398994446e-01 + <_> + 181 + -3.3196411132812500e+00 + + <_> + + 0 -1 2122 4.3274000287055969e-02 + + -8.0494397878646851e-01 3.9897298812866211e-01 + <_> + + 0 -1 2123 1.8615500628948212e-01 + + -3.1655299663543701e-01 6.8877297639846802e-01 + <_> + + 0 -1 2124 3.1860999763011932e-02 + + -6.4266198873519897e-01 2.5550898909568787e-01 + <_> + + 0 -1 2125 1.4022000133991241e-02 + + -4.5926600694656372e-01 3.1171199679374695e-01 + <_> + + 0 -1 2126 -6.3029997982084751e-03 + + 4.6026900410652161e-01 
-2.7438500523567200e-01 + <_> + + 0 -1 2127 -5.4310001432895660e-03 + + 3.6608600616455078e-01 -2.7205801010131836e-01 + <_> + + 0 -1 2128 1.6822999343276024e-02 + + 2.3476999253034592e-02 -8.8443797826766968e-01 + <_> + + 0 -1 2129 2.6039000600576401e-02 + + 1.7488799989223480e-01 -5.4564702510833740e-01 + <_> + + 0 -1 2130 -2.6720000430941582e-02 + + -9.6396499872207642e-01 2.3524999618530273e-02 + <_> + + 0 -1 2131 -1.7041999846696854e-02 + + -7.0848798751831055e-01 2.1468099951744080e-01 + <_> + + 0 -1 2132 5.9569999575614929e-03 + + 7.3601000010967255e-02 -6.8225598335266113e-01 + <_> + + 0 -1 2133 -2.8679999522864819e-03 + + -7.4935001134872437e-01 2.3803399503231049e-01 + <_> + + 0 -1 2134 -4.3774999678134918e-02 + + 6.8323302268981934e-01 -2.1380299329757690e-01 + <_> + + 0 -1 2135 5.1633000373840332e-02 + + -1.2566499412059784e-01 6.7523801326751709e-01 + <_> + + 0 -1 2136 8.1780003383755684e-03 + + 7.0689998567104340e-02 -8.0665898323059082e-01 + <_> + + 0 -1 2137 -5.2841998636722565e-02 + + 9.5433902740478516e-01 1.6548000276088715e-02 + <_> + + 0 -1 2138 5.2583999931812286e-02 + + -2.8414401412010193e-01 4.7129800915718079e-01 + <_> + + 0 -1 2139 -1.2659000232815742e-02 + + 3.8445401191711426e-01 -6.2288001179695129e-02 + <_> + + 0 -1 2140 1.1694000102579594e-02 + + 5.6000000768108293e-05 -1.0173139572143555e+00 + <_> + + 0 -1 2141 -2.3918999359011650e-02 + + 8.4921300411224365e-01 5.7399999350309372e-03 + <_> + + 0 -1 2142 -6.1673998832702637e-02 + + -9.2571401596069336e-01 -1.7679999582469463e-03 + <_> + + 0 -1 2143 -1.8279999494552612e-03 + + -5.4372298717498779e-01 2.4932399392127991e-01 + <_> + + 0 -1 2144 3.5257998853921890e-02 + + -7.3719997890293598e-03 -9.3963998556137085e-01 + <_> + + 0 -1 2145 -1.8438000231981277e-02 + + 7.2136700153350830e-01 1.0491999797523022e-02 + <_> + + 0 -1 2146 -3.8389001041650772e-02 + + 1.9272600114345551e-01 -3.5832101106643677e-01 + <_> + + 0 -1 2147 9.9720999598503113e-02 + + 1.1354199796915054e-01 -1.6304190158843994e+00 + <_> + + 0 -1 2148 8.4462001919746399e-02 + + -5.3420998156070709e-02 -1.6981120109558105e+00 + <_> + + 0 -1 2149 4.0270000696182251e-02 + + -1.0783199965953827e-01 5.1926600933074951e-01 + <_> + + 0 -1 2150 5.8935999870300293e-02 + + -1.8053700029850006e-01 9.5119798183441162e-01 + <_> + + 0 -1 2151 1.4957000315189362e-01 + + 1.6785299777984619e-01 -1.1591869592666626e+00 + <_> + + 0 -1 2152 6.9399998756125569e-04 + + 2.0491400361061096e-01 -3.3118200302124023e-01 + <_> + + 0 -1 2153 -3.3369001001119614e-02 + + 9.3468099832534790e-01 -2.9639999847859144e-03 + <_> + + 0 -1 2154 9.3759996816515923e-03 + + 3.7000000011175871e-03 -7.7549797296524048e-01 + <_> + + 0 -1 2155 4.3193999677896500e-02 + + -2.2040000185370445e-03 7.4589699506759644e-01 + <_> + + 0 -1 2156 -6.7555002868175507e-02 + + 7.2292101383209229e-01 -1.8404200673103333e-01 + <_> + + 0 -1 2157 -3.1168600916862488e-01 + + 1.0014270544052124e+00 3.4003000706434250e-02 + <_> + + 0 -1 2158 2.9743999242782593e-02 + + -4.6356000006198883e-02 -1.2781809568405151e+00 + <_> + + 0 -1 2159 1.0737000033259392e-02 + + 1.4812000095844269e-02 6.6649997234344482e-01 + <_> + + 0 -1 2160 -2.8841000050306320e-02 + + -9.4222599267959595e-01 -2.0796999335289001e-02 + <_> + + 0 -1 2161 -5.7649998925626278e-03 + + -4.3541899323463440e-01 2.3386000096797943e-01 + <_> + + 0 -1 2162 2.8410999104380608e-02 + + -1.7615799605846405e-01 8.5765302181243896e-01 + <_> + + 0 -1 2163 -2.9007999226450920e-02 + + 5.7978099584579468e-01 2.8565999120473862e-02 + <_> + + 0 -1 2164 
2.4965999647974968e-02 + + -2.2729000076651573e-02 -9.6773099899291992e-01 + <_> + + 0 -1 2165 1.2036000378429890e-02 + + -1.4214700460433960e-01 5.1687997579574585e-01 + <_> + + 0 -1 2166 -4.2514000087976456e-02 + + 9.7273802757263184e-01 -1.8119800090789795e-01 + <_> + + 0 -1 2167 1.0276000015437603e-02 + + -8.3099998533725739e-02 3.1762799620628357e-01 + <_> + + 0 -1 2168 -6.9191999733448029e-02 + + -2.0668580532073975e+00 -6.0173999518156052e-02 + <_> + + 0 -1 2169 -4.6769999898970127e-03 + + 4.4131800532341003e-01 2.3209000006318092e-02 + <_> + + 0 -1 2170 -1.3923999853432178e-02 + + 2.8606700897216797e-01 -2.9152700304985046e-01 + <_> + + 0 -1 2171 -1.5333999879658222e-02 + + -5.7414501905441284e-01 2.3063300549983978e-01 + <_> + + 0 -1 2172 -1.0239000432193279e-02 + + 3.4479200839996338e-01 -2.6080399751663208e-01 + <_> + + 0 -1 2173 -5.0988998264074326e-02 + + 5.6154102087020874e-01 6.1218999326229095e-02 + <_> + + 0 -1 2174 3.0689999461174011e-02 + + -1.4772799611091614e-01 1.6378489732742310e+00 + <_> + + 0 -1 2175 -1.1223999783396721e-02 + + 2.4006199836730957e-01 -4.4864898920059204e-01 + <_> + + 0 -1 2176 -6.2899999320507050e-03 + + 4.3119499087333679e-01 -2.3808999359607697e-01 + <_> + + 0 -1 2177 7.8590996563434601e-02 + + 1.9865000620484352e-02 8.0853801965713501e-01 + <_> + + 0 -1 2178 -1.0178999975323677e-02 + + 1.8193200230598450e-01 -3.2877799868583679e-01 + <_> + + 0 -1 2179 3.1227000057697296e-02 + + 1.4973899722099304e-01 -1.4180339574813843e+00 + <_> + + 0 -1 2180 4.0196999907493591e-02 + + -1.9760499894618988e-01 5.8508199453353882e-01 + <_> + + 0 -1 2181 1.6138000413775444e-02 + + 5.0000002374872565e-04 3.9050000905990601e-01 + <_> + + 0 -1 2182 -4.5519001781940460e-02 + + 1.2646820545196533e+00 -1.5632599592208862e-01 + <_> + + 0 -1 2183 -1.8130000680685043e-02 + + 6.5148502588272095e-01 1.0235999710857868e-02 + <_> + + 0 -1 2184 -1.4001999981701374e-02 + + -1.0344820022583008e+00 -3.2182998955249786e-02 + <_> + + 0 -1 2185 -3.8816001266241074e-02 + + -4.7874298691749573e-01 1.6290700435638428e-01 + <_> + + 0 -1 2186 3.1656000763177872e-02 + + -2.0983399450778961e-01 5.4575902223587036e-01 + <_> + + 0 -1 2187 -1.0839999653398991e-02 + + 5.1898801326751709e-01 -1.5080000273883343e-02 + <_> + + 0 -1 2188 1.2032999657094479e-02 + + -2.1107600629329681e-01 7.5937002897262573e-01 + <_> + + 0 -1 2189 7.0772998034954071e-02 + + 1.8048800528049469e-01 -7.4048501253128052e-01 + <_> + + 0 -1 2190 5.3139799833297729e-01 + + -1.4491699635982513e-01 1.5360039472579956e+00 + <_> + + 0 -1 2191 -1.4774000272154808e-02 + + -2.8153699636459351e-01 2.0407299697399139e-01 + <_> + + 0 -1 2192 -2.2410000674426556e-03 + + -4.4876301288604736e-01 5.3989000618457794e-02 + <_> + + 0 -1 2193 4.9968000501394272e-02 + + 4.1514001786708832e-02 2.9417100548744202e-01 + <_> + + 0 -1 2194 -4.7701999545097351e-02 + + 3.9674299955368042e-01 -2.8301799297332764e-01 + <_> + + 0 -1 2195 -9.1311000287532806e-02 + + 2.1994259357452393e+00 8.7964996695518494e-02 + <_> + + 0 -1 2196 3.8070000708103180e-02 + + -2.8025600314140320e-01 2.5156199932098389e-01 + <_> + + 0 -1 2197 -1.5538999810814857e-02 + + 3.4157499670982361e-01 1.7924999818205833e-02 + <_> + + 0 -1 2198 -1.5445999801158905e-02 + + 2.8680199384689331e-01 -2.5135898590087891e-01 + <_> + + 0 -1 2199 -5.7388000190258026e-02 + + 6.3830000162124634e-01 8.8597998023033142e-02 + <_> + + 0 -1 2200 -5.9440000914037228e-03 + + 7.9016998410224915e-02 -4.0774899721145630e-01 + <_> + + 0 -1 2201 -6.9968998432159424e-02 + + -4.4644200801849365e-01 
1.7219600081443787e-01 + <_> + + 0 -1 2202 -2.5064999237656593e-02 + + -9.8270201683044434e-01 -3.5388000309467316e-02 + <_> + + 0 -1 2203 1.7216000705957413e-02 + + 2.2705900669097900e-01 -8.0550098419189453e-01 + <_> + + 0 -1 2204 -4.4279001653194427e-02 + + 8.3951997756958008e-01 -1.7429600656032562e-01 + <_> + + 0 -1 2205 4.3988998979330063e-02 + + 1.1557199805974960e-01 -1.9666889905929565e+00 + <_> + + 0 -1 2206 1.5907000750303268e-02 + + -3.7576001137495041e-02 -1.0311100482940674e+00 + <_> + + 0 -1 2207 -9.2754997313022614e-02 + + -1.3530019521713257e+00 1.2141299992799759e-01 + <_> + + 0 -1 2208 7.1037001907825470e-02 + + -1.7684300243854523e-01 7.4485200643539429e-01 + <_> + + 0 -1 2209 5.7762000709772110e-02 + + 1.2835599482059479e-01 -4.4444200396537781e-01 + <_> + + 0 -1 2210 -1.6432000324130058e-02 + + 8.0152702331542969e-01 -1.7491699755191803e-01 + <_> + + 0 -1 2211 2.3939000442624092e-02 + + 1.6144999861717224e-01 -1.2364500015974045e-01 + <_> + + 0 -1 2212 1.2636000290513039e-02 + + 1.5411999821662903e-01 -3.3293798565864563e-01 + <_> + + 0 -1 2213 -5.4347999393939972e-02 + + -1.8400700092315674e+00 1.4835999906063080e-01 + <_> + + 0 -1 2214 -1.3261999934911728e-02 + + -8.0838799476623535e-01 -2.7726000174880028e-02 + <_> + + 0 -1 2215 6.1340001411736012e-03 + + -1.3785000145435333e-01 3.2858499884605408e-01 + <_> + + 0 -1 2216 2.8991000726819038e-02 + + -2.5516999885439873e-02 -8.3387202024459839e-01 + <_> + + 0 -1 2217 -2.1986000239849091e-02 + + -7.3739999532699585e-01 1.7887100577354431e-01 + <_> + + 0 -1 2218 5.3269998170435429e-03 + + -4.5449298620223999e-01 6.8791002035140991e-02 + <_> + + 0 -1 2219 8.6047999560832977e-02 + + 2.1008500456809998e-01 -3.7808901071548462e-01 + <_> + + 0 -1 2220 -8.5549997165799141e-03 + + 4.0134999155998230e-01 -2.1074099838733673e-01 + <_> + + 0 -1 2221 6.7790001630783081e-03 + + -2.1648999303579330e-02 4.5421499013900757e-01 + <_> + + 0 -1 2222 -6.3959998078644276e-03 + + -4.9818599224090576e-01 7.5907997786998749e-02 + <_> + + 0 -1 2223 8.9469999074935913e-03 + + 1.7857700586318970e-01 -2.8454899787902832e-01 + <_> + + 0 -1 2224 3.2589999027550220e-03 + + 4.6624999493360519e-02 -5.5206298828125000e-01 + <_> + + 0 -1 2225 4.1476998478174210e-02 + + 1.7550499737262726e-01 -2.0703999698162079e-01 + <_> + + 0 -1 2226 -6.7449999041855335e-03 + + -4.6392598748207092e-01 6.9303996860980988e-02 + <_> + + 0 -1 2227 3.0564999207854271e-02 + + 5.1734998822212219e-02 7.5550502538681030e-01 + <_> + + 0 -1 2228 -7.4780001305043697e-03 + + 1.4893899857997894e-01 -3.1906801462173462e-01 + <_> + + 0 -1 2229 8.9088998734951019e-02 + + 1.3738800585269928e-01 -1.1379710435867310e+00 + <_> + + 0 -1 2230 7.3230001144111156e-03 + + -2.8829199075698853e-01 1.9088600575923920e-01 + <_> + + 0 -1 2231 -1.8205000087618828e-02 + + -3.0178600549697876e-01 1.6795800626277924e-01 + <_> + + 0 -1 2232 -2.5828000158071518e-02 + + -9.8137998580932617e-01 -1.9860999658703804e-02 + <_> + + 0 -1 2233 1.0936199873685837e-01 + + 4.8790000379085541e-02 5.3118300437927246e-01 + <_> + + 0 -1 2234 -1.1424999684095383e-02 + + 2.3705999553203583e-01 -2.7925300598144531e-01 + <_> + + 0 -1 2235 -5.7565998286008835e-02 + + 4.7255399823188782e-01 6.5171003341674805e-02 + <_> + + 0 -1 2236 1.0278300195932388e-01 + + -2.0765100419521332e-01 5.0947701930999756e-01 + <_> + + 0 -1 2237 2.7041999623179436e-02 + + 1.6421200335025787e-01 -1.4508620500564575e+00 + <_> + + 0 -1 2238 -1.3635000213980675e-02 + + -5.6543898582458496e-01 2.3788999766111374e-02 + <_> + + 0 -1 2239 
-3.2158198952674866e-01 + + -3.5602829456329346e+00 1.1801300197839737e-01 + <_> + + 0 -1 2240 2.0458100736141205e-01 + + -3.7016000598669052e-02 -1.0225499868392944e+00 + <_> + + 0 -1 2241 -7.0347003638744354e-02 + + -5.6491899490356445e-01 1.8525199592113495e-01 + <_> + + 0 -1 2242 3.7831000983715057e-02 + + -2.9901999980211258e-02 -8.2921499013900757e-01 + <_> + + 0 -1 2243 -7.0298001170158386e-02 + + -5.3172302246093750e-01 1.4430199563503265e-01 + <_> + + 0 -1 2244 6.3221000134944916e-02 + + -2.2041200101375580e-01 4.7952198982238770e-01 + <_> + + 0 -1 2245 3.6393001675605774e-02 + + 1.4222699403762817e-01 -6.1193901300430298e-01 + <_> + + 0 -1 2246 4.0099998004734516e-03 + + -3.4560799598693848e-01 1.1738699674606323e-01 + <_> + + 0 -1 2247 -4.9106001853942871e-02 + + 9.5984101295471191e-01 6.4934998750686646e-02 + <_> + + 0 -1 2248 -7.1583002805709839e-02 + + 1.7385669946670532e+00 -1.4252899587154388e-01 + <_> + + 0 -1 2249 -3.8008999079465866e-02 + + 1.3872820138931274e+00 6.6188000142574310e-02 + <_> + + 0 -1 2250 -3.1570000573992729e-03 + + 5.3677000105381012e-02 -5.4048001766204834e-01 + <_> + + 0 -1 2251 1.9458999857306480e-02 + + -9.3620002269744873e-02 3.9131000638008118e-01 + <_> + + 0 -1 2252 1.1293999850749969e-02 + + 3.7223998457193375e-02 -5.4251801967620850e-01 + <_> + + 0 -1 2253 -3.3495001494884491e-02 + + 9.5307898521423340e-01 3.7696998566389084e-02 + <_> + + 0 -1 2254 9.2035003006458282e-02 + + -1.3488399982452393e-01 2.2897069454193115e+00 + <_> + + 0 -1 2255 3.7529999390244484e-03 + + 2.2824199497699738e-01 -5.9983700513839722e-01 + <_> + + 0 -1 2256 1.2848000042140484e-02 + + -2.2005200386047363e-01 3.7221899628639221e-01 + <_> + + 0 -1 2257 -1.4316199719905853e-01 + + 1.2855789661407471e+00 4.7237001359462738e-02 + <_> + + 0 -1 2258 -9.6879996359348297e-02 + + -3.9550929069519043e+00 -7.2903998196125031e-02 + <_> + + 0 -1 2259 -8.8459998369216919e-03 + + 3.7674999237060547e-01 -4.6484000980854034e-02 + <_> + + 0 -1 2260 1.5900000929832458e-02 + + -2.4457000195980072e-02 -8.0034798383712769e-01 + <_> + + 0 -1 2261 7.0372000336647034e-02 + + 1.7019000649452209e-01 -6.3068997859954834e-01 + <_> + + 0 -1 2262 -3.7953998893499374e-02 + + -9.3667197227478027e-01 -4.1214000433683395e-02 + <_> + + 0 -1 2263 5.1597899198532104e-01 + + 1.3080599904060364e-01 -1.5802290439605713e+00 + <_> + + 0 -1 2264 -3.2843001186847687e-02 + + -1.1441620588302612e+00 -4.9173999577760696e-02 + <_> + + 0 -1 2265 -3.6357000470161438e-02 + + 4.9606400728225708e-01 -3.4458998590707779e-02 + <_> + + 0 -1 2266 6.8080001510679722e-03 + + -3.0997800827026367e-01 1.7054800689220428e-01 + <_> + + 0 -1 2267 -1.6114000231027603e-02 + + -3.7904599308967590e-01 1.6078999638557434e-01 + <_> + + 0 -1 2268 8.4530003368854523e-03 + + -1.8655499815940857e-01 5.6367701292037964e-01 + <_> + + 0 -1 2269 -1.3752399384975433e-01 + + -5.8989900350570679e-01 1.1749500036239624e-01 + <_> + + 0 -1 2270 1.7688000202178955e-01 + + -1.5424899756908417e-01 9.2911100387573242e-01 + <_> + + 0 -1 2271 7.9309996217489243e-03 + + 3.2190701365470886e-01 -1.6392600536346436e-01 + <_> + + 0 -1 2272 1.0971800237894058e-01 + + -1.5876500308513641e-01 1.0186259746551514e+00 + <_> + + 0 -1 2273 -3.0293000862002373e-02 + + 7.5587302446365356e-01 3.1794998794794083e-02 + <_> + + 0 -1 2274 -2.3118000477552414e-02 + + -8.8451498746871948e-01 -9.5039997249841690e-03 + <_> + + 0 -1 2275 -3.0900000128895044e-03 + + 2.3838299512863159e-01 -1.1606200039386749e-01 + <_> + + 0 -1 2276 -3.3392000943422318e-02 + + -1.8738139867782593e+00 
-6.8502999842166901e-02 + <_> + + 0 -1 2277 1.3190000317990780e-02 + + 1.2919899821281433e-01 -6.7512202262878418e-01 + <_> + + 0 -1 2278 1.4661000110208988e-02 + + -2.4829000234603882e-02 -7.4396800994873047e-01 + <_> + + 0 -1 2279 -1.3248000293970108e-02 + + 4.6820199489593506e-01 -2.4165000766515732e-02 + <_> + + 0 -1 2280 -1.6218999400734901e-02 + + 4.0083798766136169e-01 -2.1255700290203094e-01 + <_> + + 0 -1 2281 -2.9052000492811203e-02 + + -1.5650019645690918e+00 1.4375899732112885e-01 + <_> + + 0 -1 2282 -1.0153199732303619e-01 + + -1.9220689535140991e+00 -6.9559998810291290e-02 + <_> + + 0 -1 2283 3.7753999233245850e-02 + + 1.3396799564361572e-01 -2.2639141082763672e+00 + <_> + + 0 -1 2284 -2.8555598855018616e-01 + + 1.0215270519256592e+00 -1.5232199430465698e-01 + <_> + + 0 -1 2285 1.5360699594020844e-01 + + -9.7409002482891083e-02 4.1662400960922241e-01 + <_> + + 0 -1 2286 -2.1199999901000410e-04 + + 1.1271899938583374e-01 -4.1653999686241150e-01 + <_> + + 0 -1 2287 -2.0597999915480614e-02 + + 6.0540497303009033e-01 6.2467999756336212e-02 + <_> + + 0 -1 2288 3.7353999912738800e-02 + + -1.8919000029563904e-01 4.6464699506759644e-01 + <_> + + 0 -1 2289 5.7275000959634781e-02 + + 1.1565300077199936e-01 -1.3213009834289551e+00 + <_> + + 0 -1 2290 5.1029999740421772e-03 + + -2.8061500191688538e-01 1.9313399493694305e-01 + <_> + + 0 -1 2291 -5.4644998162984848e-02 + + 7.2428500652313232e-01 7.5447998940944672e-02 + <_> + + 0 -1 2292 2.5349000468850136e-02 + + -1.9481800496578217e-01 4.6032801270484924e-01 + <_> + + 0 -1 2293 2.4311000481247902e-02 + + 1.5564100444316864e-01 -4.9913901090621948e-01 + <_> + + 0 -1 2294 3.5962000489234924e-02 + + -5.8573000133037567e-02 -1.5418399572372437e+00 + <_> + + 0 -1 2295 -1.0000699758529663e-01 + + -1.6100039482116699e+00 1.1450500041246414e-01 + <_> + + 0 -1 2296 8.4435999393463135e-02 + + -6.1406999826431274e-02 -1.4673349857330322e+00 + <_> + + 0 -1 2297 1.5947999432682991e-02 + + 1.6287900507450104e-01 -1.1026400327682495e-01 + <_> + + 0 -1 2298 3.3824000507593155e-02 + + -1.7932699620723724e-01 5.7218402624130249e-01 + <_> + + 0 -1 2299 -6.1996001750230789e-02 + + 4.6511812210083008e+00 9.4534002244472504e-02 + <_> + + 0 -1 2300 6.9876998662948608e-02 + + -1.6985900700092316e-01 8.7028998136520386e-01 + <_> + + 0 -1 2301 -2.7916999533772469e-02 + + 9.1042500734329224e-01 5.6827001273632050e-02 + <_> + + 0 -1 2302 -1.2764000333845615e-02 + + 2.2066700458526611e-01 -2.7769100666046143e-01 + <_> + 199 + -3.2573320865631104e+00 + + <_> + + 0 -1 2303 2.1662000566720963e-02 + + -8.9868897199630737e-01 2.9436299204826355e-01 + <_> + + 0 -1 2304 1.0044500231742859e-01 + + -3.7659201025962830e-01 6.0891002416610718e-01 + <_> + + 0 -1 2305 2.6003999635577202e-02 + + -3.8128501176834106e-01 3.9217400550842285e-01 + <_> + + 0 -1 2306 2.8441000729799271e-02 + + -1.8182300031185150e-01 5.8927202224731445e-01 + <_> + + 0 -1 2307 3.8612000644207001e-02 + + -2.2399599850177765e-01 6.3779997825622559e-01 + <_> + + 0 -1 2308 -4.6594999730587006e-02 + + 7.0812201499938965e-01 -1.4666199684143066e-01 + <_> + + 0 -1 2309 -4.2791999876499176e-02 + + 4.7680398821830750e-01 -2.9233199357986450e-01 + <_> + + 0 -1 2310 3.7960000336170197e-03 + + -1.8510299921035767e-01 5.2626699209213257e-01 + <_> + + 0 -1 2311 4.2348999530076981e-02 + + 3.9244998246431351e-02 -8.9197701215744019e-01 + <_> + + 0 -1 2312 1.9598999992012978e-02 + + -2.3358400166034698e-01 4.4146499037742615e-01 + <_> + + 0 -1 2313 8.7400001939386129e-04 + + -4.6063598990440369e-01 
1.7689600586891174e-01 + <_> + + 0 -1 2314 -4.3629999272525311e-03 + + 3.3493199944496155e-01 -2.9893401265144348e-01 + <_> + + 0 -1 2315 1.6973000019788742e-02 + + -1.6408699750900269e-01 1.5993679761886597e+00 + <_> + + 0 -1 2316 3.6063998937606812e-02 + + 2.2601699829101562e-01 -5.3186100721359253e-01 + <_> + + 0 -1 2317 -7.0864997804164886e-02 + + 1.5220500528812408e-01 -4.1914600133895874e-01 + <_> + + 0 -1 2318 -6.3075996935367584e-02 + + -1.4874019622802734e+00 1.2953700125217438e-01 + <_> + + 0 -1 2319 2.9670000076293945e-02 + + -1.9145900011062622e-01 9.8184901475906372e-01 + <_> + + 0 -1 2320 3.7873998284339905e-02 + + 1.3459500670433044e-01 -5.6316298246383667e-01 + <_> + + 0 -1 2321 -3.3289000391960144e-02 + + -1.0828030109405518e+00 -1.1504000052809715e-02 + <_> + + 0 -1 2322 -3.1608998775482178e-02 + + -5.9224498271942139e-01 1.3394799828529358e-01 + <_> + + 0 -1 2323 1.0740000288933516e-03 + + -4.9185800552368164e-01 9.4446003437042236e-02 + <_> + + 0 -1 2324 -7.1556001901626587e-02 + + 5.9710198640823364e-01 -3.9553001523017883e-02 + <_> + + 0 -1 2325 -8.1170000135898590e-02 + + -1.1817820072174072e+00 -2.8254000470042229e-02 + <_> + + 0 -1 2326 4.4860001653432846e-03 + + -6.1028099060058594e-01 2.2619099915027618e-01 + <_> + + 0 -1 2327 -4.2176000773906708e-02 + + -1.1435619592666626e+00 -2.9001999646425247e-02 + <_> + + 0 -1 2328 -6.5640002489089966e-02 + + -1.6470279693603516e+00 1.2810300290584564e-01 + <_> + + 0 -1 2329 1.8188999965786934e-02 + + -3.1149399280548096e-01 2.5739601254463196e-01 + <_> + + 0 -1 2330 -5.1520001143217087e-02 + + -6.9206899404525757e-01 1.5270799398422241e-01 + <_> + + 0 -1 2331 -4.7150999307632446e-02 + + -7.1868300437927246e-01 2.6879999786615372e-03 + <_> + + 0 -1 2332 1.7488999292254448e-02 + + 2.2371199727058411e-01 -5.5381798744201660e-01 + <_> + + 0 -1 2333 -2.5264000520110130e-02 + + 1.0319819450378418e+00 -1.7496499419212341e-01 + <_> + + 0 -1 2334 -4.0745001286268234e-02 + + 4.4961598515510559e-01 3.9349000900983810e-02 + <_> + + 0 -1 2335 -3.7666998803615570e-02 + + -8.5475701093673706e-01 -1.2463999912142754e-02 + <_> + + 0 -1 2336 -1.3411000370979309e-02 + + 5.7845598459243774e-01 -1.7467999830842018e-02 + <_> + + 0 -1 2337 -7.8999997640494257e-05 + + -3.7749201059341431e-01 1.3961799442768097e-01 + <_> + + 0 -1 2338 -1.1415000073611736e-02 + + -2.6186600327491760e-01 2.3712499439716339e-01 + <_> + + 0 -1 2339 3.7200000137090683e-02 + + -2.8626000508666039e-02 -1.2945239543914795e+00 + <_> + + 0 -1 2340 3.4050000831484795e-03 + + 2.0531399548053741e-01 -1.8747499585151672e-01 + <_> + + 0 -1 2341 -2.2483000531792641e-02 + + 6.7027199268341064e-01 -1.9594000279903412e-01 + <_> + + 0 -1 2342 2.3274999111890793e-02 + + 1.7405399680137634e-01 -3.2746300101280212e-01 + <_> + + 0 -1 2343 -1.3917000032961369e-02 + + -8.3954298496246338e-01 -6.3760001212358475e-03 + <_> + + 0 -1 2344 7.5429999269545078e-03 + + -3.4194998443126678e-02 5.8998197317123413e-01 + <_> + + 0 -1 2345 -1.1539000086486340e-02 + + 4.2142799496650696e-01 -2.3510499298572540e-01 + <_> + + 0 -1 2346 5.2501998841762543e-02 + + 6.9303996860980988e-02 7.3226499557495117e-01 + <_> + + 0 -1 2347 5.2715998142957687e-02 + + -1.5688100457191467e-01 1.0907289981842041e+00 + <_> + + 0 -1 2348 -1.1726000346243382e-02 + + -7.0934301614761353e-01 1.6828800737857819e-01 + <_> + + 0 -1 2349 9.5945999026298523e-02 + + -1.6192899644374847e-01 1.0072519779205322e+00 + <_> + + 0 -1 2350 -1.5871999785304070e-02 + + 3.9008399844169617e-01 -5.3777001798152924e-02 + <_> + + 0 -1 2351 
3.4818001091480255e-02 + + 1.7179999500513077e-02 -9.3941801786422729e-01 + <_> + + 0 -1 2352 3.4791998565196991e-02 + + 5.0462998449802399e-02 5.4465699195861816e-01 + <_> + + 0 -1 2353 1.6284000128507614e-02 + + -2.6981300115585327e-01 4.0365299582481384e-01 + <_> + + 0 -1 2354 -4.4319000095129013e-02 + + 8.4399998188018799e-01 3.2882999628782272e-02 + <_> + + 0 -1 2355 -5.5689997971057892e-03 + + 1.5309399366378784e-01 -3.4959799051284790e-01 + <_> + + 0 -1 2356 -6.5842002630233765e-02 + + -9.2711198329925537e-01 1.6800999641418457e-01 + <_> + + 0 -1 2357 -7.3337003588676453e-02 + + 5.1614499092102051e-01 -2.0236000418663025e-01 + <_> + + 0 -1 2358 1.6450000926852226e-02 + + 1.3950599730014801e-01 -4.9301299452781677e-01 + <_> + + 0 -1 2359 -9.2630004510283470e-03 + + -9.0101999044418335e-01 -1.6116000711917877e-02 + <_> + + 0 -1 2360 5.9139998629689217e-03 + + 1.9858199357986450e-01 -1.6731299459934235e-01 + <_> + + 0 -1 2361 -8.4699998842552304e-04 + + 9.4005003571510315e-02 -4.1570898890495300e-01 + <_> + + 0 -1 2362 2.0532900094985962e-01 + + -6.0022000223398209e-02 7.0993602275848389e-01 + <_> + + 0 -1 2363 -1.6883000731468201e-02 + + 2.4392199516296387e-01 -3.0551800131797791e-01 + <_> + + 0 -1 2364 -1.9111000001430511e-02 + + 6.1229902505874634e-01 2.4252999573945999e-02 + <_> + + 0 -1 2365 -2.5962999090552330e-02 + + 9.0764999389648438e-01 -1.6722099483013153e-01 + <_> + + 0 -1 2366 -2.1762000396847725e-02 + + -3.1384700536727905e-01 2.0134599506855011e-01 + <_> + + 0 -1 2367 -2.4119999259710312e-02 + + -6.6588401794433594e-01 7.4559999629855156e-03 + <_> + + 0 -1 2368 4.7129999846220016e-02 + + 5.9533998370170593e-02 8.7804502248764038e-01 + <_> + + 0 -1 2369 -4.5984998345375061e-02 + + 8.0067998170852661e-01 -1.7252300679683685e-01 + <_> + + 0 -1 2370 2.6507999747991562e-02 + + 1.8774099647998810e-01 -6.0850602388381958e-01 + <_> + + 0 -1 2371 -4.8615001142024994e-02 + + 5.8644098043441772e-01 -1.9427700340747833e-01 + <_> + + 0 -1 2372 -1.8562000244855881e-02 + + -2.5587901473045349e-01 1.6326199471950531e-01 + <_> + + 0 -1 2373 1.2678000144660473e-02 + + -1.4228000305593014e-02 -7.6738101243972778e-01 + <_> + + 0 -1 2374 -1.1919999960809946e-03 + + 2.0495000481605530e-01 -1.1404299736022949e-01 + <_> + + 0 -1 2375 -4.9088999629020691e-02 + + -1.0740849971771240e+00 -3.8940999656915665e-02 + <_> + + 0 -1 2376 -1.7436999827623367e-02 + + -5.7973802089691162e-01 1.8584500253200531e-01 + <_> + + 0 -1 2377 -1.4770000241696835e-02 + + -6.6150301694869995e-01 5.3119999356567860e-03 + <_> + + 0 -1 2378 -2.2905200719833374e-01 + + -4.8305100202560425e-01 1.2326399981975555e-01 + <_> + + 0 -1 2379 -1.2707099318504333e-01 + + 5.7452601194381714e-01 -1.9420400261878967e-01 + <_> + + 0 -1 2380 1.0339000262320042e-02 + + -5.4641999304294586e-02 2.4501800537109375e-01 + <_> + + 0 -1 2381 6.9010001607239246e-03 + + 1.2180600315332413e-01 -3.8797399401664734e-01 + <_> + + 0 -1 2382 2.9025399684906006e-01 + + 1.0966199636459351e-01 -30. 
+ <_> + + 0 -1 2383 -2.3804999887943268e-01 + + -1.7352679967880249e+00 -6.3809998333454132e-02 + <_> + + 0 -1 2384 6.2481001019477844e-02 + + 1.3523000478744507e-01 -7.0301097631454468e-01 + <_> + + 0 -1 2385 4.7109997831285000e-03 + + -4.6984100341796875e-01 6.0341998934745789e-02 + <_> + + 0 -1 2386 -2.7815999463200569e-02 + + 6.9807600975036621e-01 1.3719999697059393e-03 + <_> + + 0 -1 2387 -1.7020000144839287e-02 + + 1.6870440244674683e+00 -1.4314800500869751e-01 + <_> + + 0 -1 2388 -4.9754999577999115e-02 + + 7.9497700929641724e-01 7.7199999941512942e-04 + <_> + + 0 -1 2389 -7.4732996523380280e-02 + + -1.0132360458374023e+00 -1.9388999789953232e-02 + <_> + + 0 -1 2390 3.2009001821279526e-02 + + 1.4412100613117218e-01 -4.2139101028442383e-01 + <_> + + 0 -1 2391 -9.4463996589183807e-02 + + 5.0682598352432251e-01 -2.0478899776935577e-01 + <_> + + 0 -1 2392 -1.5426999889314175e-02 + + -1.5811300277709961e-01 1.7806899547576904e-01 + <_> + + 0 -1 2393 -4.0540001355111599e-03 + + -5.4366701841354370e-01 3.1235000118613243e-02 + <_> + + 0 -1 2394 3.0080000869929790e-03 + + -1.7376799881458282e-01 3.0441701412200928e-01 + <_> + + 0 -1 2395 -1.0091999545693398e-02 + + 2.5103801488876343e-01 -2.6224100589752197e-01 + <_> + + 0 -1 2396 -3.8818001747131348e-02 + + 9.3226701021194458e-01 7.2659999132156372e-02 + <_> + + 0 -1 2397 3.4651998430490494e-02 + + -3.3934999257326126e-02 -8.5707902908325195e-01 + <_> + + 0 -1 2398 -4.6729999594390392e-03 + + 3.4969300031661987e-01 -4.8517998307943344e-02 + <_> + + 0 -1 2399 6.8499997723847628e-04 + + 6.6573001444339752e-02 -4.4973799586296082e-01 + <_> + + 0 -1 2400 3.5317000001668930e-02 + + 1.4275799691677094e-01 -4.6726399660110474e-01 + <_> + + 0 -1 2401 -2.3569999262690544e-02 + + -1.0286079645156860e+00 -4.5288000255823135e-02 + <_> + + 0 -1 2402 -1.9109999993816018e-03 + + -1.9652199745178223e-01 2.8661000728607178e-01 + <_> + + 0 -1 2403 -1.6659000888466835e-02 + + -7.7532202005386353e-01 -8.3280000835657120e-03 + <_> + + 0 -1 2404 6.6062200069427490e-01 + + 1.3232499361038208e-01 -3.5266680717468262e+00 + <_> + + 0 -1 2405 1.0970599949359894e-01 + + -1.5547199547290802e-01 1.4674140214920044e+00 + <_> + + 0 -1 2406 1.3500999659299850e-02 + + 1.5233400464057922e-01 -1.3020930290222168e+00 + <_> + + 0 -1 2407 -2.2871999070048332e-02 + + -7.1325999498367310e-01 -8.7040001526474953e-03 + <_> + + 0 -1 2408 -8.1821002066135406e-02 + + 1.1127580404281616e+00 8.3219997584819794e-02 + <_> + + 0 -1 2409 -5.2728001028299332e-02 + + 9.3165099620819092e-01 -1.7103999853134155e-01 + <_> + + 0 -1 2410 -2.5242000818252563e-02 + + -1.9733799993991852e-01 2.5359401106834412e-01 + <_> + + 0 -1 2411 -4.3818999081850052e-02 + + 4.1815200448036194e-01 -2.4585500359535217e-01 + <_> + + 0 -1 2412 -1.8188999965786934e-02 + + -5.1743197441101074e-01 2.0174199342727661e-01 + <_> + + 0 -1 2413 2.3466000333428383e-02 + + -4.3071001768112183e-02 -1.0636579990386963e+00 + <_> + + 0 -1 2414 3.4216001629829407e-02 + + 5.3780999034643173e-02 4.9707201123237610e-01 + <_> + + 0 -1 2415 2.5692999362945557e-02 + + -2.3800100386142731e-01 4.1651499271392822e-01 + <_> + + 0 -1 2416 -2.6565000414848328e-02 + + -8.8574802875518799e-01 1.3365900516510010e-01 + <_> + + 0 -1 2417 6.0942001640796661e-02 + + -2.0669700205326080e-01 5.8309000730514526e-01 + <_> + + 0 -1 2418 1.4474500715732574e-01 + + 1.3282300531864166e-01 -3.1449348926544189e+00 + <_> + + 0 -1 2419 5.3410999476909637e-02 + + -1.7325200140476227e-01 6.9190698862075806e-01 + <_> + + 0 -1 2420 1.1408000253140926e-02 + + 
5.4822001606225967e-02 3.0240398645401001e-01 + <_> + + 0 -1 2421 -2.3179999552667141e-03 + + 1.5820899605751038e-01 -3.1973201036453247e-01 + <_> + + 0 -1 2422 -2.9695000499486923e-02 + + 7.1274799108505249e-01 5.8136001229286194e-02 + <_> + + 0 -1 2423 2.7249999344348907e-02 + + -1.5754100680351257e-01 9.2143797874450684e-01 + <_> + + 0 -1 2424 -3.6200000904500484e-03 + + -3.4548398852348328e-01 2.0220999419689178e-01 + <_> + + 0 -1 2425 -1.2578999623656273e-02 + + -5.5650299787521362e-01 2.0388999953866005e-02 + <_> + + 0 -1 2426 -8.8849000632762909e-02 + + -3.6100010871887207e+00 1.3164199888706207e-01 + <_> + + 0 -1 2427 -1.9256999716162682e-02 + + 5.1908999681472778e-01 -1.9284300506114960e-01 + <_> + + 0 -1 2428 -1.6666999086737633e-02 + + -8.7499998509883881e-02 1.5812499821186066e-01 + <_> + + 0 -1 2429 1.2931999750435352e-02 + + 2.7405999600887299e-02 -5.5123901367187500e-01 + <_> + + 0 -1 2430 -1.3431999832391739e-02 + + 2.3457799851894379e-01 -4.3235000222921371e-02 + <_> + + 0 -1 2431 1.8810000270605087e-02 + + -3.9680998772382736e-02 -9.4373297691345215e-01 + <_> + + 0 -1 2432 -6.4349998719990253e-03 + + 4.5703700184822083e-01 -4.0520001202821732e-03 + <_> + + 0 -1 2433 -2.4249000474810600e-02 + + -7.6248002052307129e-01 -1.9857000559568405e-02 + <_> + + 0 -1 2434 -2.9667999595403671e-02 + + -3.7412509918212891e+00 1.1250600218772888e-01 + <_> + + 0 -1 2435 5.1150000654160976e-03 + + -6.3781797885894775e-01 1.1223999783396721e-02 + <_> + + 0 -1 2436 -5.7819997891783714e-03 + + 1.9374400377273560e-01 -8.2042001187801361e-02 + <_> + + 0 -1 2437 1.6606999561190605e-02 + + -1.6192099452018738e-01 1.1334990262985229e+00 + <_> + + 0 -1 2438 3.8228001445531845e-02 + + 2.1105000749230385e-02 7.6264202594757080e-01 + <_> + + 0 -1 2439 -5.7094000279903412e-02 + + -1.6974929571151733e+00 -5.9762001037597656e-02 + <_> + + 0 -1 2440 -5.3883001208305359e-02 + + 1.1850190162658691e+00 9.0966999530792236e-02 + <_> + + 0 -1 2441 -2.6110000908374786e-03 + + -4.0941199660301208e-01 8.3820998668670654e-02 + <_> + + 0 -1 2442 2.9714399576187134e-01 + + 1.5529899299144745e-01 -1.0995409488677979e+00 + <_> + + 0 -1 2443 -8.9063003659248352e-02 + + 4.8947200179100037e-01 -2.0041200518608093e-01 + <_> + + 0 -1 2444 -5.6193001568317413e-02 + + -2.4581399559974670e-01 1.4365500211715698e-01 + <_> + + 0 -1 2445 3.7004999816417694e-02 + + -4.8168998211622238e-02 -1.2310709953308105e+00 + <_> + + 0 -1 2446 -8.4840003401041031e-03 + + 4.3372601270675659e-01 1.3779999688267708e-02 + <_> + + 0 -1 2447 -2.4379999376833439e-03 + + 1.8949699401855469e-01 -3.2294198870658875e-01 + <_> + + 0 -1 2448 -7.1639999747276306e-02 + + -4.3979001045227051e-01 2.2730199992656708e-01 + <_> + + 0 -1 2449 5.2260002121329308e-03 + + -2.0548400282859802e-01 5.0933301448822021e-01 + <_> + + 0 -1 2450 -6.1360001564025879e-03 + + 3.1157198548316956e-01 7.0680998265743256e-02 + <_> + + 0 -1 2451 1.5595000237226486e-02 + + -3.0934798717498779e-01 1.5627700090408325e-01 + <_> + + 0 -1 2452 2.5995999574661255e-02 + + 1.3821600377559662e-01 -1.7616599798202515e-01 + <_> + + 0 -1 2453 -1.2085000053048134e-02 + + -5.1070201396942139e-01 5.8440998196601868e-02 + <_> + + 0 -1 2454 -6.7836001515388489e-02 + + 4.7757101058959961e-01 -7.1446001529693604e-02 + <_> + + 0 -1 2455 -1.4715000055730343e-02 + + 4.5238900184631348e-01 -1.9861400127410889e-01 + <_> + + 0 -1 2456 2.5118999183177948e-02 + + 1.2954899668693542e-01 -8.6266398429870605e-01 + <_> + + 0 -1 2457 1.8826000392436981e-02 + + -4.1570000350475311e-02 -1.1354700326919556e+00 + <_> 
+ + 0 -1 2458 -2.1263999864459038e-02 + + -3.4738001227378845e-01 1.5779499709606171e-01 + <_> + + 0 -1 2459 9.4609996303915977e-03 + + 4.8639997839927673e-03 -6.1654800176620483e-01 + <_> + + 0 -1 2460 2.2957700490951538e-01 + + 8.1372998654842377e-02 6.9841402769088745e-01 + <_> + + 0 -1 2461 -3.8061998784542084e-02 + + 1.1616369485855103e+00 -1.4976699650287628e-01 + <_> + + 0 -1 2462 -1.3484999537467957e-02 + + -3.2036399841308594e-01 1.7365099489688873e-01 + <_> + + 0 -1 2463 3.6238998174667358e-02 + + -1.8158499896526337e-01 6.1956697702407837e-01 + <_> + + 0 -1 2464 6.7210001870989799e-03 + + 7.9600000753998756e-04 4.2441400885581970e-01 + <_> + + 0 -1 2465 9.6525996923446655e-02 + + -1.4696800708770752e-01 1.2525680065155029e+00 + <_> + + 0 -1 2466 -3.5656999796628952e-02 + + -3.9781698584556580e-01 1.4191399514675140e-01 + <_> + + 0 -1 2467 1.0772000066936016e-02 + + -1.8194000422954559e-01 5.9762197732925415e-01 + <_> + + 0 -1 2468 7.9279996454715729e-02 + + 1.4642499387264252e-01 -7.8836899995803833e-01 + <_> + + 0 -1 2469 3.2841000705957413e-02 + + -6.2408000230789185e-02 -1.4227490425109863e+00 + <_> + + 0 -1 2470 -2.7781000360846519e-02 + + 3.4033098816871643e-01 3.0670000240206718e-02 + <_> + + 0 -1 2471 -4.0339999832212925e-03 + + 3.1084701418876648e-01 -2.2595700621604919e-01 + <_> + + 0 -1 2472 7.4260002002120018e-03 + + -3.8936998695135117e-02 3.1702101230621338e-01 + <_> + + 0 -1 2473 1.1213999986648560e-01 + + -1.7578299343585968e-01 6.5056598186492920e-01 + <_> + + 0 -1 2474 -1.1878100037574768e-01 + + -1.0092990398406982e+00 1.1069700121879578e-01 + <_> + + 0 -1 2475 -4.1584998369216919e-02 + + -5.3806400299072266e-01 1.9905000925064087e-02 + <_> + + 0 -1 2476 -2.7966000139713287e-02 + + 4.8143199086189270e-01 3.3590998500585556e-02 + <_> + + 0 -1 2477 -1.2506400048732758e-01 + + 2.6352199912071228e-01 -2.5737899541854858e-01 + <_> + + 0 -1 2478 2.3666900396347046e-01 + + 3.6508001387119293e-02 9.0655601024627686e-01 + <_> + + 0 -1 2479 -2.9475999996066093e-02 + + -6.0048800706863403e-01 9.5880003646016121e-03 + <_> + + 0 -1 2480 3.7792999297380447e-02 + + 1.5506200492382050e-01 -9.5733499526977539e-01 + <_> + + 0 -1 2481 7.2044000029563904e-02 + + -1.4525899291038513e-01 1.3676730394363403e+00 + <_> + + 0 -1 2482 9.7759999334812164e-03 + + 1.2915999628603458e-02 2.1640899777412415e-01 + <_> + + 0 -1 2483 5.2154000848531723e-02 + + -1.6359999775886536e-02 -8.8356298208236694e-01 + <_> + + 0 -1 2484 -4.3790999799966812e-02 + + 3.5829600691795349e-01 6.5131001174449921e-02 + <_> + + 0 -1 2485 -3.8378998637199402e-02 + + 1.1961040496826172e+00 -1.4971500635147095e-01 + <_> + + 0 -1 2486 -9.8838999867439270e-02 + + -6.1834001541137695e-01 1.2786200642585754e-01 + <_> + + 0 -1 2487 -1.2190700322389603e-01 + + -1.8276120424270630e+00 -6.4862996339797974e-02 + <_> + + 0 -1 2488 -1.1981700360774994e-01 + + -30. 
1.1323300004005432e-01 + <_> + + 0 -1 2489 3.0910000205039978e-02 + + -2.3934000730514526e-01 3.6332899332046509e-01 + <_> + + 0 -1 2490 1.0800999589264393e-02 + + -3.5140000283718109e-02 2.7707898616790771e-01 + <_> + + 0 -1 2491 5.6844998151063919e-02 + + -1.5524299442768097e-01 1.0802700519561768e+00 + <_> + + 0 -1 2492 1.0280000278726220e-03 + + -6.1202999204397202e-02 2.0508000254631042e-01 + <_> + + 0 -1 2493 -2.8273999691009521e-02 + + -6.4778000116348267e-01 2.3917000740766525e-02 + <_> + + 0 -1 2494 -1.6013599932193756e-01 + + 1.0892050266265869e+00 5.8389000594615936e-02 + <_> + + 0 -1 2495 4.9629998393356800e-03 + + -2.5806298851966858e-01 2.0834599435329437e-01 + <_> + + 0 -1 2496 4.6937000006437302e-02 + + 1.3886299729347229e-01 -1.5662620067596436e+00 + <_> + + 0 -1 2497 2.4286000058054924e-02 + + -2.0728300511837006e-01 5.2430999279022217e-01 + <_> + + 0 -1 2498 7.0202000439167023e-02 + + 1.4796899259090424e-01 -1.3095090389251709e+00 + <_> + + 0 -1 2499 9.8120002076029778e-03 + + 2.7906000614166260e-02 -5.0864601135253906e-01 + <_> + + 0 -1 2500 -5.6200999766588211e-02 + + 1.2618130445480347e+00 6.3801996409893036e-02 + <_> + + 0 -1 2501 1.0982800275087357e-01 + + -1.2850099802017212e-01 3.0776169300079346e+00 + <_> + 211 + -3.3703000545501709e+00 + + <_> + + 0 -1 2502 2.0910000428557396e-02 + + -6.8559402227401733e-01 3.8984298706054688e-01 + <_> + + 0 -1 2503 3.5032000392675400e-02 + + -4.7724398970603943e-01 4.5027199387550354e-01 + <_> + + 0 -1 2504 3.9799001067876816e-02 + + -4.7011101245880127e-01 4.2702499032020569e-01 + <_> + + 0 -1 2505 -4.8409998416900635e-03 + + 2.5614300370216370e-01 -6.6556298732757568e-01 + <_> + + 0 -1 2506 2.3439999204128981e-03 + + -4.8083499073982239e-01 2.8013798594474792e-01 + <_> + + 0 -1 2507 2.5312999263405800e-02 + + -2.3948200047016144e-01 4.4191798567771912e-01 + <_> + + 0 -1 2508 -3.2193001359701157e-02 + + 7.6086699962615967e-01 -2.5059100985527039e-01 + <_> + + 0 -1 2509 7.5409002602100372e-02 + + -3.4974598884582520e-01 3.4380298852920532e-01 + <_> + + 0 -1 2510 -1.8469000235199928e-02 + + -7.9085600376129150e-01 3.4788001328706741e-02 + <_> + + 0 -1 2511 -1.2802000157535076e-02 + + 4.7107800841331482e-01 -6.0006000101566315e-02 + <_> + + 0 -1 2512 -2.6598000898957253e-02 + + 6.7116099596023560e-01 -2.4257500469684601e-01 + <_> + + 0 -1 2513 2.1988999098539352e-02 + + 2.4717499315738678e-01 -4.8301699757575989e-01 + <_> + + 0 -1 2514 1.4654099941253662e-01 + + -2.1504099667072296e-01 7.2055900096893311e-01 + <_> + + 0 -1 2515 3.5310001112520695e-03 + + 2.7930998802185059e-01 -3.4339898824691772e-01 + <_> + + 0 -1 2516 9.4010001048445702e-03 + + 5.5861998349428177e-02 -8.2143598794937134e-01 + <_> + + 0 -1 2517 -8.6390003561973572e-03 + + -9.9620598554611206e-01 1.8874999880790710e-01 + <_> + + 0 -1 2518 -3.9193000644445419e-02 + + -1.1945559978485107e+00 -2.9198000207543373e-02 + <_> + + 0 -1 2519 2.4855000898241997e-02 + + 1.4987599849700928e-01 -5.4137802124023438e-01 + <_> + + 0 -1 2520 -3.4995000809431076e-02 + + -1.4210180044174194e+00 -4.2314000427722931e-02 + <_> + + 0 -1 2521 -1.8378999084234238e-02 + + -2.8242599964141846e-01 1.5581800043582916e-01 + <_> + + 0 -1 2522 -1.3592000119388103e-02 + + 4.7317099571228027e-01 -2.1937200427055359e-01 + <_> + + 0 -1 2523 6.2629999592900276e-03 + + -5.9714000672101974e-02 6.0625898838043213e-01 + <_> + + 0 -1 2524 -1.8478000536561012e-02 + + -8.5647201538085938e-01 -1.3783999718725681e-02 + <_> + + 0 -1 2525 1.4236000366508961e-02 + + 1.6654799878597260e-01 
-2.7713999152183533e-01 + <_> + + 0 -1 2526 -3.2547000795602798e-02 + + -1.1728240251541138e+00 -4.0185000747442245e-02 + <_> + + 0 -1 2527 -2.6410000864416361e-03 + + 2.6514300704002380e-01 -5.6343000382184982e-02 + <_> + + 0 -1 2528 -8.7799999164417386e-04 + + 3.6556001752614975e-02 -5.5075198411941528e-01 + <_> + + 0 -1 2529 4.7371998429298401e-02 + + -4.2614001780748367e-02 4.8194900155067444e-01 + <_> + + 0 -1 2530 -7.0790001191198826e-03 + + 2.8698998689651489e-01 -3.2923001050949097e-01 + <_> + + 0 -1 2531 -4.3145999312400818e-02 + + -1.4065419435501099e+00 1.2836399674415588e-01 + <_> + + 0 -1 2532 2.0592000335454941e-02 + + -2.1435299515724182e-01 5.3981798887252808e-01 + <_> + + 0 -1 2533 -2.2367000579833984e-02 + + 3.3718299865722656e-01 4.5212000608444214e-02 + <_> + + 0 -1 2534 5.0039999186992645e-02 + + -2.5121700763702393e-01 4.1750499606132507e-01 + <_> + + 0 -1 2535 6.1794999986886978e-02 + + 4.0084999054670334e-02 6.8779802322387695e-01 + <_> + + 0 -1 2536 -4.1861999779939651e-02 + + 5.3027397394180298e-01 -2.2901999950408936e-01 + <_> + + 0 -1 2537 -3.1959998887032270e-03 + + 2.5161498785018921e-01 -2.1514600515365601e-01 + <_> + + 0 -1 2538 2.4255000054836273e-02 + + 7.2320001199841499e-03 -7.2519099712371826e-01 + <_> + + 0 -1 2539 -1.7303999513387680e-02 + + -4.9958199262619019e-01 1.8394500017166138e-01 + <_> + + 0 -1 2540 -4.1470001451671124e-03 + + 8.5211999714374542e-02 -4.6364700794219971e-01 + <_> + + 0 -1 2541 -1.4369999989867210e-02 + + -5.2258902788162231e-01 2.3892599344253540e-01 + <_> + + 0 -1 2542 -9.0399999171495438e-03 + + -6.3250398635864258e-01 3.2551001757383347e-02 + <_> + + 0 -1 2543 -1.2373100221157074e-01 + + 1.2856210470199585e+00 7.6545000076293945e-02 + <_> + + 0 -1 2544 -8.2221999764442444e-02 + + 8.3208197355270386e-01 -1.8590599298477173e-01 + <_> + + 0 -1 2545 6.5659001469612122e-02 + + 1.1298800259828568e-01 -30. 
+ <_> + + 0 -1 2546 -3.1582999974489212e-02 + + -1.3485900163650513e+00 -4.7097001224756241e-02 + <_> + + 0 -1 2547 -7.9636000096797943e-02 + + -1.3533639907836914e+00 1.5668800473213196e-01 + <_> + + 0 -1 2548 -1.8880000337958336e-02 + + 4.0300300717353821e-01 -2.5148901343345642e-01 + <_> + + 0 -1 2549 -5.0149997696280479e-03 + + -2.6287099719047546e-01 1.8582500517368317e-01 + <_> + + 0 -1 2550 -1.2218000367283821e-02 + + 5.8692401647567749e-01 -1.9427700340747833e-01 + <_> + + 0 -1 2551 1.2710000155493617e-03 + + -1.6688999533653259e-01 2.3006899654865265e-01 + <_> + + 0 -1 2552 2.9743999242782593e-02 + + 1.2520000338554382e-02 -6.6723597049713135e-01 + <_> + + 0 -1 2553 2.8175000101327896e-02 + + -1.7060000449419022e-02 6.4579397439956665e-01 + <_> + + 0 -1 2554 3.0345000326633453e-02 + + -2.4178700149059296e-01 3.4878900647163391e-01 + <_> + + 0 -1 2555 -1.7325999215245247e-02 + + -5.3599399328231812e-01 2.0995999872684479e-01 + <_> + + 0 -1 2556 -8.4178000688552856e-02 + + 7.5093299150466919e-01 -1.7593200504779816e-01 + <_> + + 0 -1 2557 7.4950000271201134e-03 + + -1.6188099980354309e-01 3.0657500028610229e-01 + <_> + + 0 -1 2558 5.6494999676942825e-02 + + -1.7318800091743469e-01 1.0016150474548340e+00 + <_> + + 0 -1 2559 -5.2939997985959053e-03 + + 2.3417599499225616e-01 -6.5347000956535339e-02 + <_> + + 0 -1 2560 -1.4945000410079956e-02 + + 2.5018900632858276e-01 -3.0591198801994324e-01 + <_> + + 0 -1 2561 5.4919000715017319e-02 + + 1.3121999800205231e-01 -9.3765097856521606e-01 + <_> + + 0 -1 2562 -1.9721999764442444e-02 + + -8.3978497982025146e-01 -2.3473000153899193e-02 + <_> + + 0 -1 2563 -6.7158997058868408e-02 + + 2.3586840629577637e+00 8.2970999181270599e-02 + <_> + + 0 -1 2564 -1.4325999654829502e-02 + + 1.8814499676227570e-01 -3.1221601366996765e-01 + <_> + + 0 -1 2565 2.9841000214219093e-02 + + 1.4825099706649780e-01 -8.4681701660156250e-01 + <_> + + 0 -1 2566 5.1883000880479813e-02 + + -4.3731000274419785e-02 -1.3366169929504395e+00 + <_> + + 0 -1 2567 4.1127000004053116e-02 + + 1.7660099267959595e-01 -6.0904097557067871e-01 + <_> + + 0 -1 2568 -1.2865099310874939e-01 + + -9.8701000213623047e-01 -3.7785001099109650e-02 + <_> + + 0 -1 2569 2.4170000106096268e-03 + + -1.6119599342346191e-01 3.2675701379776001e-01 + <_> + + 0 -1 2570 7.7030002139508724e-03 + + -2.3841500282287598e-01 2.9319399595260620e-01 + <_> + + 0 -1 2571 4.5520000159740448e-02 + + 1.4424599707126617e-01 -1.5010160207748413e+00 + <_> + + 0 -1 2572 -7.8700996935367584e-02 + + -1.0394560098648071e+00 -4.5375999063253403e-02 + <_> + + 0 -1 2573 7.8619997948408127e-03 + + 1.9633600115776062e-01 -1.4472399652004242e-01 + <_> + + 0 -1 2574 -1.3458999805152416e-02 + + -9.0634697675704956e-01 -3.8049001246690750e-02 + <_> + + 0 -1 2575 2.8827000409364700e-02 + + -2.9473999515175819e-02 6.0058397054672241e-01 + <_> + + 0 -1 2576 -2.7365999296307564e-02 + + -9.9804002046585083e-01 -3.8653001189231873e-02 + <_> + + 0 -1 2577 -7.2917997837066650e-02 + + 7.3361498117446899e-01 5.7440001517534256e-02 + <_> + + 0 -1 2578 -1.3988999649882317e-02 + + 2.7892601490020752e-01 -2.6516300439834595e-01 + <_> + + 0 -1 2579 4.3242998421192169e-02 + + 4.7760000452399254e-03 3.5925900936126709e-01 + <_> + + 0 -1 2580 2.9533000662922859e-02 + + -2.0083999633789062e-01 5.1202899217605591e-01 + <_> + + 0 -1 2581 -3.1897000968456268e-02 + + 6.4721697568893433e-01 -1.3760000001639128e-03 + <_> + + 0 -1 2582 3.7868998944759369e-02 + + -1.8363800644874573e-01 6.1343097686767578e-01 + <_> + + 0 -1 2583 -2.2417999804019928e-02 + + 
-2.9187899827957153e-01 1.8194800615310669e-01 + <_> + + 0 -1 2584 5.8958999812602997e-02 + + -6.6451996564865112e-02 -1.9290030002593994e+00 + <_> + + 0 -1 2585 3.1222999095916748e-02 + + -1.2732000090181828e-02 6.1560797691345215e-01 + <_> + + 0 -1 2586 3.7484999746084213e-02 + + -2.0856900513172150e-01 4.4363999366760254e-01 + <_> + + 0 -1 2587 -2.0966000854969025e-02 + + -3.5712799429893494e-01 2.4252200126647949e-01 + <_> + + 0 -1 2588 -2.5477999821305275e-02 + + 1.0846560001373291e+00 -1.5054400265216827e-01 + <_> + + 0 -1 2589 -7.2570000775158405e-03 + + 2.1302600204944611e-01 -1.8308199942111969e-01 + <_> + + 0 -1 2590 -5.0983000546693802e-02 + + 5.1736801862716675e-01 -1.8833099305629730e-01 + <_> + + 0 -1 2591 -2.0640000700950623e-02 + + -4.4030201435089111e-01 2.2745999693870544e-01 + <_> + + 0 -1 2592 1.0672999545931816e-02 + + 3.5059999674558640e-02 -5.1665002107620239e-01 + <_> + + 0 -1 2593 3.1895998865365982e-02 + + 1.3228000141680241e-02 3.4915199875831604e-01 + <_> + + 0 -1 2594 -2.3824999108910561e-02 + + 3.4118801355361938e-01 -2.1510200202465057e-01 + <_> + + 0 -1 2595 -6.0680001042783260e-03 + + 3.2937398552894592e-01 -2.8523799777030945e-01 + <_> + + 0 -1 2596 2.3881999775767326e-02 + + -2.5333800911903381e-01 2.6296100020408630e-01 + <_> + + 0 -1 2597 2.7966000139713287e-02 + + 1.4049099385738373e-01 -4.9887099862098694e-01 + <_> + + 0 -1 2598 1.4603000134229660e-02 + + -1.5395999886095524e-02 -7.6958000659942627e-01 + <_> + + 0 -1 2599 1.0872399806976318e-01 + + 1.9069600105285645e-01 -3.2393100857734680e-01 + <_> + + 0 -1 2600 -1.4038000255823135e-02 + + 3.4924700856208801e-01 -2.2358700633049011e-01 + <_> + + 0 -1 2601 4.0440000593662262e-03 + + -3.8329001516103745e-02 5.1177299022674561e-01 + <_> + + 0 -1 2602 -4.9769999459385872e-03 + + -4.2888298630714417e-01 4.9173999577760696e-02 + <_> + + 0 -1 2603 -8.5183002054691315e-02 + + 6.6624599695205688e-01 7.8079998493194580e-03 + <_> + + 0 -1 2604 2.1559998858720064e-03 + + -4.9135199189186096e-01 6.9555997848510742e-02 + <_> + + 0 -1 2605 3.6384499073028564e-01 + + 1.2997099757194519e-01 -1.8949509859085083e+00 + <_> + + 0 -1 2606 2.2082500159740448e-01 + + -5.7211998850107193e-02 -1.4281120300292969e+00 + <_> + + 0 -1 2607 -1.6140000894665718e-02 + + -5.7589399814605713e-01 1.8062500655651093e-01 + <_> + + 0 -1 2608 -4.8330001533031464e-02 + + 9.7308498620986938e-01 -1.6513000428676605e-01 + <_> + + 0 -1 2609 1.7529999837279320e-02 + + 1.7932699620723724e-01 -2.7948901057243347e-01 + <_> + + 0 -1 2610 -3.4309998154640198e-02 + + -8.1072497367858887e-01 -1.6596000641584396e-02 + <_> + + 0 -1 2611 -4.5830002054572105e-03 + + 2.7908998727798462e-01 -7.4519999325275421e-03 + <_> + + 0 -1 2612 1.2896400690078735e-01 + + -1.3508500158786774e-01 2.5411539077758789e+00 + <_> + + 0 -1 2613 3.0361000448465347e-02 + + -6.8419001996517181e-02 2.8734099864959717e-01 + <_> + + 0 -1 2614 4.4086001813411713e-02 + + -1.8135899305343628e-01 6.5413200855255127e-01 + <_> + + 0 -1 2615 3.0159999150782824e-03 + + -1.5690499544143677e-01 2.6963800191879272e-01 + <_> + + 0 -1 2616 -2.6336999610066414e-02 + + 2.9175600409507751e-01 -2.5274100899696350e-01 + <_> + + 0 -1 2617 -2.7866000309586525e-02 + + 4.4387501478195190e-01 5.5038001388311386e-02 + <_> + + 0 -1 2618 1.1725000105798244e-02 + + -1.9346499443054199e-01 4.6656700968742371e-01 + <_> + + 0 -1 2619 1.5689999563619494e-03 + + -8.2360003143548965e-03 2.5700899958610535e-01 + <_> + + 0 -1 2620 -3.5550000611692667e-03 + + -4.2430898547172546e-01 7.1174003183841705e-02 + <_> + + 0 
-1 2621 -3.1695000827312469e-02 + + -8.5393500328063965e-01 1.6916200518608093e-01 + <_> + + 0 -1 2622 -3.2097000628709793e-02 + + 8.3784902095794678e-01 -1.7597299814224243e-01 + <_> + + 0 -1 2623 1.5544199943542480e-01 + + 9.9550001323223114e-02 2.3873300552368164e+00 + <_> + + 0 -1 2624 8.8045999407768250e-02 + + -1.8725299835205078e-01 6.2384301424026489e-01 + <_> + + 0 -1 2625 -1.6720000421628356e-03 + + 2.5008699297904968e-01 -6.5118998289108276e-02 + <_> + + 0 -1 2626 9.3409996479749680e-03 + + -3.5378900170326233e-01 1.0715000331401825e-01 + <_> + + 0 -1 2627 3.7138000130653381e-02 + + 1.6387000679969788e-01 -9.1718399524688721e-01 + <_> + + 0 -1 2628 8.0183997750282288e-02 + + -1.4812999963760376e-01 1.4895190000534058e+00 + <_> + + 0 -1 2629 -7.9100002767518163e-04 + + -2.1326899528503418e-01 1.9676400721073151e-01 + <_> + + 0 -1 2630 -5.0400001928210258e-03 + + -7.1318697929382324e-01 1.8240000354126096e-03 + <_> + + 0 -1 2631 1.1962399631738663e-01 + + 3.3098999410867691e-02 1.0441709756851196e+00 + <_> + + 0 -1 2632 -4.5280000194907188e-03 + + -2.7308499813079834e-01 2.7229800820350647e-01 + <_> + + 0 -1 2633 -2.9639000073075294e-02 + + 3.6225798726081848e-01 5.6795001029968262e-02 + <_> + + 0 -1 2634 2.6650000363588333e-02 + + -4.8041000962257385e-02 -9.6723502874374390e-01 + <_> + + 0 -1 2635 4.4422000646591187e-02 + + 1.3052900135517120e-01 -3.5077300667762756e-01 + <_> + + 0 -1 2636 -2.4359999224543571e-02 + + -1.0766899585723877e+00 -5.1222998648881912e-02 + <_> + + 0 -1 2637 1.9734999164938927e-02 + + 2.6238000020384789e-02 2.8070500493049622e-01 + <_> + + 0 -1 2638 5.4930001497268677e-03 + + -2.6111298799514771e-01 2.1011400222778320e-01 + <_> + + 0 -1 2639 -2.3200300335884094e-01 + + -1.7748440504074097e+00 1.1482600122690201e-01 + <_> + + 0 -1 2640 -2.5614000856876373e-02 + + 2.9900801181793213e-01 -2.2502499818801880e-01 + <_> + + 0 -1 2641 -6.4949998632073402e-03 + + 1.9563800096511841e-01 -9.9762998521327972e-02 + <_> + + 0 -1 2642 3.9840000681579113e-03 + + -4.3021500110626221e-01 8.1261001527309418e-02 + <_> + + 0 -1 2643 -3.5813000053167343e-02 + + -5.0987398624420166e-01 1.6345900297164917e-01 + <_> + + 0 -1 2644 -1.4169000089168549e-02 + + 7.7978098392486572e-01 -1.7476299405097961e-01 + <_> + + 0 -1 2645 -1.2642100453376770e-01 + + -6.3047897815704346e-01 1.2728300690650940e-01 + <_> + + 0 -1 2646 6.8677999079227448e-02 + + -4.6447999775409698e-02 -1.1128979921340942e+00 + <_> + + 0 -1 2647 8.5864998400211334e-02 + + 1.1835400015115738e-01 -4.8235158920288086e+00 + <_> + + 0 -1 2648 1.5511999838054180e-02 + + -1.7467999830842018e-02 -6.3693398237228394e-01 + <_> + + 0 -1 2649 8.1091001629829407e-02 + + 8.6133003234863281e-02 2.4559431076049805e+00 + <_> + + 0 -1 2650 1.8495000898838043e-02 + + 4.0229000151157379e-02 -5.0858199596405029e-01 + <_> + + 0 -1 2651 -8.6320996284484863e-02 + + -1.9006760120391846e+00 1.1019100248813629e-01 + <_> + + 0 -1 2652 7.2355002164840698e-02 + + -6.2111999839544296e-02 -1.4165179729461670e+00 + <_> + + 0 -1 2653 -7.8179001808166504e-02 + + 8.8849300146102905e-01 4.2369998991489410e-02 + <_> + + 0 -1 2654 9.6681997179985046e-02 + + -2.2094200551509857e-01 3.3575099706649780e-01 + <_> + + 0 -1 2655 -3.9875999093055725e-02 + + 5.7804799079895020e-01 4.5347999781370163e-02 + <_> + + 0 -1 2656 -9.5349997282028198e-03 + + -5.4175698757171631e-01 3.2399999909102917e-03 + <_> + + 0 -1 2657 4.0600000647827983e-04 + + -8.1549003720283508e-02 3.5837900638580322e-01 + <_> + + 0 -1 2658 1.2107999995350838e-02 + + -2.0280399918556213e-01 
4.3768000602722168e-01 + <_> + + 0 -1 2659 -2.0873999223113060e-02 + + 4.1469898819923401e-01 -4.5568000525236130e-02 + <_> + + 0 -1 2660 5.7888001203536987e-02 + + -2.9009999707341194e-02 -9.1822302341461182e-01 + <_> + + 0 -1 2661 1.3200000103097409e-04 + + -1.1772400140762329e-01 2.0000000298023224e-01 + <_> + + 0 -1 2662 -1.7137000337243080e-02 + + 3.3004799485206604e-01 -2.3055200278759003e-01 + <_> + + 0 -1 2663 3.0655000358819962e-02 + + -2.1545000374317169e-02 2.6878198981285095e-01 + <_> + + 0 -1 2664 -7.8699999721720815e-04 + + -4.4100698828697205e-01 4.9157999455928802e-02 + <_> + + 0 -1 2665 8.8036999106407166e-02 + + 1.1782000213861465e-01 -2.8293309211730957e+00 + <_> + + 0 -1 2666 -3.9028998464345932e-02 + + 9.1777199506759644e-01 -1.5827399492263794e-01 + <_> + + 0 -1 2667 8.0105997622013092e-02 + + 1.1289200186729431e-01 -1.9937280416488647e+00 + <_> + + 0 -1 2668 3.9538998156785965e-02 + + -1.4357399940490723e-01 1.3085240125656128e+00 + <_> + + 0 -1 2669 2.0684000104665756e-02 + + 2.0048099756240845e-01 -4.4186998158693314e-02 + <_> + + 0 -1 2670 -6.7037999629974365e-02 + + 3.2618600130081177e-01 -2.0550400018692017e-01 + <_> + + 0 -1 2671 4.6815000474452972e-02 + + 1.5825299918651581e-01 -9.5535099506378174e-01 + <_> + + 0 -1 2672 7.8443996608257294e-02 + + -7.4651002883911133e-02 -2.1161499023437500e+00 + <_> + + 0 -1 2673 6.6380001604557037e-02 + + 1.1641900241374969e-01 -1.6113519668579102e+00 + <_> + + 0 -1 2674 3.0053999274969101e-02 + + -1.6562600433826447e-01 7.0025402307510376e-01 + <_> + + 0 -1 2675 1.7119999974966049e-02 + + 2.2627699375152588e-01 -4.0114998817443848e-01 + <_> + + 0 -1 2676 2.0073000341653824e-02 + + -1.9389699399471283e-01 4.4420298933982849e-01 + <_> + + 0 -1 2677 3.3101998269557953e-02 + + 1.1637499928474426e-01 -1.5771679878234863e+00 + <_> + + 0 -1 2678 -1.4882000163197517e-02 + + -8.9680302143096924e-01 -4.2010001838207245e-02 + <_> + + 0 -1 2679 -1.0281000286340714e-02 + + 3.5602998733520508e-01 -1.3124000281095505e-02 + <_> + + 0 -1 2680 -2.8695000335574150e-02 + + -4.6039599180221558e-01 2.6801999658346176e-02 + <_> + + 0 -1 2681 -4.7189998440444469e-03 + + 2.3788799345493317e-01 -6.5518997609615326e-02 + <_> + + 0 -1 2682 3.2201600074768066e-01 + + -2.8489999473094940e-02 -8.4234601259231567e-01 + <_> + + 0 -1 2683 -1.7045000568032265e-02 + + -5.0938802957534790e-01 1.6057600080966949e-01 + <_> + + 0 -1 2684 -7.3469998314976692e-03 + + -5.4154998064041138e-01 4.7320001758635044e-03 + <_> + + 0 -1 2685 -3.0001999810338020e-02 + + -8.8785797357559204e-01 1.3621799647808075e-01 + <_> + + 0 -1 2686 -1.1292999610304832e-02 + + 8.0615198612213135e-01 -1.6159500181674957e-01 + <_> + + 0 -1 2687 4.7749998047947884e-03 + + 1.2968000024557114e-02 5.5079901218414307e-01 + <_> + + 0 -1 2688 5.0710001960396767e-03 + + -4.5728001743555069e-02 -1.0766259431838989e+00 + <_> + + 0 -1 2689 1.9344100356101990e-01 + + 7.1262001991271973e-02 1.1694519519805908e+00 + <_> + + 0 -1 2690 5.3750001825392246e-03 + + -1.9736200571060181e-01 3.8206899166107178e-01 + <_> + + 0 -1 2691 -6.8276003003120422e-02 + + -5.4372339248657227e+00 1.1151900142431259e-01 + <_> + + 0 -1 2692 -3.4933000802993774e-02 + + 4.4793400168418884e-01 -1.8657900393009186e-01 + <_> + + 0 -1 2693 5.1219998858869076e-03 + + -1.4871999621391296e-02 1.8413899838924408e-01 + <_> + + 0 -1 2694 9.5311999320983887e-02 + + -1.5117099881172180e-01 9.4991499185562134e-01 + <_> + + 0 -1 2695 -6.2849000096321106e-02 + + 4.6473601460456848e-01 3.8405001163482666e-02 + <_> + + 0 -1 2696 
-1.7040699720382690e-01 + + -1.6499999761581421e+00 -6.3236996531486511e-02 + <_> + + 0 -1 2697 1.0583999566733837e-02 + + -3.8348998874425888e-02 4.1913801431655884e-01 + <_> + + 0 -1 2698 -4.1579000651836395e-02 + + 3.4461900591850281e-01 -2.1187700331211090e-01 + <_> + + 0 -1 2699 1.2718600034713745e-01 + + 1.2398199737071991e-01 -2.1254889965057373e+00 + <_> + + 0 -1 2700 8.2557000219821930e-02 + + -6.2024001032114029e-02 -1.4875819683074951e+00 + <_> + + 0 -1 2701 8.5293002426624298e-02 + + 1.7087999731302261e-02 3.2076600193977356e-01 + <_> + + 0 -1 2702 5.5544000118970871e-02 + + -2.7414000034332275e-01 1.8976399302482605e-01 + <_> + + 0 -1 2703 4.5650000683963299e-03 + + -1.7920200526714325e-01 2.7967301011085510e-01 + <_> + + 0 -1 2704 1.2997999787330627e-02 + + -3.2297500967979431e-01 2.6941800117492676e-01 + <_> + + 0 -1 2705 5.7891998440027237e-02 + + 1.2644399702548981e-01 -6.0713499784469604e-01 + <_> + + 0 -1 2706 -2.2824000567197800e-02 + + -4.9682098627090454e-01 2.2376999258995056e-02 + <_> + + 0 -1 2707 4.8312000930309296e-02 + + 4.3607000261545181e-02 4.8537799715995789e-01 + <_> + + 0 -1 2708 2.5714000687003136e-02 + + -4.2950998991727829e-02 -9.3023502826690674e-01 + <_> + + 0 -1 2709 6.9269998930394650e-03 + + -2.9680000152438879e-03 3.4296301007270813e-01 + <_> + + 0 -1 2710 -3.4446999430656433e-02 + + -1.5299769639968872e+00 -6.1014998704195023e-02 + <_> + + 0 -1 2711 2.9387999325990677e-02 + + 3.7595998495817184e-02 6.4172399044036865e-01 + <_> + + 0 -1 2712 -2.4319998919963837e-03 + + 9.9088996648788452e-02 -3.9688101410865784e-01 + <_> + 200 + -2.9928278923034668e+00 + + <_> + + 0 -1 2713 -9.5944002270698547e-02 + + 6.2419098615646362e-01 -4.5875200629234314e-01 + <_> + + 0 -1 2714 1.6834000125527382e-02 + + -9.3072801828384399e-01 2.1563600003719330e-01 + <_> + + 0 -1 2715 2.6049999520182610e-02 + + -4.0532299876213074e-01 4.2256599664688110e-01 + <_> + + 0 -1 2716 3.6500001442618668e-04 + + 9.5288001000881195e-02 -6.3298100233078003e-01 + <_> + + 0 -1 2717 -6.6940002143383026e-03 + + 3.7243801355361938e-01 -3.0332401394844055e-01 + <_> + + 0 -1 2718 1.8874000757932663e-02 + + -2.3357200622558594e-01 4.0330699086189270e-01 + <_> + + 0 -1 2719 -1.6300000424962491e-04 + + 4.2886998504400253e-02 -7.7796798944473267e-01 + <_> + + 0 -1 2720 -7.6259002089500427e-02 + + -4.9628499150276184e-01 1.6335399448871613e-01 + <_> + + 0 -1 2721 5.0149001181125641e-02 + + 3.2747000455856323e-02 -8.0047899484634399e-01 + <_> + + 0 -1 2722 -2.9239999130368233e-03 + + -5.0002801418304443e-01 2.5480601191520691e-01 + <_> + + 0 -1 2723 1.6243999823927879e-02 + + 3.8913000375032425e-02 -7.0724898576736450e-01 + <_> + + 0 -1 2724 3.7811998277902603e-02 + + -6.6267997026443481e-02 7.3868799209594727e-01 + <_> + + 0 -1 2725 -1.2319999746978283e-02 + + 4.8696398735046387e-01 -2.4485599994659424e-01 + <_> + + 0 -1 2726 5.8003999292850494e-02 + + 1.3459099829196930e-01 -1.3232100009918213e-01 + <_> + + 0 -1 2727 4.8630000092089176e-03 + + -4.4172900915145874e-01 1.4005599915981293e-01 + <_> + + 0 -1 2728 4.5690998435020447e-02 + + 3.1217999756336212e-02 8.9818298816680908e-01 + <_> + + 0 -1 2729 2.1321000531315804e-02 + + 1.2008000165224075e-02 -8.6066198348999023e-01 + <_> + + 0 -1 2730 1.5679100155830383e-01 + + 1.4055999927222729e-02 8.5332900285720825e-01 + <_> + + 0 -1 2731 -1.0328999720513821e-02 + + 2.9022800922393799e-01 -2.9478800296783447e-01 + <_> + + 0 -1 2732 2.4290001019835472e-03 + + -4.0439900755882263e-01 1.9400200247764587e-01 + <_> + + 0 -1 2733 -2.3338999599218369e-02 + 
+ 3.2945200800895691e-01 -2.5712698698043823e-01 + <_> + + 0 -1 2734 -6.8970001302659512e-03 + + -5.3352999687194824e-01 2.1635200083255768e-01 + <_> + + 0 -1 2735 -3.4403000026941299e-02 + + -1.4425489902496338e+00 -4.4682998210191727e-02 + <_> + + 0 -1 2736 -2.1235000342130661e-02 + + -7.9017502069473267e-01 1.9084100425243378e-01 + <_> + + 0 -1 2737 2.0620001014322042e-03 + + -2.6931199431419373e-01 3.1488001346588135e-01 + <_> + + 0 -1 2738 -4.2190002277493477e-03 + + -5.4464399814605713e-01 1.6574600338935852e-01 + <_> + + 0 -1 2739 -1.4334999956190586e-02 + + 2.2105000913143158e-02 -6.2342500686645508e-01 + <_> + + 0 -1 2740 -8.2120001316070557e-03 + + -4.9884998798370361e-01 1.9237099587917328e-01 + <_> + + 0 -1 2741 -9.3350000679492950e-03 + + -7.9131197929382324e-01 -1.4143999665975571e-02 + <_> + + 0 -1 2742 -3.7937998771667480e-02 + + 7.9841297864913940e-01 -3.3799000084400177e-02 + <_> + + 0 -1 2743 4.7059999778866768e-03 + + -3.3163401484489441e-01 2.0726299285888672e-01 + <_> + + 0 -1 2744 -4.4499998912215233e-03 + + -2.7256301045417786e-01 1.8402199447154999e-01 + <_> + + 0 -1 2745 5.2189999260008335e-03 + + -5.3096002340316772e-01 5.2607998251914978e-02 + <_> + + 0 -1 2746 -9.5399999991059303e-03 + + -5.6485402584075928e-01 1.9269399344921112e-01 + <_> + + 0 -1 2747 4.4969998300075531e-02 + + -1.7411500215530396e-01 9.5382601022720337e-01 + <_> + + 0 -1 2748 1.4209000393748283e-02 + + -9.1949000954627991e-02 2.4836100637912750e-01 + <_> + + 0 -1 2749 1.6380199790000916e-01 + + -5.8497000485658646e-02 -1.6404409408569336e+00 + <_> + + 0 -1 2750 2.5579999200999737e-03 + + 2.3447999358177185e-01 -9.2734001576900482e-02 + <_> + + 0 -1 2751 -3.8499999791383743e-03 + + 1.7880700528621674e-01 -3.5844099521636963e-01 + <_> + + 0 -1 2752 -2.5221999734640121e-02 + + -4.2903000116348267e-01 2.0244500041007996e-01 + <_> + + 0 -1 2753 -1.9415000453591347e-02 + + 5.8016300201416016e-01 -1.8806399405002594e-01 + <_> + + 0 -1 2754 1.4419999904930592e-02 + + 3.2846998423337936e-02 8.1980502605438232e-01 + <_> + + 0 -1 2755 5.1582999527454376e-02 + + 6.9176003336906433e-02 -4.5866298675537109e-01 + <_> + + 0 -1 2756 -3.7960000336170197e-02 + + -1.2553000450134277e+00 1.4332899451255798e-01 + <_> + + 0 -1 2757 -2.9560999944806099e-02 + + 5.3151798248291016e-01 -2.0596499741077423e-01 + <_> + + 0 -1 2758 -3.9110999554395676e-02 + + 1.1658719778060913e+00 5.3897000849246979e-02 + <_> + + 0 -1 2759 -2.9159000143408775e-02 + + 3.9307600259780884e-01 -2.2184500098228455e-01 + <_> + + 0 -1 2760 -8.3617001771926880e-02 + + -7.3744499683380127e-01 1.4268200099468231e-01 + <_> + + 0 -1 2761 4.2004001140594482e-01 + + -1.4277400076389313e-01 1.7894840240478516e+00 + <_> + + 0 -1 2762 6.0005001723766327e-02 + + 1.1976700276136398e-01 -1.8886189460754395e+00 + <_> + + 0 -1 2763 -1.8981000408530235e-02 + + -1.4148449897766113e+00 -5.6522998958826065e-02 + <_> + + 0 -1 2764 -6.0049998573958874e-03 + + 4.4170799851417542e-01 -1.0200800001621246e-01 + <_> + + 0 -1 2765 -5.8214001357555389e-02 + + -1.3918470144271851e+00 -4.8268999904394150e-02 + <_> + + 0 -1 2766 -1.2271000072360039e-02 + + 5.1317697763442993e-01 -9.3696996569633484e-02 + <_> + + 0 -1 2767 4.6585999429225922e-02 + + -5.7484000921249390e-02 -1.4283169507980347e+00 + <_> + + 0 -1 2768 1.2110000243410468e-03 + + -8.0891996622085571e-02 3.2333201169967651e-01 + <_> + + 0 -1 2769 -8.8642001152038574e-02 + + -8.6449098587036133e-01 -3.3146999776363373e-02 + <_> + + 0 -1 2770 -2.3184999823570251e-02 + + 5.2162200212478638e-01 
-1.6168000176548958e-02 + <_> + + 0 -1 2771 4.3090000748634338e-02 + + -1.6153800487518311e-01 1.0915000438690186e+00 + <_> + + 0 -1 2772 2.0599999697878957e-04 + + -1.7091499269008636e-01 3.1236699223518372e-01 + <_> + + 0 -1 2773 8.9159999042749405e-03 + + -6.7039998248219490e-03 -6.8810397386550903e-01 + <_> + + 0 -1 2774 -1.7752999439835548e-02 + + 6.3292801380157471e-01 -4.2360001243650913e-03 + <_> + + 0 -1 2775 6.2299999408423901e-03 + + -3.3637198805809021e-01 1.2790599465370178e-01 + <_> + + 0 -1 2776 2.2770000621676445e-02 + + -3.4703999757766724e-02 3.9141800999641418e-01 + <_> + + 0 -1 2777 -2.1534999832510948e-02 + + 6.4765101671218872e-01 -2.0097799599170685e-01 + <_> + + 0 -1 2778 6.1758998781442642e-02 + + 5.4297000169754028e-02 9.0700101852416992e-01 + <_> + + 0 -1 2779 -7.8069999814033508e-02 + + 6.5523397922515869e-01 -1.9754399359226227e-01 + <_> + + 0 -1 2780 1.1315000243484974e-02 + + 1.9385300576686859e-01 -5.1707297563552856e-01 + <_> + + 0 -1 2781 -2.5590000674128532e-02 + + -9.3096500635147095e-01 -3.1546998769044876e-02 + <_> + + 0 -1 2782 -3.8058999925851822e-02 + + -6.8326902389526367e-01 1.2709100544452667e-01 + <_> + + 0 -1 2783 9.7970003262162209e-03 + + 1.5523999929428101e-02 -6.3347899913787842e-01 + <_> + + 0 -1 2784 -1.3841999694705009e-02 + + 1.0060529708862305e+00 6.2812998890876770e-02 + <_> + + 0 -1 2785 8.3459997549653053e-03 + + -2.3383200168609619e-01 3.0982699990272522e-01 + <_> + + 0 -1 2786 -7.1439996361732483e-02 + + -7.2505402565002441e-01 1.7148299515247345e-01 + <_> + + 0 -1 2787 1.0006000287830830e-02 + + -2.2071999311447144e-01 3.5266199707984924e-01 + <_> + + 0 -1 2788 1.1005300283432007e-01 + + 1.6662000119686127e-01 -7.4318999052047729e-01 + <_> + + 0 -1 2789 3.5310998558998108e-02 + + -2.3982700705528259e-01 4.1435998678207397e-01 + <_> + + 0 -1 2790 -1.1174699664115906e-01 + + 5.1045399904251099e-01 2.2319999989122152e-03 + <_> + + 0 -1 2791 -1.1367800086736679e-01 + + 9.0475201606750488e-01 -1.6615299880504608e-01 + <_> + + 0 -1 2792 1.6667999327182770e-02 + + 1.4024500548839569e-01 -5.2178502082824707e-01 + <_> + + 0 -1 2793 -8.0340001732110977e-03 + + -6.6178399324417114e-01 3.7640000227838755e-03 + <_> + + 0 -1 2794 -3.3096998929977417e-02 + + 8.0185902118682861e-01 5.9385001659393311e-02 + <_> + + 0 -1 2795 1.2547999620437622e-02 + + -3.3545500040054321e-01 1.4578600227832794e-01 + <_> + + 0 -1 2796 -4.2073998600244522e-02 + + -5.5509102344512939e-01 1.3266600668430328e-01 + <_> + + 0 -1 2797 2.5221999734640121e-02 + + -6.1631999909877777e-02 -1.3678770065307617e+00 + <_> + + 0 -1 2798 -2.4268999695777893e-02 + + 3.4185099601745605e-01 -7.4160001240670681e-03 + <_> + + 0 -1 2799 -1.2280000373721123e-02 + + 2.7745801210403442e-01 -3.1033900380134583e-01 + <_> + + 0 -1 2800 -1.1377099901437759e-01 + + 1.1719540357589722e+00 8.3681002259254456e-02 + <_> + + 0 -1 2801 -8.4771998226642609e-02 + + 8.1694799661636353e-01 -1.7837500572204590e-01 + <_> + + 0 -1 2802 -2.4552000686526299e-02 + + -1.8627299368381500e-01 1.4340099692344666e-01 + <_> + + 0 -1 2803 -9.0269995853304863e-03 + + 3.2659199833869934e-01 -2.3541299998760223e-01 + <_> + + 0 -1 2804 1.1177999898791313e-02 + + 1.9761200249195099e-01 -2.1701000630855560e-02 + <_> + + 0 -1 2805 -2.9366999864578247e-02 + + -9.3414801359176636e-01 -2.1704999729990959e-02 + <_> + + 0 -1 2806 6.3640000298619270e-03 + + 2.5573000311851501e-02 4.6412798762321472e-01 + <_> + + 0 -1 2807 1.4026000164449215e-02 + + -2.1228599548339844e-01 4.0078800916671753e-01 + <_> + + 0 -1 2808 
-1.3341999612748623e-02 + + 7.4202698469161987e-01 2.9001999646425247e-02 + <_> + + 0 -1 2809 2.8422799706459045e-01 + + -1.9243599474430084e-01 4.3631199002265930e-01 + <_> + + 0 -1 2810 -2.3724000155925751e-01 + + 6.9736397266387939e-01 6.9307997822761536e-02 + <_> + + 0 -1 2811 -1.1169700324535370e-01 + + 3.9147201180458069e-01 -2.0922000706195831e-01 + <_> + + 0 -1 2812 1.2787500023841858e-01 + + -7.2555996477603912e-02 3.6088201403617859e-01 + <_> + + 0 -1 2813 -6.2900997698307037e-02 + + 9.5424997806549072e-01 -1.5402799844741821e-01 + <_> + + 0 -1 2814 1.7439000308513641e-02 + + -5.1134999841451645e-02 2.7750301361083984e-01 + <_> + + 0 -1 2815 1.2319999514147639e-03 + + 7.5627997517585754e-02 -3.6456099152565002e-01 + <_> + + 0 -1 2816 2.7495000511407852e-02 + + 5.1844000816345215e-02 4.1562598943710327e-01 + <_> + + 0 -1 2817 -4.3543998152017593e-02 + + 7.1969997882843018e-01 -1.7132200300693512e-01 + <_> + + 0 -1 2818 1.1025999672710896e-02 + + 1.4354600012302399e-01 -6.5403002500534058e-01 + <_> + + 0 -1 2819 2.0865999162197113e-02 + + 4.0089000016450882e-02 -4.5743298530578613e-01 + <_> + + 0 -1 2820 -2.2304000332951546e-02 + + 5.3855001926422119e-01 7.1662999689579010e-02 + <_> + + 0 -1 2821 3.2492000609636307e-02 + + -4.5991998165845871e-02 -1.0047069787979126e+00 + <_> + + 0 -1 2822 1.2269999831914902e-02 + + 3.4334998577833176e-02 4.2431798577308655e-01 + <_> + + 0 -1 2823 8.3820000290870667e-03 + + -2.5850600004196167e-01 2.6263499259948730e-01 + <_> + + 0 -1 2824 3.7353999912738800e-02 + + 1.5692499279975891e-01 -1.0429090261459351e+00 + <_> + + 0 -1 2825 -1.4111000113189220e-02 + + -7.3177701234817505e-01 -2.0276999101042747e-02 + <_> + + 0 -1 2826 5.7066999375820160e-02 + + 8.3360001444816589e-02 1.5661499500274658e+00 + <_> + + 0 -1 2827 4.9680001102387905e-03 + + -3.5318198800086975e-01 1.4698399603366852e-01 + <_> + + 0 -1 2828 -2.4492999538779259e-02 + + 2.8325900435447693e-01 -3.4640000667423010e-03 + <_> + + 0 -1 2829 -1.1254999786615372e-02 + + -8.4017497301101685e-01 -3.6251999437808990e-02 + <_> + + 0 -1 2830 3.4533001482486725e-02 + + 1.4998500049114227e-01 -8.7367099523544312e-01 + <_> + + 0 -1 2831 2.4303000420331955e-02 + + -1.8787500262260437e-01 5.9483999013900757e-01 + <_> + + 0 -1 2832 -7.8790001571178436e-03 + + 4.4315698742866516e-01 -5.6570999324321747e-02 + <_> + + 0 -1 2833 3.5142000764608383e-02 + + -5.6494999676942825e-02 -1.3617190122604370e+00 + <_> + + 0 -1 2834 4.6259998343884945e-03 + + -3.1161698698997498e-01 2.5447699427604675e-01 + <_> + + 0 -1 2835 -8.3131000399589539e-02 + + 1.6424349546432495e+00 -1.4429399371147156e-01 + <_> + + 0 -1 2836 -1.4015999622642994e-02 + + -7.7819502353668213e-01 1.7173300683498383e-01 + <_> + + 0 -1 2837 1.2450000504031777e-03 + + -2.3191399872303009e-01 2.8527900576591492e-01 + <_> + + 0 -1 2838 -1.6803000122308731e-02 + + -3.5965099930763245e-01 2.0412999391555786e-01 + <_> + + 0 -1 2839 -7.6747998595237732e-02 + + 7.8050500154495239e-01 -1.5612800419330597e-01 + <_> + + 0 -1 2840 -2.3671999573707581e-01 + + 1.1813700199127197e+00 7.8111998736858368e-02 + <_> + + 0 -1 2841 -1.0057400166988373e-01 + + -4.7104099392890930e-01 7.9172998666763306e-02 + <_> + + 0 -1 2842 1.3239999534562230e-03 + + 2.2262699902057648e-01 -3.7099799513816833e-01 + <_> + + 0 -1 2843 2.2152999415993690e-02 + + -3.8649000227451324e-02 -9.2274999618530273e-01 + <_> + + 0 -1 2844 -1.1246199905872345e-01 + + 4.1899600625038147e-01 8.0411002039909363e-02 + <_> + + 0 -1 2845 1.6481000930070877e-02 + + -1.6756699979305267e-01 
7.1842402219772339e-01 + <_> + + 0 -1 2846 6.8113997578620911e-02 + + 1.5719899535179138e-01 -8.7681102752685547e-01 + <_> + + 0 -1 2847 1.6011999920010567e-02 + + -4.1600000113248825e-03 -5.9327799081802368e-01 + <_> + + 0 -1 2848 4.6640001237392426e-03 + + -3.0153999105095863e-02 4.8345300555229187e-01 + <_> + + 0 -1 2849 6.7579997703433037e-03 + + -2.2667400538921356e-01 3.3662301301956177e-01 + <_> + + 0 -1 2850 4.7289999201893806e-03 + + -6.0373999178409576e-02 3.1458100676536560e-01 + <_> + + 0 -1 2851 2.5869999080896378e-03 + + -2.9872599244117737e-01 1.7787499725818634e-01 + <_> + + 0 -1 2852 2.8989999555051327e-03 + + 2.1890200674533844e-01 -2.9567098617553711e-01 + <_> + + 0 -1 2853 -3.0053999274969101e-02 + + 1.2150429487228394e+00 -1.4354999363422394e-01 + <_> + + 0 -1 2854 1.4181000180542469e-02 + + 1.2451999820768833e-02 5.5490100383758545e-01 + <_> + + 0 -1 2855 -6.0527000576257706e-02 + + -1.4933999776840210e+00 -6.5227001905441284e-02 + <_> + + 0 -1 2856 -1.9882999360561371e-02 + + -3.8526400923728943e-01 1.9761200249195099e-01 + <_> + + 0 -1 2857 3.1218999996781349e-02 + + -2.1281200647354126e-01 2.9446500539779663e-01 + <_> + + 0 -1 2858 1.8271999433636665e-02 + + 9.7200000891461968e-04 6.6814202070236206e-01 + <_> + + 0 -1 2859 1.1089999461546540e-03 + + -6.2467902898788452e-01 -1.6599999507889152e-03 + <_> + + 0 -1 2860 -3.6713998764753342e-02 + + -4.2333900928497314e-01 1.2084700167179108e-01 + <_> + + 0 -1 2861 1.2044000439345837e-02 + + 2.5882000103592873e-02 -5.0732398033142090e-01 + <_> + + 0 -1 2862 7.4749000370502472e-02 + + 1.3184699416160583e-01 -2.1739600598812103e-01 + <_> + + 0 -1 2863 -2.3473200201988220e-01 + + 1.1775610446929932e+00 -1.5114699304103851e-01 + <_> + + 0 -1 2864 1.4096499979496002e-01 + + 3.3991001546382904e-02 3.9923098683357239e-01 + <_> + + 0 -1 2865 6.1789997853338718e-03 + + -3.1806701421737671e-01 1.1681699752807617e-01 + <_> + + 0 -1 2866 -5.7216998189687729e-02 + + 8.4399098157882690e-01 8.3889000117778778e-02 + <_> + + 0 -1 2867 -5.5227000266313553e-02 + + 3.6888301372528076e-01 -1.8913400173187256e-01 + <_> + + 0 -1 2868 -2.1583000198006630e-02 + + -5.2161800861358643e-01 1.5772600471973419e-01 + <_> + + 0 -1 2869 2.5747999548912048e-02 + + -5.9921998530626297e-02 -1.0674990415573120e+00 + <_> + + 0 -1 2870 -1.3098999857902527e-02 + + 7.8958398103713989e-01 5.2099999040365219e-02 + <_> + + 0 -1 2871 2.2799998987466097e-03 + + -1.1704430580139160e+00 -5.9356998652219772e-02 + <_> + + 0 -1 2872 8.8060004636645317e-03 + + 4.1717998683452606e-02 6.6352599859237671e-01 + <_> + + 0 -1 2873 -8.9699998497962952e-03 + + -3.5862699151039124e-01 6.0458000749349594e-02 + <_> + + 0 -1 2874 4.0230001322925091e-03 + + 2.0979399979114532e-01 -2.4806000292301178e-01 + <_> + + 0 -1 2875 2.5017000734806061e-02 + + -1.8795900046825409e-01 3.9547100663185120e-01 + <_> + + 0 -1 2876 -5.9009999968111515e-03 + + 2.5663900375366211e-01 -9.4919003546237946e-02 + <_> + + 0 -1 2877 4.3850000947713852e-03 + + 3.3139001578092575e-02 -4.6075400710105896e-01 + <_> + + 0 -1 2878 -3.3771999180316925e-02 + + -9.8881602287292480e-01 1.4636899530887604e-01 + <_> + + 0 -1 2879 4.4523000717163086e-02 + + -1.3286699354648590e-01 1.5796790122985840e+00 + <_> + + 0 -1 2880 -4.0929000824689865e-02 + + 3.3877098560333252e-01 7.4970997869968414e-02 + <_> + + 0 -1 2881 3.9351999759674072e-02 + + -1.8327899277210236e-01 4.6980699896812439e-01 + <_> + + 0 -1 2882 -7.0322997868061066e-02 + + -9.8322701454162598e-01 1.1808100342750549e-01 + <_> + + 0 -1 2883 
3.5743001848459244e-02 + + -3.3050999045372009e-02 -8.3610898256301880e-01 + <_> + + 0 -1 2884 -4.2961999773979187e-02 + + 1.1670809984207153e+00 8.0692000687122345e-02 + <_> + + 0 -1 2885 -2.1007999777793884e-02 + + 6.3869798183441162e-01 -1.7626300454139709e-01 + <_> + + 0 -1 2886 -1.5742200613021851e-01 + + -2.3302499949932098e-01 1.2517499923706055e-01 + <_> + + 0 -1 2887 7.8659998252987862e-03 + + -2.2037999331951141e-01 2.7196800708770752e-01 + <_> + + 0 -1 2888 2.3622000589966774e-02 + + 1.6127300262451172e-01 -4.3329000473022461e-01 + <_> + + 0 -1 2889 7.4692003428936005e-02 + + -1.6991999745368958e-01 5.8884900808334351e-01 + <_> + + 0 -1 2890 -6.4799998654052615e-04 + + 2.5842899084091187e-01 -3.5911999642848969e-02 + <_> + + 0 -1 2891 -1.6290999948978424e-02 + + -7.6764398813247681e-01 -2.0472999662160873e-02 + <_> + + 0 -1 2892 -3.3133998513221741e-02 + + -2.7180099487304688e-01 1.4325700700283051e-01 + <_> + + 0 -1 2893 4.8797998577356339e-02 + + 7.6408997178077698e-02 -4.1445198655128479e-01 + <_> + + 0 -1 2894 2.2869999520480633e-03 + + -3.8628999143838882e-02 2.0753799378871918e-01 + <_> + + 0 -1 2895 4.5304000377655029e-02 + + -1.7777900397777557e-01 6.3461399078369141e-01 + <_> + + 0 -1 2896 1.0705800354480743e-01 + + 1.8972299993038177e-01 -5.1236200332641602e-01 + <_> + + 0 -1 2897 -4.0525000542402267e-02 + + 7.0614999532699585e-01 -1.7803299427032471e-01 + <_> + + 0 -1 2898 3.1968999654054642e-02 + + 6.8149998784065247e-02 6.8733102083206177e-01 + <_> + + 0 -1 2899 -5.7617001235485077e-02 + + 7.5170499086380005e-01 -1.5764999389648438e-01 + <_> + + 0 -1 2900 1.3593999668955803e-02 + + 1.9411900639533997e-01 -2.4561899900436401e-01 + <_> + + 0 -1 2901 7.1396000683307648e-02 + + -4.6881001442670822e-02 -8.8198298215866089e-01 + <_> + + 0 -1 2902 -1.4895999804139137e-02 + + -4.4532400369644165e-01 1.7679899930953979e-01 + <_> + + 0 -1 2903 -1.0026000440120697e-02 + + 6.5122699737548828e-01 -1.6709999740123749e-01 + <_> + + 0 -1 2904 3.7589999847114086e-03 + + -5.8301001787185669e-02 3.4483298659324646e-01 + <_> + + 0 -1 2905 1.6263000667095184e-02 + + -1.5581500530242920e-01 8.6432701349258423e-01 + <_> + + 0 -1 2906 -4.0176000446081161e-02 + + -6.1028599739074707e-01 1.1796399950981140e-01 + <_> + + 0 -1 2907 2.7080999687314034e-02 + + -4.9601998180150986e-02 -8.9990001916885376e-01 + <_> + + 0 -1 2908 5.2420001477003098e-02 + + 1.1297199875116348e-01 -1.0833640098571777e+00 + <_> + + 0 -1 2909 -1.9160000607371330e-02 + + -7.9880100488662720e-01 -3.4079000353813171e-02 + <_> + + 0 -1 2910 -3.7730000913143158e-03 + + -1.9124099612236023e-01 2.1535199880599976e-01 + <_> + + 0 -1 2911 7.5762003660202026e-02 + + -1.3421699404716492e-01 1.6807060241699219e+00 + <_> + + 0 -1 2912 -2.2173000499606133e-02 + + 4.8600998520851135e-01 3.6160000599920750e-03 + + <_> + + <_> + 6 4 12 9 -1. + <_> + 6 7 12 3 3. + <_> + + <_> + 6 4 12 7 -1. + <_> + 10 4 4 7 3. + <_> + + <_> + 3 9 18 9 -1. + <_> + 3 12 18 3 3. + <_> + + <_> + 8 18 9 6 -1. + <_> + 8 20 9 2 3. + <_> + + <_> + 3 5 4 19 -1. + <_> + 5 5 2 19 2. + <_> + + <_> + 6 5 12 16 -1. + <_> + 6 13 12 8 2. + <_> + + <_> + 5 8 12 6 -1. + <_> + 5 11 12 3 2. + <_> + + <_> + 11 14 4 10 -1. + <_> + 11 19 4 5 2. + <_> + + <_> + 4 0 7 6 -1. + <_> + 4 3 7 3 2. + <_> + + <_> + 6 6 12 6 -1. + <_> + 6 8 12 2 3. + <_> + + <_> + 6 4 12 7 -1. + <_> + 10 4 4 7 3. + <_> + + <_> + 1 8 19 12 -1. + <_> + 1 12 19 4 3. + <_> + + <_> + 0 2 24 3 -1. + <_> + 8 2 8 3 3. + <_> + + <_> + 9 9 6 15 -1. + <_> + 9 14 6 5 3. + <_> + + <_> + 5 6 14 10 -1. 
+ <_> + 5 11 14 5 2. + <_> + + <_> + 5 0 14 9 -1. + <_> + 5 3 14 3 3. + <_> + + <_> + 13 11 9 6 -1. + <_> + 16 11 3 6 3. + <_> + + <_> + 7 5 6 10 -1. + <_> + 9 5 2 10 3. + <_> + + <_> + 10 8 6 10 -1. + <_> + 12 8 2 10 3. + <_> + + <_> + 2 5 4 9 -1. + <_> + 4 5 2 9 2. + <_> + + <_> + 18 0 6 11 -1. + <_> + 20 0 2 11 3. + <_> + + <_> + 0 6 24 13 -1. + <_> + 8 6 8 13 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 7 18 10 6 -1. + <_> + 7 20 10 2 3. + <_> + + <_> + 5 7 14 12 -1. + <_> + 5 13 14 6 2. + <_> + + <_> + 0 3 24 3 -1. + <_> + 8 3 8 3 3. + <_> + + <_> + 5 8 15 6 -1. + <_> + 5 11 15 3 2. + <_> + + <_> + 9 6 5 14 -1. + <_> + 9 13 5 7 2. + <_> + + <_> + 9 5 6 10 -1. + <_> + 11 5 2 10 3. + <_> + + <_> + 6 6 3 12 -1. + <_> + 6 12 3 6 2. + <_> + + <_> + 3 21 18 3 -1. + <_> + 9 21 6 3 3. + <_> + + <_> + 5 6 13 6 -1. + <_> + 5 8 13 2 3. + <_> + + <_> + 18 1 6 15 -1. + <_> + 18 1 3 15 2. + <_> + + <_> + 1 1 6 15 -1. + <_> + 4 1 3 15 2. + <_> + + <_> + 0 8 24 15 -1. + <_> + 8 8 8 15 3. + <_> + + <_> + 5 6 14 12 -1. + <_> + 5 6 7 6 2. + <_> + 12 12 7 6 2. + <_> + + <_> + 2 12 21 12 -1. + <_> + 2 16 21 4 3. + <_> + + <_> + 8 1 4 10 -1. + <_> + 10 1 2 10 2. + <_> + + <_> + 2 13 20 10 -1. + <_> + 2 13 10 10 2. + <_> + + <_> + 0 1 6 13 -1. + <_> + 2 1 2 13 3. + <_> + + <_> + 20 2 4 13 -1. + <_> + 20 2 2 13 2. + <_> + + <_> + 0 5 22 19 -1. + <_> + 11 5 11 19 2. + <_> + + <_> + 18 4 6 9 -1. + <_> + 20 4 2 9 3. + <_> + + <_> + 0 3 6 11 -1. + <_> + 2 3 2 11 3. + <_> + + <_> + 12 1 4 9 -1. + <_> + 12 1 2 9 2. + <_> + + <_> + 0 6 19 3 -1. + <_> + 0 7 19 1 3. + <_> + + <_> + 12 1 4 9 -1. + <_> + 12 1 2 9 2. + <_> + + <_> + 8 1 4 9 -1. + <_> + 10 1 2 9 2. + <_> + + <_> + 5 5 14 14 -1. + <_> + 12 5 7 7 2. + <_> + 5 12 7 7 2. + <_> + + <_> + 1 10 18 2 -1. + <_> + 1 11 18 1 2. + <_> + + <_> + 17 13 4 11 -1. + <_> + 17 13 2 11 2. + <_> + + <_> + 0 4 6 9 -1. + <_> + 0 7 6 3 3. + <_> + + <_> + 6 4 12 9 -1. + <_> + 6 7 12 3 3. + <_> + + <_> + 6 5 12 6 -1. + <_> + 10 5 4 6 3. + <_> + + <_> + 0 1 24 5 -1. + <_> + 8 1 8 5 3. + <_> + + <_> + 4 10 18 6 -1. + <_> + 4 12 18 2 3. + <_> + + <_> + 2 17 12 6 -1. + <_> + 2 17 6 3 2. + <_> + 8 20 6 3 2. + <_> + + <_> + 19 3 4 13 -1. + <_> + 19 3 2 13 2. + <_> + + <_> + 1 3 4 13 -1. + <_> + 3 3 2 13 2. + <_> + + <_> + 0 1 24 23 -1. + <_> + 8 1 8 23 3. + <_> + + <_> + 1 7 8 12 -1. + <_> + 1 11 8 4 3. + <_> + + <_> + 14 7 3 14 -1. + <_> + 14 14 3 7 2. + <_> + + <_> + 3 12 16 6 -1. + <_> + 3 12 8 3 2. + <_> + 11 15 8 3 2. + <_> + + <_> + 6 6 12 6 -1. + <_> + 6 8 12 2 3. + <_> + + <_> + 8 7 6 12 -1. + <_> + 8 13 6 6 2. + <_> + + <_> + 15 15 9 6 -1. + <_> + 15 17 9 2 3. + <_> + + <_> + 1 17 18 3 -1. + <_> + 1 18 18 1 3. + <_> + + <_> + 4 4 16 12 -1. + <_> + 4 10 16 6 2. + <_> + + <_> + 0 1 4 20 -1. + <_> + 2 1 2 20 2. + <_> + + <_> + 3 0 18 2 -1. + <_> + 3 1 18 1 2. + <_> + + <_> + 1 5 20 14 -1. + <_> + 1 5 10 7 2. + <_> + 11 12 10 7 2. + <_> + + <_> + 5 8 14 12 -1. + <_> + 5 12 14 4 3. + <_> + + <_> + 3 14 7 9 -1. + <_> + 3 17 7 3 3. + <_> + + <_> + 14 15 9 6 -1. + <_> + 14 17 9 2 3. + <_> + + <_> + 1 15 9 6 -1. + <_> + 1 17 9 2 3. + <_> + + <_> + 11 6 8 10 -1. + <_> + 15 6 4 5 2. + <_> + 11 11 4 5 2. + <_> + + <_> + 5 5 14 14 -1. + <_> + 5 5 7 7 2. + <_> + 12 12 7 7 2. + <_> + + <_> + 6 0 12 5 -1. + <_> + 10 0 4 5 3. + <_> + + <_> + 9 0 6 9 -1. + <_> + 9 3 6 3 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 8 6 6 9 -1. + <_> + 10 6 2 9 3. 
+ <_> + + <_> + 3 8 18 4 -1. + <_> + 9 8 6 4 3. + <_> + + <_> + 6 0 12 9 -1. + <_> + 6 3 12 3 3. + <_> + + <_> + 0 0 24 6 -1. + <_> + 8 0 8 6 3. + <_> + + <_> + 4 7 16 12 -1. + <_> + 4 11 16 4 3. + <_> + + <_> + 11 6 6 6 -1. + <_> + 11 6 3 6 2. + <_> + + <_> + 0 20 24 3 -1. + <_> + 8 20 8 3 3. + <_> + + <_> + 11 6 4 9 -1. + <_> + 11 6 2 9 2. + <_> + + <_> + 4 13 15 4 -1. + <_> + 9 13 5 4 3. + <_> + + <_> + 11 6 4 9 -1. + <_> + 11 6 2 9 2. + <_> + + <_> + 9 6 4 9 -1. + <_> + 11 6 2 9 2. + <_> + + <_> + 9 12 6 12 -1. + <_> + 9 18 6 6 2. + <_> + + <_> + 1 22 18 2 -1. + <_> + 1 23 18 1 2. + <_> + + <_> + 10 7 4 10 -1. + <_> + 10 12 4 5 2. + <_> + + <_> + 6 7 8 10 -1. + <_> + 6 12 8 5 2. + <_> + + <_> + 7 6 10 6 -1. + <_> + 7 8 10 2 3. + <_> + + <_> + 0 14 10 4 -1. + <_> + 0 16 10 2 2. + <_> + + <_> + 6 18 18 2 -1. + <_> + 6 19 18 1 2. + <_> + + <_> + 1 1 22 3 -1. + <_> + 1 2 22 1 3. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 2 4 6 15 -1. + <_> + 5 4 3 15 2. + <_> + + <_> + 20 4 4 10 -1. + <_> + 20 4 2 10 2. + <_> + + <_> + 0 4 4 10 -1. + <_> + 2 4 2 10 2. + <_> + + <_> + 2 16 20 6 -1. + <_> + 12 16 10 3 2. + <_> + 2 19 10 3 2. + <_> + + <_> + 0 12 8 9 -1. + <_> + 4 12 4 9 2. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 5 10 6 6 -1. + <_> + 8 10 3 6 2. + <_> + + <_> + 11 8 12 6 -1. + <_> + 17 8 6 3 2. + <_> + 11 11 6 3 2. + <_> + + <_> + 0 8 12 6 -1. + <_> + 0 8 6 3 2. + <_> + 6 11 6 3 2. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 8 14 9 6 -1. + <_> + 8 16 9 2 3. + <_> + + <_> + 0 16 9 6 -1. + <_> + 0 18 9 2 3. + <_> + + <_> + 10 8 6 10 -1. + <_> + 12 8 2 10 3. + <_> + + <_> + 3 19 12 3 -1. + <_> + 9 19 6 3 2. + <_> + + <_> + 2 10 20 2 -1. + <_> + 2 11 20 1 2. + <_> + + <_> + 2 9 18 12 -1. + <_> + 2 9 9 6 2. + <_> + 11 15 9 6 2. + <_> + + <_> + 3 0 18 24 -1. + <_> + 3 0 9 24 2. + <_> + + <_> + 5 6 14 10 -1. + <_> + 5 6 7 5 2. + <_> + 12 11 7 5 2. + <_> + + <_> + 9 5 10 12 -1. + <_> + 14 5 5 6 2. + <_> + 9 11 5 6 2. + <_> + + <_> + 4 5 12 12 -1. + <_> + 4 5 6 6 2. + <_> + 10 11 6 6 2. + <_> + + <_> + 4 14 18 3 -1. + <_> + 4 15 18 1 3. + <_> + + <_> + 6 13 8 8 -1. + <_> + 6 17 8 4 2. + <_> + + <_> + 3 16 18 6 -1. + <_> + 3 19 18 3 2. + <_> + + <_> + 0 0 6 6 -1. + <_> + 3 0 3 6 2. + <_> + + <_> + 6 6 12 18 -1. + <_> + 10 6 4 18 3. + <_> + + <_> + 6 1 4 14 -1. + <_> + 8 1 2 14 2. + <_> + + <_> + 3 2 19 2 -1. + <_> + 3 3 19 1 2. + <_> + + <_> + 1 8 22 13 -1. + <_> + 12 8 11 13 2. + <_> + + <_> + 8 9 11 4 -1. + <_> + 8 11 11 2 2. + <_> + + <_> + 0 12 15 10 -1. + <_> + 5 12 5 10 3. + <_> + + <_> + 12 16 12 6 -1. + <_> + 16 16 4 6 3. + <_> + + <_> + 0 16 12 6 -1. + <_> + 4 16 4 6 3. + <_> + + <_> + 19 1 5 12 -1. + <_> + 19 5 5 4 3. + <_> + + <_> + 0 2 24 4 -1. + <_> + 8 2 8 4 3. + <_> + + <_> + 6 8 12 4 -1. + <_> + 6 10 12 2 2. + <_> + + <_> + 7 5 9 6 -1. + <_> + 10 5 3 6 3. + <_> + + <_> + 9 17 6 6 -1. + <_> + 9 20 6 3 2. + <_> + + <_> + 0 7 22 15 -1. + <_> + 0 12 22 5 3. + <_> + + <_> + 4 1 17 9 -1. + <_> + 4 4 17 3 3. + <_> + + <_> + 7 5 6 10 -1. + <_> + 9 5 2 10 3. + <_> + + <_> + 18 1 6 8 -1. + <_> + 18 1 3 8 2. + <_> + + <_> + 0 1 6 7 -1. + <_> + 3 1 3 7 2. + <_> + + <_> + 18 0 6 22 -1. + <_> + 18 0 3 22 2. + <_> + + <_> + 0 0 6 22 -1. + <_> + 3 0 3 22 2. + <_> + + <_> + 16 7 8 16 -1. + <_> + 16 7 4 16 2. + <_> + + <_> + 2 10 19 6 -1. + <_> + 2 12 19 2 3. + <_> + + <_> + 9 9 6 12 -1. + <_> + 9 13 6 4 3. + <_> + + <_> + 2 15 17 6 -1. + <_> + 2 17 17 2 3. + <_> + + <_> + 14 7 3 14 -1. 
+ <_> + 14 14 3 7 2. + <_> + + <_> + 5 6 8 10 -1. + <_> + 5 6 4 5 2. + <_> + 9 11 4 5 2. + <_> + + <_> + 15 8 9 11 -1. + <_> + 18 8 3 11 3. + <_> + + <_> + 0 8 9 11 -1. + <_> + 3 8 3 11 3. + <_> + + <_> + 8 6 10 18 -1. + <_> + 8 15 10 9 2. + <_> + + <_> + 7 7 3 14 -1. + <_> + 7 14 3 7 2. + <_> + + <_> + 0 14 24 8 -1. + <_> + 8 14 8 8 3. + <_> + + <_> + 1 10 18 14 -1. + <_> + 10 10 9 14 2. + <_> + + <_> + 14 12 6 6 -1. + <_> + 14 15 6 3 2. + <_> + + <_> + 7 0 10 16 -1. + <_> + 7 0 5 8 2. + <_> + 12 8 5 8 2. + <_> + + <_> + 10 0 9 6 -1. + <_> + 13 0 3 6 3. + <_> + + <_> + 4 3 16 4 -1. + <_> + 12 3 8 4 2. + <_> + + <_> + 10 0 9 6 -1. + <_> + 13 0 3 6 3. + <_> + + <_> + 1 1 20 4 -1. + <_> + 1 1 10 2 2. + <_> + 11 3 10 2 2. + <_> + + <_> + 10 0 9 6 -1. + <_> + 13 0 3 6 3. + <_> + + <_> + 5 0 9 6 -1. + <_> + 8 0 3 6 3. + <_> + + <_> + 8 18 10 6 -1. + <_> + 8 20 10 2 3. + <_> + + <_> + 6 3 6 9 -1. + <_> + 8 3 2 9 3. + <_> + + <_> + 7 3 12 6 -1. + <_> + 7 5 12 2 3. + <_> + + <_> + 0 10 18 3 -1. + <_> + 0 11 18 1 3. + <_> + + <_> + 1 10 22 3 -1. + <_> + 1 11 22 1 3. + <_> + + <_> + 5 11 8 8 -1. + <_> + 9 11 4 8 2. + <_> + + <_> + 12 11 6 6 -1. + <_> + 12 11 3 6 2. + <_> + + <_> + 6 11 6 6 -1. + <_> + 9 11 3 6 2. + <_> + + <_> + 7 10 11 6 -1. + <_> + 7 12 11 2 3. + <_> + + <_> + 0 13 24 4 -1. + <_> + 0 13 12 2 2. + <_> + 12 15 12 2 2. + <_> + + <_> + 2 4 22 12 -1. + <_> + 13 4 11 6 2. + <_> + 2 10 11 6 2. + <_> + + <_> + 2 0 20 17 -1. + <_> + 12 0 10 17 2. + <_> + + <_> + 14 0 2 24 -1. + <_> + 14 0 1 24 2. + <_> + + <_> + 8 0 2 24 -1. + <_> + 9 0 1 24 2. + <_> + + <_> + 14 1 2 22 -1. + <_> + 14 1 1 22 2. + <_> + + <_> + 8 1 2 22 -1. + <_> + 9 1 1 22 2. + <_> + + <_> + 17 6 3 18 -1. + <_> + 18 6 1 18 3. + <_> + + <_> + 6 14 9 6 -1. + <_> + 6 16 9 2 3. + <_> + + <_> + 13 14 9 4 -1. + <_> + 13 16 9 2 2. + <_> + + <_> + 3 18 18 3 -1. + <_> + 3 19 18 1 3. + <_> + + <_> + 9 4 8 18 -1. + <_> + 13 4 4 9 2. + <_> + 9 13 4 9 2. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 0 2 12 4 -1. + <_> + 6 2 6 4 2. + <_> + + <_> + 6 8 14 6 -1. + <_> + 6 11 14 3 2. + <_> + + <_> + 7 5 6 6 -1. + <_> + 10 5 3 6 2. + <_> + + <_> + 10 5 6 16 -1. + <_> + 10 13 6 8 2. + <_> + + <_> + 1 4 9 16 -1. + <_> + 4 4 3 16 3. + <_> + + <_> + 5 0 18 9 -1. + <_> + 5 3 18 3 3. + <_> + + <_> + 9 15 5 8 -1. + <_> + 9 19 5 4 2. + <_> + + <_> + 20 0 4 9 -1. + <_> + 20 0 2 9 2. + <_> + + <_> + 2 0 18 3 -1. + <_> + 2 1 18 1 3. + <_> + + <_> + 5 22 19 2 -1. + <_> + 5 23 19 1 2. + <_> + + <_> + 0 0 4 9 -1. + <_> + 2 0 2 9 2. + <_> + + <_> + 5 6 19 18 -1. + <_> + 5 12 19 6 3. + <_> + + <_> + 0 1 6 9 -1. + <_> + 2 1 2 9 3. + <_> + + <_> + 6 5 14 12 -1. + <_> + 13 5 7 6 2. + <_> + 6 11 7 6 2. + <_> + + <_> + 0 1 20 2 -1. + <_> + 0 2 20 1 2. + <_> + + <_> + 1 2 22 3 -1. + <_> + 1 3 22 1 3. + <_> + + <_> + 2 8 7 9 -1. + <_> + 2 11 7 3 3. + <_> + + <_> + 2 12 22 4 -1. + <_> + 13 12 11 2 2. + <_> + 2 14 11 2 2. + <_> + + <_> + 0 12 22 4 -1. + <_> + 0 12 11 2 2. + <_> + 11 14 11 2 2. + <_> + + <_> + 9 7 6 11 -1. + <_> + 11 7 2 11 3. + <_> + + <_> + 7 1 9 6 -1. + <_> + 10 1 3 6 3. + <_> + + <_> + 11 2 4 10 -1. + <_> + 11 7 4 5 2. + <_> + + <_> + 6 4 12 12 -1. + <_> + 6 10 12 6 2. + <_> + + <_> + 18 1 6 15 -1. + <_> + 18 6 6 5 3. + <_> + + <_> + 3 15 18 3 -1. + <_> + 3 16 18 1 3. + <_> + + <_> + 18 5 6 9 -1. + <_> + 18 8 6 3 3. + <_> + + <_> + 1 5 16 6 -1. + <_> + 1 5 8 3 2. + <_> + 9 8 8 3 2. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 0 4 24 14 -1. + <_> + 0 4 12 7 2. + <_> + 12 11 12 7 2. 
+ <_> + + <_> + 13 0 4 13 -1. + <_> + 13 0 2 13 2. + <_> + + <_> + 7 0 4 13 -1. + <_> + 9 0 2 13 2. + <_> + + <_> + 11 6 6 9 -1. + <_> + 13 6 2 9 3. + <_> + + <_> + 8 7 6 9 -1. + <_> + 10 7 2 9 3. + <_> + + <_> + 13 17 9 6 -1. + <_> + 13 19 9 2 3. + <_> + + <_> + 2 18 14 6 -1. + <_> + 2 18 7 3 2. + <_> + 9 21 7 3 2. + <_> + + <_> + 3 18 18 4 -1. + <_> + 12 18 9 2 2. + <_> + 3 20 9 2 2. + <_> + + <_> + 0 20 15 4 -1. + <_> + 5 20 5 4 3. + <_> + + <_> + 9 15 15 9 -1. + <_> + 14 15 5 9 3. + <_> + + <_> + 4 4 16 4 -1. + <_> + 4 6 16 2 2. + <_> + + <_> + 7 6 10 6 -1. + <_> + 7 8 10 2 3. + <_> + + <_> + 0 14 15 10 -1. + <_> + 5 14 5 10 3. + <_> + + <_> + 7 9 10 14 -1. + <_> + 12 9 5 7 2. + <_> + 7 16 5 7 2. + <_> + + <_> + 7 6 6 9 -1. + <_> + 9 6 2 9 3. + <_> + + <_> + 3 6 18 3 -1. + <_> + 3 7 18 1 3. + <_> + + <_> + 0 10 18 3 -1. + <_> + 0 11 18 1 3. + <_> + + <_> + 3 16 18 4 -1. + <_> + 12 16 9 2 2. + <_> + 3 18 9 2 2. + <_> + + <_> + 4 6 14 6 -1. + <_> + 4 6 7 3 2. + <_> + 11 9 7 3 2. + <_> + + <_> + 13 0 2 18 -1. + <_> + 13 0 1 18 2. + <_> + + <_> + 9 0 2 18 -1. + <_> + 10 0 1 18 2. + <_> + + <_> + 5 7 15 10 -1. + <_> + 10 7 5 10 3. + <_> + + <_> + 1 20 21 4 -1. + <_> + 8 20 7 4 3. + <_> + + <_> + 10 5 5 18 -1. + <_> + 10 14 5 9 2. + <_> + + <_> + 0 2 24 6 -1. + <_> + 0 2 12 3 2. + <_> + 12 5 12 3 2. + <_> + + <_> + 1 1 22 8 -1. + <_> + 12 1 11 4 2. + <_> + 1 5 11 4 2. + <_> + + <_> + 4 0 15 9 -1. + <_> + 4 3 15 3 3. + <_> + + <_> + 0 0 24 19 -1. + <_> + 8 0 8 19 3. + <_> + + <_> + 2 21 18 3 -1. + <_> + 11 21 9 3 2. + <_> + + <_> + 9 7 10 4 -1. + <_> + 9 7 5 4 2. + <_> + + <_> + 5 7 10 4 -1. + <_> + 10 7 5 4 2. + <_> + + <_> + 17 8 6 16 -1. + <_> + 20 8 3 8 2. + <_> + 17 16 3 8 2. + <_> + + <_> + 1 15 20 4 -1. + <_> + 1 15 10 2 2. + <_> + 11 17 10 2 2. + <_> + + <_> + 14 15 10 6 -1. + <_> + 14 17 10 2 3. + <_> + + <_> + 3 0 16 9 -1. + <_> + 3 3 16 3 3. + <_> + + <_> + 15 6 7 15 -1. + <_> + 15 11 7 5 3. + <_> + + <_> + 9 1 6 13 -1. + <_> + 11 1 2 13 3. + <_> + + <_> + 17 2 6 14 -1. + <_> + 17 2 3 14 2. + <_> + + <_> + 3 14 12 10 -1. + <_> + 3 14 6 5 2. + <_> + 9 19 6 5 2. + <_> + + <_> + 7 6 10 6 -1. + <_> + 7 8 10 2 3. + <_> + + <_> + 1 2 6 14 -1. + <_> + 4 2 3 14 2. + <_> + + <_> + 10 4 5 12 -1. + <_> + 10 8 5 4 3. + <_> + + <_> + 0 17 24 5 -1. + <_> + 8 17 8 5 3. + <_> + + <_> + 15 7 5 12 -1. + <_> + 15 11 5 4 3. + <_> + + <_> + 3 1 6 12 -1. + <_> + 3 1 3 6 2. + <_> + 6 7 3 6 2. + <_> + + <_> + 12 13 6 6 -1. + <_> + 12 16 6 3 2. + <_> + + <_> + 6 13 6 6 -1. + <_> + 6 16 6 3 2. + <_> + + <_> + 14 6 3 16 -1. + <_> + 14 14 3 8 2. + <_> + + <_> + 1 12 13 6 -1. + <_> + 1 14 13 2 3. + <_> + + <_> + 13 1 4 9 -1. + <_> + 13 1 2 9 2. + <_> + + <_> + 7 0 9 6 -1. + <_> + 10 0 3 6 3. + <_> + + <_> + 12 2 6 9 -1. + <_> + 12 2 3 9 2. + <_> + + <_> + 6 2 6 9 -1. + <_> + 9 2 3 9 2. + <_> + + <_> + 6 18 12 6 -1. + <_> + 6 20 12 2 3. + <_> + + <_> + 7 6 6 9 -1. + <_> + 9 6 2 9 3. + <_> + + <_> + 7 7 12 3 -1. + <_> + 7 7 6 3 2. + <_> + + <_> + 8 3 8 21 -1. + <_> + 8 10 8 7 3. + <_> + + <_> + 7 4 10 12 -1. + <_> + 7 8 10 4 3. + <_> + + <_> + 0 1 6 9 -1. + <_> + 0 4 6 3 3. + <_> + + <_> + 15 2 2 20 -1. + <_> + 15 2 1 20 2. + <_> + + <_> + 0 3 6 9 -1. + <_> + 0 6 6 3 3. + <_> + + <_> + 15 3 2 21 -1. + <_> + 15 3 1 21 2. + <_> + + <_> + 7 0 2 23 -1. + <_> + 8 0 1 23 2. + <_> + + <_> + 15 8 9 4 -1. + <_> + 15 10 9 2 2. + <_> + + <_> + 0 8 9 4 -1. + <_> + 0 10 9 2 2. + <_> + + <_> + 8 14 9 6 -1. + <_> + 8 16 9 2 3. + <_> + + <_> + 0 14 9 6 -1. + <_> + 0 16 9 2 3. + <_> + + <_> + 3 10 18 4 -1. + <_> + 9 10 6 4 3. 
+ <_> + + <_> + 0 0 24 19 -1. + <_> + 8 0 8 19 3. + <_> + + <_> + 9 1 8 12 -1. + <_> + 9 7 8 6 2. + <_> + + <_> + 10 6 4 10 -1. + <_> + 12 6 2 10 2. + <_> + + <_> + 7 9 10 12 -1. + <_> + 12 9 5 6 2. + <_> + 7 15 5 6 2. + <_> + + <_> + 5 0 3 19 -1. + <_> + 6 0 1 19 3. + <_> + + <_> + 14 0 6 10 -1. + <_> + 16 0 2 10 3. + <_> + + <_> + 2 0 6 12 -1. + <_> + 2 0 3 6 2. + <_> + 5 6 3 6 2. + <_> + + <_> + 0 11 24 2 -1. + <_> + 0 12 24 1 2. + <_> + + <_> + 4 9 13 4 -1. + <_> + 4 11 13 2 2. + <_> + + <_> + 9 8 6 9 -1. + <_> + 9 11 6 3 3. + <_> + + <_> + 0 12 16 4 -1. + <_> + 0 14 16 2 2. + <_> + + <_> + 18 12 6 9 -1. + <_> + 18 15 6 3 3. + <_> + + <_> + 0 12 6 9 -1. + <_> + 0 15 6 3 3. + <_> + + <_> + 8 7 10 4 -1. + <_> + 8 7 5 4 2. + <_> + + <_> + 8 7 6 9 -1. + <_> + 10 7 2 9 3. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 12 3 6 15 -1. + <_> + 14 3 2 15 3. + <_> + + <_> + 6 3 6 15 -1. + <_> + 8 3 2 15 3. + <_> + + <_> + 15 2 9 4 -1. + <_> + 15 4 9 2 2. + <_> + + <_> + 5 10 6 7 -1. + <_> + 8 10 3 7 2. + <_> + + <_> + 9 14 6 10 -1. + <_> + 9 19 6 5 2. + <_> + + <_> + 7 13 5 8 -1. + <_> + 7 17 5 4 2. + <_> + + <_> + 14 5 3 16 -1. + <_> + 14 13 3 8 2. + <_> + + <_> + 2 17 18 3 -1. + <_> + 2 18 18 1 3. + <_> + + <_> + 5 18 19 3 -1. + <_> + 5 19 19 1 3. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 12 4 3 18 -1. + <_> + 13 4 1 18 3. + <_> + + <_> + 9 4 3 18 -1. + <_> + 10 4 1 18 3. + <_> + + <_> + 3 3 18 9 -1. + <_> + 9 3 6 9 3. + <_> + + <_> + 6 1 6 14 -1. + <_> + 8 1 2 14 3. + <_> + + <_> + 12 16 9 6 -1. + <_> + 12 19 9 3 2. + <_> + + <_> + 1 3 20 16 -1. + <_> + 1 3 10 8 2. + <_> + 11 11 10 8 2. + <_> + + <_> + 12 5 6 12 -1. + <_> + 15 5 3 6 2. + <_> + 12 11 3 6 2. + <_> + + <_> + 1 2 22 16 -1. + <_> + 1 2 11 8 2. + <_> + 12 10 11 8 2. + <_> + + <_> + 10 14 5 10 -1. + <_> + 10 19 5 5 2. + <_> + + <_> + 3 21 18 3 -1. + <_> + 3 22 18 1 3. + <_> + + <_> + 10 14 6 10 -1. + <_> + 12 14 2 10 3. + <_> + + <_> + 0 2 24 4 -1. + <_> + 8 2 8 4 3. + <_> + + <_> + 6 4 12 9 -1. + <_> + 6 7 12 3 3. + <_> + + <_> + 6 6 12 5 -1. + <_> + 10 6 4 5 3. + <_> + + <_> + 5 8 14 12 -1. + <_> + 5 12 14 4 3. + <_> + + <_> + 4 14 8 10 -1. + <_> + 4 14 4 5 2. + <_> + 8 19 4 5 2. + <_> + + <_> + 11 6 5 14 -1. + <_> + 11 13 5 7 2. + <_> + + <_> + 7 6 3 16 -1. + <_> + 7 14 3 8 2. + <_> + + <_> + 3 7 18 8 -1. + <_> + 9 7 6 8 3. + <_> + + <_> + 2 3 20 2 -1. + <_> + 2 4 20 1 2. + <_> + + <_> + 3 12 19 6 -1. + <_> + 3 14 19 2 3. + <_> + + <_> + 8 6 6 9 -1. + <_> + 10 6 2 9 3. + <_> + + <_> + 16 6 6 14 -1. + <_> + 16 6 3 14 2. + <_> + + <_> + 7 9 6 12 -1. + <_> + 9 9 2 12 3. + <_> + + <_> + 18 6 6 18 -1. + <_> + 21 6 3 9 2. + <_> + 18 15 3 9 2. + <_> + + <_> + 0 6 6 18 -1. + <_> + 0 6 3 9 2. + <_> + 3 15 3 9 2. + <_> + + <_> + 18 2 6 9 -1. + <_> + 18 5 6 3 3. + <_> + + <_> + 3 18 15 6 -1. + <_> + 3 20 15 2 3. + <_> + + <_> + 18 2 6 9 -1. + <_> + 18 5 6 3 3. + <_> + + <_> + 0 2 6 9 -1. + <_> + 0 5 6 3 3. + <_> + + <_> + 5 10 18 2 -1. + <_> + 5 11 18 1 2. + <_> + + <_> + 6 0 12 6 -1. + <_> + 6 2 12 2 3. + <_> + + <_> + 10 0 6 9 -1. + <_> + 12 0 2 9 3. + <_> + + <_> + 8 0 6 9 -1. + <_> + 10 0 2 9 3. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. + <_> + + <_> + 3 6 13 6 -1. + <_> + 3 8 13 2 3. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. + <_> + + <_> + 2 5 6 15 -1. + <_> + 5 5 3 15 2. + <_> + + <_> + 8 8 9 6 -1. + <_> + 11 8 3 6 3. + <_> + + <_> + 8 6 3 14 -1. + <_> + 8 13 3 7 2. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. 
+ <_> + + <_> + 4 12 10 4 -1. + <_> + 9 12 5 4 2. + <_> + + <_> + 13 1 4 19 -1. + <_> + 13 1 2 19 2. + <_> + + <_> + 7 1 4 19 -1. + <_> + 9 1 2 19 2. + <_> + + <_> + 18 9 6 9 -1. + <_> + 18 12 6 3 3. + <_> + + <_> + 1 21 18 3 -1. + <_> + 1 22 18 1 3. + <_> + + <_> + 14 13 10 9 -1. + <_> + 14 16 10 3 3. + <_> + + <_> + 1 13 22 4 -1. + <_> + 1 13 11 2 2. + <_> + 12 15 11 2 2. + <_> + + <_> + 4 6 16 6 -1. + <_> + 12 6 8 3 2. + <_> + 4 9 8 3 2. + <_> + + <_> + 1 0 18 22 -1. + <_> + 1 0 9 11 2. + <_> + 10 11 9 11 2. + <_> + + <_> + 10 7 8 14 -1. + <_> + 14 7 4 7 2. + <_> + 10 14 4 7 2. + <_> + + <_> + 0 4 6 20 -1. + <_> + 0 4 3 10 2. + <_> + 3 14 3 10 2. + <_> + + <_> + 15 0 6 9 -1. + <_> + 17 0 2 9 3. + <_> + + <_> + 3 0 6 9 -1. + <_> + 5 0 2 9 3. + <_> + + <_> + 15 12 6 12 -1. + <_> + 18 12 3 6 2. + <_> + 15 18 3 6 2. + <_> + + <_> + 3 12 6 12 -1. + <_> + 3 12 3 6 2. + <_> + 6 18 3 6 2. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. + <_> + + <_> + 0 12 9 6 -1. + <_> + 0 14 9 2 3. + <_> + + <_> + 4 14 19 3 -1. + <_> + 4 15 19 1 3. + <_> + + <_> + 2 13 19 3 -1. + <_> + 2 14 19 1 3. + <_> + + <_> + 14 15 10 6 -1. + <_> + 14 17 10 2 3. + <_> + + <_> + 6 0 10 12 -1. + <_> + 6 0 5 6 2. + <_> + 11 6 5 6 2. + <_> + + <_> + 17 1 6 12 -1. + <_> + 20 1 3 6 2. + <_> + 17 7 3 6 2. + <_> + + <_> + 1 1 6 12 -1. + <_> + 1 1 3 6 2. + <_> + 4 7 3 6 2. + <_> + + <_> + 16 14 6 9 -1. + <_> + 16 17 6 3 3. + <_> + + <_> + 7 3 9 12 -1. + <_> + 7 9 9 6 2. + <_> + + <_> + 12 1 4 12 -1. + <_> + 12 7 4 6 2. + <_> + + <_> + 4 0 14 8 -1. + <_> + 4 4 14 4 2. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 2 10 18 3 -1. + <_> + 8 10 6 3 3. + <_> + + <_> + 15 15 9 6 -1. + <_> + 15 17 9 2 3. + <_> + + <_> + 0 1 21 23 -1. + <_> + 7 1 7 23 3. + <_> + + <_> + 6 9 17 4 -1. + <_> + 6 11 17 2 2. + <_> + + <_> + 1 0 11 18 -1. + <_> + 1 6 11 6 3. + <_> + + <_> + 6 15 13 6 -1. + <_> + 6 17 13 2 3. + <_> + + <_> + 0 15 9 6 -1. + <_> + 0 17 9 2 3. + <_> + + <_> + 8 7 15 4 -1. + <_> + 13 7 5 4 3. + <_> + + <_> + 9 12 6 9 -1. + <_> + 9 15 6 3 3. + <_> + + <_> + 6 8 18 3 -1. + <_> + 12 8 6 3 3. + <_> + + <_> + 0 14 24 4 -1. + <_> + 8 14 8 4 3. + <_> + + <_> + 16 10 3 12 -1. + <_> + 16 16 3 6 2. + <_> + + <_> + 0 3 24 3 -1. + <_> + 0 4 24 1 3. + <_> + + <_> + 14 17 10 6 -1. + <_> + 14 19 10 2 3. + <_> + + <_> + 1 13 18 3 -1. + <_> + 7 13 6 3 3. + <_> + + <_> + 5 0 18 9 -1. + <_> + 5 3 18 3 3. + <_> + + <_> + 4 3 16 9 -1. + <_> + 4 6 16 3 3. + <_> + + <_> + 16 5 3 12 -1. + <_> + 16 11 3 6 2. + <_> + + <_> + 0 7 18 4 -1. + <_> + 6 7 6 4 3. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 9 8 6 10 -1. + <_> + 11 8 2 10 3. + <_> + + <_> + 9 15 6 9 -1. + <_> + 11 15 2 9 3. + <_> + + <_> + 3 1 18 21 -1. + <_> + 12 1 9 21 2. + <_> + + <_> + 6 8 12 7 -1. + <_> + 6 8 6 7 2. + <_> + + <_> + 8 5 6 9 -1. + <_> + 10 5 2 9 3. + <_> + + <_> + 0 2 24 4 -1. + <_> + 8 2 8 4 3. + <_> + + <_> + 14 7 5 12 -1. + <_> + 14 11 5 4 3. + <_> + + <_> + 5 7 5 12 -1. + <_> + 5 11 5 4 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 0 1 6 17 -1. + <_> + 3 1 3 17 2. + <_> + + <_> + 3 1 19 9 -1. + <_> + 3 4 19 3 3. + <_> + + <_> + 3 18 12 6 -1. + <_> + 3 18 6 3 2. + <_> + 9 21 6 3 2. + <_> + + <_> + 20 4 4 19 -1. + <_> + 20 4 2 19 2. + <_> + + <_> + 0 16 10 7 -1. + <_> + 5 16 5 7 2. + <_> + + <_> + 8 7 10 12 -1. + <_> + 13 7 5 6 2. + <_> + 8 13 5 6 2. + <_> + + <_> + 6 7 10 12 -1. + <_> + 6 7 5 6 2. + <_> + 11 13 5 6 2. + <_> + + <_> + 9 2 9 6 -1. + <_> + 12 2 3 6 3. + <_> + + <_> + 1 20 21 4 -1. + <_> + 8 20 7 4 3. 
+ <_> + + <_> + 9 12 9 6 -1. + <_> + 9 14 9 2 3. + <_> + + <_> + 7 2 9 6 -1. + <_> + 10 2 3 6 3. + <_> + + <_> + 13 0 4 14 -1. + <_> + 13 0 2 14 2. + <_> + + <_> + 7 0 4 14 -1. + <_> + 9 0 2 14 2. + <_> + + <_> + 14 15 9 6 -1. + <_> + 14 17 9 2 3. + <_> + + <_> + 2 8 18 5 -1. + <_> + 8 8 6 5 3. + <_> + + <_> + 18 3 6 11 -1. + <_> + 20 3 2 11 3. + <_> + + <_> + 6 5 11 14 -1. + <_> + 6 12 11 7 2. + <_> + + <_> + 18 4 6 9 -1. + <_> + 18 7 6 3 3. + <_> + + <_> + 7 6 9 6 -1. + <_> + 7 8 9 2 3. + <_> + + <_> + 18 4 6 9 -1. + <_> + 18 7 6 3 3. + <_> + + <_> + 0 4 6 9 -1. + <_> + 0 7 6 3 3. + <_> + + <_> + 9 4 9 4 -1. + <_> + 9 6 9 2 2. + <_> + + <_> + 0 22 19 2 -1. + <_> + 0 23 19 1 2. + <_> + + <_> + 17 14 6 9 -1. + <_> + 17 17 6 3 3. + <_> + + <_> + 1 14 6 9 -1. + <_> + 1 17 6 3 3. + <_> + + <_> + 14 11 4 9 -1. + <_> + 14 11 2 9 2. + <_> + + <_> + 6 11 4 9 -1. + <_> + 8 11 2 9 2. + <_> + + <_> + 3 9 18 7 -1. + <_> + 9 9 6 7 3. + <_> + + <_> + 9 12 6 10 -1. + <_> + 9 17 6 5 2. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 6 17 18 3 -1. + <_> + 6 18 18 1 3. + <_> + + <_> + 1 17 18 3 -1. + <_> + 1 18 18 1 3. + <_> + + <_> + 10 6 11 12 -1. + <_> + 10 12 11 6 2. + <_> + + <_> + 5 6 14 6 -1. + <_> + 5 6 7 3 2. + <_> + 12 9 7 3 2. + <_> + + <_> + 5 4 15 4 -1. + <_> + 5 6 15 2 2. + <_> + + <_> + 0 0 22 2 -1. + <_> + 0 1 22 1 2. + <_> + + <_> + 0 0 24 24 -1. + <_> + 8 0 8 24 3. + <_> + + <_> + 1 15 18 4 -1. + <_> + 10 15 9 4 2. + <_> + + <_> + 6 8 12 9 -1. + <_> + 6 11 12 3 3. + <_> + + <_> + 4 12 7 12 -1. + <_> + 4 16 7 4 3. + <_> + + <_> + 1 2 22 6 -1. + <_> + 12 2 11 3 2. + <_> + 1 5 11 3 2. + <_> + + <_> + 5 20 14 3 -1. + <_> + 12 20 7 3 2. + <_> + + <_> + 0 0 24 16 -1. + <_> + 12 0 12 8 2. + <_> + 0 8 12 8 2. + <_> + + <_> + 3 13 18 4 -1. + <_> + 3 13 9 2 2. + <_> + 12 15 9 2 2. + <_> + + <_> + 2 10 22 2 -1. + <_> + 2 11 22 1 2. + <_> + + <_> + 6 3 11 8 -1. + <_> + 6 7 11 4 2. + <_> + + <_> + 14 5 6 6 -1. + <_> + 14 8 6 3 2. + <_> + + <_> + 0 7 24 6 -1. + <_> + 0 9 24 2 3. + <_> + + <_> + 14 0 10 10 -1. + <_> + 19 0 5 5 2. + <_> + 14 5 5 5 2. + <_> + + <_> + 0 0 10 10 -1. + <_> + 0 0 5 5 2. + <_> + 5 5 5 5 2. + <_> + + <_> + 0 1 24 4 -1. + <_> + 12 1 12 2 2. + <_> + 0 3 12 2 2. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 5 15 16 6 -1. + <_> + 13 15 8 3 2. + <_> + 5 18 8 3 2. + <_> + + <_> + 3 15 16 6 -1. + <_> + 3 15 8 3 2. + <_> + 11 18 8 3 2. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 0 13 21 10 -1. + <_> + 0 18 21 5 2. + <_> + + <_> + 13 0 6 24 -1. + <_> + 15 0 2 24 3. + <_> + + <_> + 7 4 6 11 -1. + <_> + 9 4 2 11 3. + <_> + + <_> + 9 5 9 6 -1. + <_> + 12 5 3 6 3. + <_> + + <_> + 1 4 2 20 -1. + <_> + 1 14 2 10 2. + <_> + + <_> + 13 0 6 24 -1. + <_> + 15 0 2 24 3. + <_> + + <_> + 5 0 6 24 -1. + <_> + 7 0 2 24 3. + <_> + + <_> + 16 7 6 14 -1. + <_> + 19 7 3 7 2. + <_> + 16 14 3 7 2. + <_> + + <_> + 4 7 4 12 -1. + <_> + 6 7 2 12 2. + <_> + + <_> + 0 5 24 14 -1. + <_> + 8 5 8 14 3. + <_> + + <_> + 5 13 10 6 -1. + <_> + 5 15 10 2 3. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 2 7 6 14 -1. + <_> + 2 7 3 7 2. + <_> + 5 14 3 7 2. + <_> + + <_> + 15 2 9 15 -1. + <_> + 18 2 3 15 3. + <_> + + <_> + 0 2 6 9 -1. + <_> + 2 2 2 9 3. + <_> + + <_> + 12 2 10 14 -1. + <_> + 17 2 5 7 2. + <_> + 12 9 5 7 2. + <_> + + <_> + 11 6 2 18 -1. + <_> + 12 6 1 18 2. + <_> + + <_> + 9 5 15 6 -1. + <_> + 14 5 5 6 3. + <_> + + <_> + 8 6 6 10 -1. + <_> + 10 6 2 10 3. + <_> + + <_> + 12 0 6 9 -1. 
+ <_> + 14 0 2 9 3. + <_> + + <_> + 3 3 9 7 -1. + <_> + 6 3 3 7 3. + <_> + + <_> + 6 7 14 3 -1. + <_> + 6 7 7 3 2. + <_> + + <_> + 7 7 8 6 -1. + <_> + 11 7 4 6 2. + <_> + + <_> + 12 7 7 12 -1. + <_> + 12 13 7 6 2. + <_> + + <_> + 10 6 4 18 -1. + <_> + 10 6 2 9 2. + <_> + 12 15 2 9 2. + <_> + + <_> + 16 14 6 9 -1. + <_> + 16 17 6 3 3. + <_> + + <_> + 4 0 6 13 -1. + <_> + 6 0 2 13 3. + <_> + + <_> + 2 2 21 3 -1. + <_> + 9 2 7 3 3. + <_> + + <_> + 5 4 5 12 -1. + <_> + 5 8 5 4 3. + <_> + + <_> + 10 3 4 10 -1. + <_> + 10 8 4 5 2. + <_> + + <_> + 8 4 5 8 -1. + <_> + 8 8 5 4 2. + <_> + + <_> + 6 0 11 9 -1. + <_> + 6 3 11 3 3. + <_> + + <_> + 6 6 12 5 -1. + <_> + 10 6 4 5 3. + <_> + + <_> + 0 0 24 5 -1. + <_> + 8 0 8 5 3. + <_> + + <_> + 1 10 23 6 -1. + <_> + 1 12 23 2 3. + <_> + + <_> + 3 21 18 3 -1. + <_> + 9 21 6 3 3. + <_> + + <_> + 3 6 21 6 -1. + <_> + 3 8 21 2 3. + <_> + + <_> + 0 5 6 12 -1. + <_> + 2 5 2 12 3. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 8 7 8 10 -1. + <_> + 8 12 8 5 2. + <_> + + <_> + 5 7 15 12 -1. + <_> + 10 7 5 12 3. + <_> + + <_> + 0 17 10 6 -1. + <_> + 0 19 10 2 3. + <_> + + <_> + 14 18 9 6 -1. + <_> + 14 20 9 2 3. + <_> + + <_> + 9 6 6 16 -1. + <_> + 9 14 6 8 2. + <_> + + <_> + 14 18 9 6 -1. + <_> + 14 20 9 2 3. + <_> + + <_> + 1 18 9 6 -1. + <_> + 1 20 9 2 3. + <_> + + <_> + 15 9 9 6 -1. + <_> + 15 11 9 2 3. + <_> + + <_> + 0 9 9 6 -1. + <_> + 0 11 9 2 3. + <_> + + <_> + 17 3 6 9 -1. + <_> + 19 3 2 9 3. + <_> + + <_> + 2 17 18 3 -1. + <_> + 2 18 18 1 3. + <_> + + <_> + 3 15 21 6 -1. + <_> + 3 17 21 2 3. + <_> + + <_> + 9 17 6 6 -1. + <_> + 9 20 6 3 2. + <_> + + <_> + 18 3 6 9 -1. + <_> + 18 6 6 3 3. + <_> + + <_> + 0 3 6 9 -1. + <_> + 0 6 6 3 3. + <_> + + <_> + 4 0 16 10 -1. + <_> + 12 0 8 5 2. + <_> + 4 5 8 5 2. + <_> + + <_> + 2 0 10 16 -1. + <_> + 2 0 5 8 2. + <_> + 7 8 5 8 2. + <_> + + <_> + 14 0 10 5 -1. + <_> + 14 0 5 5 2. + <_> + + <_> + 0 0 10 5 -1. + <_> + 5 0 5 5 2. + <_> + + <_> + 18 3 6 10 -1. + <_> + 18 3 3 10 2. + <_> + + <_> + 5 11 12 6 -1. + <_> + 5 11 6 3 2. + <_> + 11 14 6 3 2. + <_> + + <_> + 21 0 3 18 -1. + <_> + 22 0 1 18 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 8 8 9 7 -1. + <_> + 11 8 3 7 3. + <_> + + <_> + 7 12 8 10 -1. + <_> + 7 12 4 5 2. + <_> + 11 17 4 5 2. + <_> + + <_> + 21 0 3 18 -1. + <_> + 22 0 1 18 3. + <_> + + <_> + 10 6 4 9 -1. + <_> + 12 6 2 9 2. + <_> + + <_> + 15 0 9 6 -1. + <_> + 15 2 9 2 3. + <_> + + <_> + 0 2 24 3 -1. + <_> + 0 3 24 1 3. + <_> + + <_> + 11 7 6 9 -1. + <_> + 13 7 2 9 3. + <_> + + <_> + 7 6 6 10 -1. + <_> + 9 6 2 10 3. + <_> + + <_> + 12 1 6 12 -1. + <_> + 14 1 2 12 3. + <_> + + <_> + 6 4 12 12 -1. + <_> + 6 10 12 6 2. + <_> + + <_> + 14 3 2 21 -1. + <_> + 14 3 1 21 2. + <_> + + <_> + 6 1 12 8 -1. + <_> + 6 5 12 4 2. + <_> + + <_> + 3 0 18 8 -1. + <_> + 3 4 18 4 2. + <_> + + <_> + 3 0 18 3 -1. + <_> + 3 1 18 1 3. + <_> + + <_> + 0 13 24 4 -1. + <_> + 12 13 12 2 2. + <_> + 0 15 12 2 2. + <_> + + <_> + 10 5 4 9 -1. + <_> + 12 5 2 9 2. + <_> + + <_> + 11 1 6 9 -1. + <_> + 13 1 2 9 3. + <_> + + <_> + 6 2 6 22 -1. + <_> + 8 2 2 22 3. + <_> + + <_> + 16 10 8 14 -1. + <_> + 20 10 4 7 2. + <_> + 16 17 4 7 2. + <_> + + <_> + 3 4 16 15 -1. + <_> + 3 9 16 5 3. + <_> + + <_> + 16 10 8 14 -1. + <_> + 20 10 4 7 2. + <_> + 16 17 4 7 2. + <_> + + <_> + 0 10 8 14 -1. + <_> + 0 10 4 7 2. + <_> + 4 17 4 7 2. + <_> + + <_> + 10 14 11 6 -1. + <_> + 10 17 11 3 2. + <_> + + <_> + 0 7 24 9 -1. + <_> + 8 7 8 9 3. + <_> + + <_> + 13 1 4 16 -1. + <_> + 13 1 2 16 2. + <_> + + <_> + 7 1 4 16 -1. 
+ <_> + 9 1 2 16 2. + <_> + + <_> + 5 5 16 8 -1. + <_> + 13 5 8 4 2. + <_> + 5 9 8 4 2. + <_> + + <_> + 0 9 6 9 -1. + <_> + 0 12 6 3 3. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 3 12 6 9 -1. + <_> + 3 15 6 3 3. + <_> + + <_> + 8 14 9 6 -1. + <_> + 8 16 9 2 3. + <_> + + <_> + 2 13 8 10 -1. + <_> + 2 13 4 5 2. + <_> + 6 18 4 5 2. + <_> + + <_> + 15 5 3 18 -1. + <_> + 15 11 3 6 3. + <_> + + <_> + 3 5 18 3 -1. + <_> + 3 6 18 1 3. + <_> + + <_> + 17 5 6 11 -1. + <_> + 19 5 2 11 3. + <_> + + <_> + 1 5 6 11 -1. + <_> + 3 5 2 11 3. + <_> + + <_> + 19 1 4 9 -1. + <_> + 19 1 2 9 2. + <_> + + <_> + 1 1 4 9 -1. + <_> + 3 1 2 9 2. + <_> + + <_> + 4 15 18 9 -1. + <_> + 4 15 9 9 2. + <_> + + <_> + 6 9 12 4 -1. + <_> + 6 11 12 2 2. + <_> + + <_> + 15 2 9 6 -1. + <_> + 15 4 9 2 3. + <_> + + <_> + 0 2 9 6 -1. + <_> + 0 4 9 2 3. + <_> + + <_> + 15 0 6 17 -1. + <_> + 17 0 2 17 3. + <_> + + <_> + 3 0 6 17 -1. + <_> + 5 0 2 17 3. + <_> + + <_> + 8 17 9 4 -1. + <_> + 8 19 9 2 2. + <_> + + <_> + 6 5 3 18 -1. + <_> + 6 11 3 6 3. + <_> + + <_> + 5 2 14 12 -1. + <_> + 5 8 14 6 2. + <_> + + <_> + 10 2 3 12 -1. + <_> + 10 8 3 6 2. + <_> + + <_> + 10 7 14 15 -1. + <_> + 10 12 14 5 3. + <_> + + <_> + 0 7 14 15 -1. + <_> + 0 12 14 5 3. + <_> + + <_> + 15 0 9 6 -1. + <_> + 15 2 9 2 3. + <_> + + <_> + 0 0 9 6 -1. + <_> + 0 2 9 2 3. + <_> + + <_> + 12 6 6 14 -1. + <_> + 14 6 2 14 3. + <_> + + <_> + 9 7 6 9 -1. + <_> + 11 7 2 9 3. + <_> + + <_> + 12 6 6 15 -1. + <_> + 14 6 2 15 3. + <_> + + <_> + 6 6 6 15 -1. + <_> + 8 6 2 15 3. + <_> + + <_> + 15 3 8 9 -1. + <_> + 15 3 4 9 2. + <_> + + <_> + 0 0 9 21 -1. + <_> + 3 0 3 21 3. + <_> + + <_> + 11 9 8 12 -1. + <_> + 11 13 8 4 3. + <_> + + <_> + 6 7 10 12 -1. + <_> + 6 7 5 6 2. + <_> + 11 13 5 6 2. + <_> + + <_> + 10 6 4 18 -1. + <_> + 12 6 2 9 2. + <_> + 10 15 2 9 2. + <_> + + <_> + 0 0 6 9 -1. + <_> + 0 3 6 3 3. + <_> + + <_> + 3 14 18 3 -1. + <_> + 3 15 18 1 3. + <_> + + <_> + 3 14 8 10 -1. + <_> + 3 14 4 5 2. + <_> + 7 19 4 5 2. + <_> + + <_> + 0 12 24 4 -1. + <_> + 12 12 12 2 2. + <_> + 0 14 12 2 2. + <_> + + <_> + 0 2 3 20 -1. + <_> + 1 2 1 20 3. + <_> + + <_> + 12 16 10 8 -1. + <_> + 17 16 5 4 2. + <_> + 12 20 5 4 2. + <_> + + <_> + 2 16 10 8 -1. + <_> + 2 16 5 4 2. + <_> + 7 20 5 4 2. + <_> + + <_> + 7 0 10 9 -1. + <_> + 7 3 10 3 3. + <_> + + <_> + 0 0 24 3 -1. + <_> + 8 0 8 3 3. + <_> + + <_> + 3 8 15 4 -1. + <_> + 3 10 15 2 2. + <_> + + <_> + 6 5 12 6 -1. + <_> + 10 5 4 6 3. + <_> + + <_> + 5 13 14 6 -1. + <_> + 5 16 14 3 2. + <_> + + <_> + 11 14 4 10 -1. + <_> + 11 19 4 5 2. + <_> + + <_> + 0 6 6 7 -1. + <_> + 3 6 3 7 2. + <_> + + <_> + 18 0 6 6 -1. + <_> + 18 0 3 6 2. + <_> + + <_> + 3 1 18 3 -1. + <_> + 3 2 18 1 3. + <_> + + <_> + 9 6 14 18 -1. + <_> + 9 12 14 6 3. + <_> + + <_> + 0 0 6 6 -1. + <_> + 3 0 3 6 2. + <_> + + <_> + 13 11 6 6 -1. + <_> + 13 11 3 6 2. + <_> + + <_> + 0 20 24 3 -1. + <_> + 8 20 8 3 3. + <_> + + <_> + 13 11 6 7 -1. + <_> + 13 11 3 7 2. + <_> + + <_> + 4 12 10 6 -1. + <_> + 4 14 10 2 3. + <_> + + <_> + 13 11 6 6 -1. + <_> + 13 11 3 6 2. + <_> + + <_> + 5 11 6 7 -1. + <_> + 8 11 3 7 2. + <_> + + <_> + 7 4 11 12 -1. + <_> + 7 8 11 4 3. + <_> + + <_> + 6 15 10 4 -1. + <_> + 6 17 10 2 2. + <_> + + <_> + 14 0 6 9 -1. + <_> + 16 0 2 9 3. + <_> + + <_> + 4 0 6 9 -1. + <_> + 6 0 2 9 3. + <_> + + <_> + 11 2 4 15 -1. + <_> + 11 7 4 5 3. + <_> + + <_> + 0 0 20 3 -1. + <_> + 0 1 20 1 3. + <_> + + <_> + 13 18 10 6 -1. + <_> + 13 20 10 2 3. + <_> + + <_> + 2 7 6 11 -1. + <_> + 5 7 3 11 2. + <_> + + <_> + 10 14 10 9 -1. 
+ <_> + 10 17 10 3 3. + <_> + + <_> + 8 2 4 9 -1. + <_> + 10 2 2 9 2. + <_> + + <_> + 14 3 10 4 -1. + <_> + 14 3 5 4 2. + <_> + + <_> + 6 6 12 6 -1. + <_> + 6 6 6 3 2. + <_> + 12 9 6 3 2. + <_> + + <_> + 8 8 8 10 -1. + <_> + 12 8 4 5 2. + <_> + 8 13 4 5 2. + <_> + + <_> + 7 4 4 16 -1. + <_> + 7 12 4 8 2. + <_> + + <_> + 8 8 9 4 -1. + <_> + 8 10 9 2 2. + <_> + + <_> + 5 2 14 9 -1. + <_> + 5 5 14 3 3. + <_> + + <_> + 3 16 19 8 -1. + <_> + 3 20 19 4 2. + <_> + + <_> + 0 0 10 8 -1. + <_> + 5 0 5 8 2. + <_> + + <_> + 5 2 16 18 -1. + <_> + 5 2 8 18 2. + <_> + + <_> + 0 11 24 11 -1. + <_> + 8 11 8 11 3. + <_> + + <_> + 3 3 18 5 -1. + <_> + 3 3 9 5 2. + <_> + + <_> + 1 16 18 3 -1. + <_> + 1 17 18 1 3. + <_> + + <_> + 5 17 18 3 -1. + <_> + 5 18 18 1 3. + <_> + + <_> + 1 13 9 6 -1. + <_> + 1 15 9 2 3. + <_> + + <_> + 1 9 23 10 -1. + <_> + 1 14 23 5 2. + <_> + + <_> + 3 7 18 3 -1. + <_> + 3 8 18 1 3. + <_> + + <_> + 6 8 12 3 -1. + <_> + 6 8 6 3 2. + <_> + + <_> + 6 2 3 22 -1. + <_> + 7 2 1 22 3. + <_> + + <_> + 14 17 10 6 -1. + <_> + 14 19 10 2 3. + <_> + + <_> + 1 18 10 6 -1. + <_> + 1 20 10 2 3. + <_> + + <_> + 11 3 6 12 -1. + <_> + 13 3 2 12 3. + <_> + + <_> + 10 6 4 9 -1. + <_> + 12 6 2 9 2. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 12 10 9 6 -1. + <_> + 15 10 3 6 3. + <_> + + <_> + 2 11 6 9 -1. + <_> + 5 11 3 9 2. + <_> + + <_> + 14 5 3 19 -1. + <_> + 15 5 1 19 3. + <_> + + <_> + 6 6 9 6 -1. + <_> + 6 8 9 2 3. + <_> + + <_> + 14 5 3 19 -1. + <_> + 15 5 1 19 3. + <_> + + <_> + 0 3 6 9 -1. + <_> + 0 6 6 3 3. + <_> + + <_> + 5 21 18 3 -1. + <_> + 5 22 18 1 3. + <_> + + <_> + 1 10 18 4 -1. + <_> + 7 10 6 4 3. + <_> + + <_> + 13 4 8 10 -1. + <_> + 17 4 4 5 2. + <_> + 13 9 4 5 2. + <_> + + <_> + 7 8 9 6 -1. + <_> + 10 8 3 6 3. + <_> + + <_> + 12 9 9 8 -1. + <_> + 15 9 3 8 3. + <_> + + <_> + 0 6 5 12 -1. + <_> + 0 10 5 4 3. + <_> + + <_> + 7 6 14 6 -1. + <_> + 14 6 7 3 2. + <_> + 7 9 7 3 2. + <_> + + <_> + 7 5 3 19 -1. + <_> + 8 5 1 19 3. + <_> + + <_> + 8 4 15 20 -1. + <_> + 13 4 5 20 3. + <_> + + <_> + 1 4 15 20 -1. + <_> + 6 4 5 20 3. + <_> + + <_> + 13 10 6 6 -1. + <_> + 13 10 3 6 2. + <_> + + <_> + 5 10 6 6 -1. + <_> + 8 10 3 6 2. + <_> + + <_> + 14 2 6 14 -1. + <_> + 17 2 3 7 2. + <_> + 14 9 3 7 2. + <_> + + <_> + 4 2 6 14 -1. + <_> + 4 2 3 7 2. + <_> + 7 9 3 7 2. + <_> + + <_> + 12 4 6 7 -1. + <_> + 12 4 3 7 2. + <_> + + <_> + 9 4 6 9 -1. + <_> + 11 4 2 9 3. + <_> + + <_> + 11 4 8 10 -1. + <_> + 11 4 4 10 2. + <_> + + <_> + 5 4 8 10 -1. + <_> + 9 4 4 10 2. + <_> + + <_> + 8 18 10 6 -1. + <_> + 8 20 10 2 3. + <_> + + <_> + 1 18 21 6 -1. + <_> + 1 20 21 2 3. + <_> + + <_> + 9 2 12 6 -1. + <_> + 9 2 6 6 2. + <_> + + <_> + 3 2 12 6 -1. + <_> + 9 2 6 6 2. + <_> + + <_> + 12 5 12 6 -1. + <_> + 18 5 6 3 2. + <_> + 12 8 6 3 2. + <_> + + <_> + 8 8 6 9 -1. + <_> + 8 11 6 3 3. + <_> + + <_> + 2 7 20 6 -1. + <_> + 2 9 20 2 3. + <_> + + <_> + 0 5 12 6 -1. + <_> + 0 5 6 3 2. + <_> + 6 8 6 3 2. + <_> + + <_> + 14 14 8 10 -1. + <_> + 18 14 4 5 2. + <_> + 14 19 4 5 2. + <_> + + <_> + 2 14 8 10 -1. + <_> + 2 14 4 5 2. + <_> + 6 19 4 5 2. + <_> + + <_> + 2 11 20 13 -1. + <_> + 2 11 10 13 2. + <_> + + <_> + 6 9 12 5 -1. + <_> + 12 9 6 5 2. + <_> + + <_> + 5 6 16 6 -1. + <_> + 13 6 8 3 2. + <_> + 5 9 8 3 2. + <_> + + <_> + 1 19 9 4 -1. + <_> + 1 21 9 2 2. + <_> + + <_> + 7 5 12 5 -1. + <_> + 11 5 4 5 3. + <_> + + <_> + 3 5 14 12 -1. + <_> + 3 5 7 6 2. + <_> + 10 11 7 6 2. + <_> + + <_> + 9 4 9 6 -1. + <_> + 12 4 3 6 3. + <_> + + <_> + 2 6 19 3 -1. 
+ <_> + 2 7 19 1 3. + <_> + + <_> + 18 10 6 9 -1. + <_> + 18 13 6 3 3. + <_> + + <_> + 3 7 18 2 -1. + <_> + 3 8 18 1 2. + <_> + + <_> + 20 2 4 18 -1. + <_> + 22 2 2 9 2. + <_> + 20 11 2 9 2. + <_> + + <_> + 2 18 20 3 -1. + <_> + 2 19 20 1 3. + <_> + + <_> + 1 9 22 3 -1. + <_> + 1 10 22 1 3. + <_> + + <_> + 0 2 4 18 -1. + <_> + 0 2 2 9 2. + <_> + 2 11 2 9 2. + <_> + + <_> + 19 0 4 23 -1. + <_> + 19 0 2 23 2. + <_> + + <_> + 0 3 6 19 -1. + <_> + 3 3 3 19 2. + <_> + + <_> + 18 2 6 9 -1. + <_> + 20 2 2 9 3. + <_> + + <_> + 0 5 10 6 -1. + <_> + 0 7 10 2 3. + <_> + + <_> + 7 0 12 12 -1. + <_> + 13 0 6 6 2. + <_> + 7 6 6 6 2. + <_> + + <_> + 0 3 24 6 -1. + <_> + 0 3 12 3 2. + <_> + 12 6 12 3 2. + <_> + + <_> + 10 14 4 10 -1. + <_> + 10 19 4 5 2. + <_> + + <_> + 8 9 4 15 -1. + <_> + 8 14 4 5 3. + <_> + + <_> + 4 11 17 6 -1. + <_> + 4 14 17 3 2. + <_> + + <_> + 2 5 18 8 -1. + <_> + 2 5 9 4 2. + <_> + 11 9 9 4 2. + <_> + + <_> + 7 6 14 6 -1. + <_> + 14 6 7 3 2. + <_> + 7 9 7 3 2. + <_> + + <_> + 3 6 14 6 -1. + <_> + 3 6 7 3 2. + <_> + 10 9 7 3 2. + <_> + + <_> + 16 5 3 18 -1. + <_> + 17 5 1 18 3. + <_> + + <_> + 5 5 3 18 -1. + <_> + 6 5 1 18 3. + <_> + + <_> + 10 10 14 4 -1. + <_> + 10 12 14 2 2. + <_> + + <_> + 4 10 9 4 -1. + <_> + 4 12 9 2 2. + <_> + + <_> + 2 0 18 9 -1. + <_> + 2 3 18 3 3. + <_> + + <_> + 6 3 12 8 -1. + <_> + 10 3 4 8 3. + <_> + + <_> + 1 1 8 5 -1. + <_> + 5 1 4 5 2. + <_> + + <_> + 12 7 7 8 -1. + <_> + 12 11 7 4 2. + <_> + + <_> + 0 12 22 4 -1. + <_> + 0 14 22 2 2. + <_> + + <_> + 15 6 4 15 -1. + <_> + 15 11 4 5 3. + <_> + + <_> + 5 7 7 8 -1. + <_> + 5 11 7 4 2. + <_> + + <_> + 8 18 9 4 -1. + <_> + 8 20 9 2 2. + <_> + + <_> + 1 2 22 4 -1. + <_> + 1 4 22 2 2. + <_> + + <_> + 17 3 6 17 -1. + <_> + 19 3 2 17 3. + <_> + + <_> + 8 2 8 18 -1. + <_> + 8 11 8 9 2. + <_> + + <_> + 17 0 6 12 -1. + <_> + 20 0 3 6 2. + <_> + 17 6 3 6 2. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 15 5 9 12 -1. + <_> + 15 11 9 6 2. + <_> + + <_> + 2 22 18 2 -1. + <_> + 2 23 18 1 2. + <_> + + <_> + 10 10 12 6 -1. + <_> + 16 10 6 3 2. + <_> + 10 13 6 3 2. + <_> + + <_> + 0 1 4 11 -1. + <_> + 2 1 2 11 2. + <_> + + <_> + 20 0 4 10 -1. + <_> + 20 0 2 10 2. + <_> + + <_> + 1 3 6 17 -1. + <_> + 3 3 2 17 3. + <_> + + <_> + 15 15 9 6 -1. + <_> + 15 17 9 2 3. + <_> + + <_> + 0 13 8 9 -1. + <_> + 0 16 8 3 3. + <_> + + <_> + 16 8 6 12 -1. + <_> + 16 12 6 4 3. + <_> + + <_> + 2 8 6 12 -1. + <_> + 2 12 6 4 3. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 1 5 19 3 -1. + <_> + 1 6 19 1 3. + <_> + + <_> + 11 8 9 7 -1. + <_> + 14 8 3 7 3. + <_> + + <_> + 3 8 12 9 -1. + <_> + 3 11 12 3 3. + <_> + + <_> + 3 6 18 3 -1. + <_> + 3 7 18 1 3. + <_> + + <_> + 10 0 4 12 -1. + <_> + 10 6 4 6 2. + <_> + + <_> + 3 9 18 14 -1. + <_> + 3 9 9 14 2. + <_> + + <_> + 0 0 4 9 -1. + <_> + 2 0 2 9 2. + <_> + + <_> + 12 5 4 18 -1. + <_> + 12 5 2 18 2. + <_> + + <_> + 8 5 4 18 -1. + <_> + 10 5 2 18 2. + <_> + + <_> + 10 5 6 10 -1. + <_> + 12 5 2 10 3. + <_> + + <_> + 9 4 4 11 -1. + <_> + 11 4 2 11 2. + <_> + + <_> + 4 16 18 3 -1. + <_> + 4 17 18 1 3. + <_> + + <_> + 0 16 20 3 -1. + <_> + 0 17 20 1 3. + <_> + + <_> + 9 9 6 12 -1. + <_> + 9 13 6 4 3. + <_> + + <_> + 8 13 8 8 -1. + <_> + 8 17 8 4 2. + <_> + + <_> + 13 10 3 12 -1. + <_> + 13 16 3 6 2. + <_> + + <_> + 5 9 14 14 -1. + <_> + 5 9 7 7 2. + <_> + 12 16 7 7 2. + <_> + + <_> + 0 0 24 10 -1. + <_> + 12 0 12 5 2. + <_> + 0 5 12 5 2. + <_> + + <_> + 1 11 18 2 -1. + <_> + 1 12 18 1 2. + <_> + + <_> + 19 5 5 12 -1. + <_> + 19 9 5 4 3. 
+ <_> + + <_> + 0 5 5 12 -1. + <_> + 0 9 5 4 3. + <_> + + <_> + 16 6 8 18 -1. + <_> + 20 6 4 9 2. + <_> + 16 15 4 9 2. + <_> + + <_> + 0 6 8 18 -1. + <_> + 0 6 4 9 2. + <_> + 4 15 4 9 2. + <_> + + <_> + 12 5 12 12 -1. + <_> + 18 5 6 6 2. + <_> + 12 11 6 6 2. + <_> + + <_> + 7 6 6 9 -1. + <_> + 9 6 2 9 3. + <_> + + <_> + 9 13 6 11 -1. + <_> + 11 13 2 11 3. + <_> + + <_> + 0 5 12 12 -1. + <_> + 0 5 6 6 2. + <_> + 6 11 6 6 2. + <_> + + <_> + 1 2 23 3 -1. + <_> + 1 3 23 1 3. + <_> + + <_> + 1 15 19 3 -1. + <_> + 1 16 19 1 3. + <_> + + <_> + 13 17 11 4 -1. + <_> + 13 19 11 2 2. + <_> + + <_> + 0 13 8 5 -1. + <_> + 4 13 4 5 2. + <_> + + <_> + 12 10 10 4 -1. + <_> + 12 10 5 4 2. + <_> + + <_> + 4 6 9 9 -1. + <_> + 4 9 9 3 3. + <_> + + <_> + 15 14 9 6 -1. + <_> + 15 16 9 2 3. + <_> + + <_> + 1 12 9 6 -1. + <_> + 1 14 9 2 3. + <_> + + <_> + 3 10 20 8 -1. + <_> + 13 10 10 4 2. + <_> + 3 14 10 4 2. + <_> + + <_> + 2 0 9 18 -1. + <_> + 5 0 3 18 3. + <_> + + <_> + 13 11 9 10 -1. + <_> + 16 11 3 10 3. + <_> + + <_> + 1 2 8 5 -1. + <_> + 5 2 4 5 2. + <_> + + <_> + 3 4 21 6 -1. + <_> + 10 4 7 6 3. + <_> + + <_> + 7 0 10 14 -1. + <_> + 7 0 5 7 2. + <_> + 12 7 5 7 2. + <_> + + <_> + 12 17 12 4 -1. + <_> + 12 19 12 2 2. + <_> + + <_> + 0 6 23 4 -1. + <_> + 0 8 23 2 2. + <_> + + <_> + 13 10 8 10 -1. + <_> + 17 10 4 5 2. + <_> + 13 15 4 5 2. + <_> + + <_> + 0 16 18 3 -1. + <_> + 0 17 18 1 3. + <_> + + <_> + 15 16 9 4 -1. + <_> + 15 18 9 2 2. + <_> + + <_> + 0 16 9 4 -1. + <_> + 0 18 9 2 2. + <_> + + <_> + 13 11 6 6 -1. + <_> + 13 11 3 6 2. + <_> + + <_> + 5 11 6 6 -1. + <_> + 8 11 3 6 2. + <_> + + <_> + 0 3 24 6 -1. + <_> + 12 3 12 3 2. + <_> + 0 6 12 3 2. + <_> + + <_> + 2 4 18 3 -1. + <_> + 2 5 18 1 3. + <_> + + <_> + 0 0 24 4 -1. + <_> + 12 0 12 2 2. + <_> + 0 2 12 2 2. + <_> + + <_> + 1 16 18 3 -1. + <_> + 1 17 18 1 3. + <_> + + <_> + 15 15 9 6 -1. + <_> + 15 17 9 2 3. + <_> + + <_> + 0 15 9 6 -1. + <_> + 0 17 9 2 3. + <_> + + <_> + 6 17 18 3 -1. + <_> + 6 18 18 1 3. + <_> + + <_> + 8 8 6 10 -1. + <_> + 10 8 2 10 3. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 8 8 5 8 -1. + <_> + 8 12 5 4 2. + <_> + + <_> + 12 8 6 8 -1. + <_> + 12 12 6 4 2. + <_> + + <_> + 6 5 6 11 -1. + <_> + 8 5 2 11 3. + <_> + + <_> + 13 6 8 9 -1. + <_> + 13 9 8 3 3. + <_> + + <_> + 1 7 21 6 -1. + <_> + 1 9 21 2 3. + <_> + + <_> + 15 5 3 12 -1. + <_> + 15 11 3 6 2. + <_> + + <_> + 6 9 11 12 -1. + <_> + 6 13 11 4 3. + <_> + + <_> + 13 8 10 8 -1. + <_> + 18 8 5 4 2. + <_> + 13 12 5 4 2. + <_> + + <_> + 5 8 12 3 -1. + <_> + 11 8 6 3 2. + <_> + + <_> + 6 11 18 4 -1. + <_> + 12 11 6 4 3. + <_> + + <_> + 0 0 22 22 -1. + <_> + 0 11 22 11 2. + <_> + + <_> + 11 2 6 8 -1. + <_> + 11 6 6 4 2. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 10 0 6 9 -1. + <_> + 12 0 2 9 3. + <_> + + <_> + 8 3 6 14 -1. + <_> + 8 3 3 7 2. + <_> + 11 10 3 7 2. + <_> + + <_> + 3 10 18 8 -1. + <_> + 9 10 6 8 3. + <_> + + <_> + 10 0 3 14 -1. + <_> + 10 7 3 7 2. + <_> + + <_> + 4 3 16 20 -1. + <_> + 4 13 16 10 2. + <_> + + <_> + 9 4 6 10 -1. + <_> + 11 4 2 10 3. + <_> + + <_> + 5 0 16 4 -1. + <_> + 5 2 16 2 2. + <_> + + <_> + 2 5 18 4 -1. + <_> + 8 5 6 4 3. + <_> + + <_> + 13 0 6 9 -1. + <_> + 15 0 2 9 3. + <_> + + <_> + 8 4 8 5 -1. + <_> + 12 4 4 5 2. + <_> + + <_> + 12 10 10 4 -1. + <_> + 12 10 5 4 2. + <_> + + <_> + 2 10 10 4 -1. + <_> + 7 10 5 4 2. + <_> + + <_> + 7 11 12 5 -1. + <_> + 11 11 4 5 3. + <_> + + <_> + 3 10 8 10 -1. + <_> + 3 10 4 5 2. + <_> + 7 15 4 5 2. + <_> + + <_> + 11 12 9 8 -1. + <_> + 14 12 3 8 3. 
+ <_> + + <_> + 0 21 24 3 -1. + <_> + 8 21 8 3 3. + <_> + + <_> + 3 20 18 4 -1. + <_> + 9 20 6 4 3. + <_> + + <_> + 1 15 9 6 -1. + <_> + 1 17 9 2 3. + <_> + + <_> + 11 17 10 4 -1. + <_> + 11 19 10 2 2. + <_> + + <_> + 9 12 4 12 -1. + <_> + 9 18 4 6 2. + <_> + + <_> + 9 6 9 6 -1. + <_> + 12 6 3 6 3. + <_> + + <_> + 1 13 6 9 -1. + <_> + 1 16 6 3 3. + <_> + + <_> + 6 16 12 4 -1. + <_> + 6 18 12 2 2. + <_> + + <_> + 1 5 20 3 -1. + <_> + 1 6 20 1 3. + <_> + + <_> + 8 1 9 9 -1. + <_> + 8 4 9 3 3. + <_> + + <_> + 2 19 9 4 -1. + <_> + 2 21 9 2 2. + <_> + + <_> + 11 1 4 18 -1. + <_> + 11 7 4 6 3. + <_> + + <_> + 7 2 8 12 -1. + <_> + 7 2 4 6 2. + <_> + 11 8 4 6 2. + <_> + + <_> + 11 10 9 8 -1. + <_> + 14 10 3 8 3. + <_> + + <_> + 5 11 12 5 -1. + <_> + 9 11 4 5 3. + <_> + + <_> + 11 9 9 6 -1. + <_> + 14 9 3 6 3. + <_> + + <_> + 5 10 6 9 -1. + <_> + 7 10 2 9 3. + <_> + + <_> + 4 7 5 12 -1. + <_> + 4 11 5 4 3. + <_> + + <_> + 2 0 21 6 -1. + <_> + 9 0 7 6 3. + <_> + + <_> + 7 6 10 6 -1. + <_> + 7 8 10 2 3. + <_> + + <_> + 9 0 6 15 -1. + <_> + 11 0 2 15 3. + <_> + + <_> + 2 2 18 2 -1. + <_> + 2 3 18 1 2. + <_> + + <_> + 8 17 8 6 -1. + <_> + 8 20 8 3 2. + <_> + + <_> + 3 0 18 2 -1. + <_> + 3 1 18 1 2. + <_> + + <_> + 8 0 9 6 -1. + <_> + 11 0 3 6 3. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 6 7 12 5 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 0 3 6 9 -1. + <_> + 2 3 2 9 3. + <_> + + <_> + 20 2 4 9 -1. + <_> + 20 2 2 9 2. + <_> + + <_> + 0 2 4 9 -1. + <_> + 2 2 2 9 2. + <_> + + <_> + 0 1 24 4 -1. + <_> + 12 1 12 2 2. + <_> + 0 3 12 2 2. + <_> + + <_> + 0 16 9 6 -1. + <_> + 0 18 9 2 3. + <_> + + <_> + 14 13 9 6 -1. + <_> + 14 15 9 2 3. + <_> + + <_> + 0 15 19 3 -1. + <_> + 0 16 19 1 3. + <_> + + <_> + 1 5 22 12 -1. + <_> + 12 5 11 6 2. + <_> + 1 11 11 6 2. + <_> + + <_> + 5 13 6 6 -1. + <_> + 8 13 3 6 2. + <_> + + <_> + 4 2 20 3 -1. + <_> + 4 3 20 1 3. + <_> + + <_> + 8 14 6 10 -1. + <_> + 10 14 2 10 3. + <_> + + <_> + 6 12 16 6 -1. + <_> + 14 12 8 3 2. + <_> + 6 15 8 3 2. + <_> + + <_> + 2 13 8 9 -1. + <_> + 2 16 8 3 3. + <_> + + <_> + 11 8 6 14 -1. + <_> + 14 8 3 7 2. + <_> + 11 15 3 7 2. + <_> + + <_> + 2 12 16 6 -1. + <_> + 2 12 8 3 2. + <_> + 10 15 8 3 2. + <_> + + <_> + 5 16 16 8 -1. + <_> + 5 20 16 4 2. + <_> + + <_> + 9 1 4 12 -1. + <_> + 9 7 4 6 2. + <_> + + <_> + 8 2 8 10 -1. + <_> + 12 2 4 5 2. + <_> + 8 7 4 5 2. + <_> + + <_> + 6 6 12 6 -1. + <_> + 6 6 6 3 2. + <_> + 12 9 6 3 2. + <_> + + <_> + 10 7 6 9 -1. + <_> + 12 7 2 9 3. + <_> + + <_> + 0 0 8 12 -1. + <_> + 0 0 4 6 2. + <_> + 4 6 4 6 2. + <_> + + <_> + 18 8 6 9 -1. + <_> + 18 11 6 3 3. + <_> + + <_> + 2 12 6 6 -1. + <_> + 5 12 3 6 2. + <_> + + <_> + 3 21 21 3 -1. + <_> + 10 21 7 3 3. + <_> + + <_> + 2 0 16 6 -1. + <_> + 2 3 16 3 2. + <_> + + <_> + 13 6 7 6 -1. + <_> + 13 9 7 3 2. + <_> + + <_> + 6 4 4 14 -1. + <_> + 6 11 4 7 2. + <_> + + <_> + 9 7 6 9 -1. + <_> + 11 7 2 9 3. + <_> + + <_> + 7 8 6 14 -1. + <_> + 7 8 3 7 2. + <_> + 10 15 3 7 2. + <_> + + <_> + 18 8 4 16 -1. + <_> + 18 16 4 8 2. + <_> + + <_> + 9 14 6 10 -1. + <_> + 11 14 2 10 3. + <_> + + <_> + 6 11 12 5 -1. + <_> + 10 11 4 5 3. + <_> + + <_> + 0 12 23 3 -1. + <_> + 0 13 23 1 3. + <_> + + <_> + 13 0 6 12 -1. + <_> + 15 0 2 12 3. + <_> + + <_> + 0 10 12 5 -1. + <_> + 4 10 4 5 3. + <_> + + <_> + 13 2 10 4 -1. + <_> + 13 4 10 2 2. + <_> + + <_> + 5 0 6 12 -1. + <_> + 7 0 2 12 3. + <_> + + <_> + 11 6 9 6 -1. + <_> + 14 6 3 6 3. + <_> + + <_> + 4 6 9 6 -1. + <_> + 7 6 3 6 3. + <_> + + <_> + 6 11 18 13 -1. + <_> + 12 11 6 13 3. + <_> + + <_> + 0 11 18 13 -1. 
+ <_> + 6 11 6 13 3. + <_> + + <_> + 12 16 12 6 -1. + <_> + 16 16 4 6 3. + <_> + + <_> + 0 6 21 3 -1. + <_> + 0 7 21 1 3. + <_> + + <_> + 12 16 12 6 -1. + <_> + 16 16 4 6 3. + <_> + + <_> + 5 7 6 14 -1. + <_> + 5 14 6 7 2. + <_> + + <_> + 5 10 19 2 -1. + <_> + 5 11 19 1 2. + <_> + + <_> + 5 4 14 4 -1. + <_> + 5 6 14 2 2. + <_> + + <_> + 3 18 18 4 -1. + <_> + 9 18 6 4 3. + <_> + + <_> + 7 0 4 9 -1. + <_> + 9 0 2 9 2. + <_> + + <_> + 13 3 11 4 -1. + <_> + 13 5 11 2 2. + <_> + + <_> + 2 0 9 6 -1. + <_> + 5 0 3 6 3. + <_> + + <_> + 19 1 4 23 -1. + <_> + 19 1 2 23 2. + <_> + + <_> + 1 1 4 23 -1. + <_> + 3 1 2 23 2. + <_> + + <_> + 5 16 18 3 -1. + <_> + 5 17 18 1 3. + <_> + + <_> + 0 3 11 4 -1. + <_> + 0 5 11 2 2. + <_> + + <_> + 2 16 20 3 -1. + <_> + 2 17 20 1 3. + <_> + + <_> + 5 3 13 4 -1. + <_> + 5 5 13 2 2. + <_> + + <_> + 1 9 22 15 -1. + <_> + 1 9 11 15 2. + <_> + + <_> + 3 4 14 3 -1. + <_> + 10 4 7 3 2. + <_> + + <_> + 8 7 10 4 -1. + <_> + 8 7 5 4 2. + <_> + + <_> + 6 7 10 4 -1. + <_> + 11 7 5 4 2. + <_> + + <_> + 10 4 6 9 -1. + <_> + 12 4 2 9 3. + <_> + + <_> + 1 12 9 6 -1. + <_> + 4 12 3 6 3. + <_> + + <_> + 8 3 8 10 -1. + <_> + 12 3 4 5 2. + <_> + 8 8 4 5 2. + <_> + + <_> + 3 6 16 6 -1. + <_> + 3 6 8 3 2. + <_> + 11 9 8 3 2. + <_> + + <_> + 5 6 14 6 -1. + <_> + 5 9 14 3 2. + <_> + + <_> + 4 3 9 6 -1. + <_> + 4 5 9 2 3. + <_> + + <_> + 6 3 18 2 -1. + <_> + 6 4 18 1 2. + <_> + + <_> + 7 6 9 6 -1. + <_> + 10 6 3 6 3. + <_> + + <_> + 0 1 24 3 -1. + <_> + 0 2 24 1 3. + <_> + + <_> + 0 17 10 6 -1. + <_> + 0 19 10 2 3. + <_> + + <_> + 3 18 18 3 -1. + <_> + 3 19 18 1 3. + <_> + + <_> + 2 5 6 16 -1. + <_> + 2 5 3 8 2. + <_> + 5 13 3 8 2. + <_> + + <_> + 7 6 11 6 -1. + <_> + 7 8 11 2 3. + <_> + + <_> + 5 2 12 22 -1. + <_> + 5 13 12 11 2. + <_> + + <_> + 10 7 4 10 -1. + <_> + 10 12 4 5 2. + <_> + + <_> + 9 0 4 18 -1. + <_> + 9 6 4 6 3. + <_> + + <_> + 18 8 6 9 -1. + <_> + 18 11 6 3 3. + <_> + + <_> + 4 7 15 10 -1. + <_> + 9 7 5 10 3. + <_> + + <_> + 10 5 6 9 -1. + <_> + 12 5 2 9 3. + <_> + + <_> + 9 9 6 10 -1. + <_> + 11 9 2 10 3. + <_> + + <_> + 11 14 6 10 -1. + <_> + 13 14 2 10 3. + <_> + + <_> + 7 14 6 10 -1. + <_> + 9 14 2 10 3. + <_> + + <_> + 4 8 16 9 -1. + <_> + 4 11 16 3 3. + <_> + + <_> + 2 11 20 3 -1. + <_> + 2 12 20 1 3. + <_> + + <_> + 13 0 4 13 -1. + <_> + 13 0 2 13 2. + <_> + + <_> + 7 0 4 13 -1. + <_> + 9 0 2 13 2. + <_> + + <_> + 3 1 18 7 -1. + <_> + 9 1 6 7 3. + <_> + + <_> + 1 11 6 9 -1. + <_> + 1 14 6 3 3. + <_> + + <_> + 8 18 9 6 -1. + <_> + 8 20 9 2 3. + <_> + + <_> + 3 9 15 6 -1. + <_> + 3 11 15 2 3. + <_> + + <_> + 5 10 19 2 -1. + <_> + 5 11 19 1 2. + <_> + + <_> + 8 6 7 16 -1. + <_> + 8 14 7 8 2. + <_> + + <_> + 9 14 9 6 -1. + <_> + 9 16 9 2 3. + <_> + + <_> + 0 7 8 12 -1. + <_> + 0 11 8 4 3. + <_> + + <_> + 6 4 18 3 -1. + <_> + 6 5 18 1 3. + <_> + + <_> + 0 16 12 6 -1. + <_> + 4 16 4 6 3. + <_> + + <_> + 13 13 9 4 -1. + <_> + 13 15 9 2 2. + <_> + + <_> + 5 8 14 14 -1. + <_> + 5 8 7 7 2. + <_> + 12 15 7 7 2. + <_> + + <_> + 1 16 22 6 -1. + <_> + 12 16 11 3 2. + <_> + 1 19 11 3 2. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 9 5 10 10 -1. + <_> + 14 5 5 5 2. + <_> + 9 10 5 5 2. + <_> + + <_> + 5 5 10 10 -1. + <_> + 5 5 5 5 2. + <_> + 10 10 5 5 2. + <_> + + <_> + 4 6 16 6 -1. + <_> + 12 6 8 3 2. + <_> + 4 9 8 3 2. + <_> + + <_> + 0 7 6 9 -1. + <_> + 0 10 6 3 3. + <_> + + <_> + 16 10 8 14 -1. + <_> + 20 10 4 7 2. + <_> + 16 17 4 7 2. + <_> + + <_> + 9 12 6 12 -1. + <_> + 9 18 6 6 2. + <_> + + <_> + 8 10 8 12 -1. + <_> + 12 10 4 6 2. + <_> + 8 16 4 6 2. 
+ <_> + + <_> + 8 0 4 9 -1. + <_> + 10 0 2 9 2. + <_> + + <_> + 10 4 8 16 -1. + <_> + 14 4 4 8 2. + <_> + 10 12 4 8 2. + <_> + + <_> + 7 10 10 6 -1. + <_> + 7 12 10 2 3. + <_> + + <_> + 5 6 14 14 -1. + <_> + 12 6 7 7 2. + <_> + 5 13 7 7 2. + <_> + + <_> + 2 11 20 2 -1. + <_> + 2 12 20 1 2. + <_> + + <_> + 18 8 4 16 -1. + <_> + 18 16 4 8 2. + <_> + + <_> + 1 11 12 10 -1. + <_> + 1 11 6 5 2. + <_> + 7 16 6 5 2. + <_> + + <_> + 6 9 12 4 -1. + <_> + 6 11 12 2 2. + <_> + + <_> + 9 12 6 7 -1. + <_> + 12 12 3 7 2. + <_> + + <_> + 10 4 8 16 -1. + <_> + 14 4 4 8 2. + <_> + 10 12 4 8 2. + <_> + + <_> + 6 4 8 16 -1. + <_> + 6 4 4 8 2. + <_> + 10 12 4 8 2. + <_> + + <_> + 8 9 9 6 -1. + <_> + 11 9 3 6 3. + <_> + + <_> + 1 5 16 12 -1. + <_> + 1 5 8 6 2. + <_> + 9 11 8 6 2. + <_> + + <_> + 9 9 6 8 -1. + <_> + 9 9 3 8 2. + <_> + + <_> + 6 0 3 18 -1. + <_> + 7 0 1 18 3. + <_> + + <_> + 17 9 5 14 -1. + <_> + 17 16 5 7 2. + <_> + + <_> + 2 9 5 14 -1. + <_> + 2 16 5 7 2. + <_> + + <_> + 7 4 10 6 -1. + <_> + 7 7 10 3 2. + <_> + + <_> + 1 3 23 18 -1. + <_> + 1 9 23 6 3. + <_> + + <_> + 1 1 21 3 -1. + <_> + 8 1 7 3 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 3 18 12 6 -1. + <_> + 3 18 6 3 2. + <_> + 9 21 6 3 2. + <_> + + <_> + 16 8 8 16 -1. + <_> + 20 8 4 8 2. + <_> + 16 16 4 8 2. + <_> + + <_> + 0 19 24 4 -1. + <_> + 8 19 8 4 3. + <_> + + <_> + 16 8 8 16 -1. + <_> + 20 8 4 8 2. + <_> + 16 16 4 8 2. + <_> + + <_> + 0 8 8 16 -1. + <_> + 0 8 4 8 2. + <_> + 4 16 4 8 2. + <_> + + <_> + 8 12 8 10 -1. + <_> + 8 17 8 5 2. + <_> + + <_> + 5 7 5 8 -1. + <_> + 5 11 5 4 2. + <_> + + <_> + 4 1 19 2 -1. + <_> + 4 2 19 1 2. + <_> + + <_> + 0 12 24 9 -1. + <_> + 8 12 8 9 3. + <_> + + <_> + 6 0 13 8 -1. + <_> + 6 4 13 4 2. + <_> + + <_> + 0 0 24 3 -1. + <_> + 0 1 24 1 3. + <_> + + <_> + 20 3 4 11 -1. + <_> + 20 3 2 11 2. + <_> + + <_> + 8 6 6 9 -1. + <_> + 10 6 2 9 3. + <_> + + <_> + 6 11 12 8 -1. + <_> + 12 11 6 4 2. + <_> + 6 15 6 4 2. + <_> + + <_> + 0 8 12 6 -1. + <_> + 0 8 6 3 2. + <_> + 6 11 6 3 2. + <_> + + <_> + 6 17 18 3 -1. + <_> + 6 18 18 1 3. + <_> + + <_> + 0 14 9 6 -1. + <_> + 0 16 9 2 3. + <_> + + <_> + 20 3 4 9 -1. + <_> + 20 3 2 9 2. + <_> + + <_> + 0 3 4 9 -1. + <_> + 2 3 2 9 2. + <_> + + <_> + 15 0 9 19 -1. + <_> + 18 0 3 19 3. + <_> + + <_> + 0 0 9 19 -1. + <_> + 3 0 3 19 3. + <_> + + <_> + 13 11 6 8 -1. + <_> + 13 11 3 8 2. + <_> + + <_> + 5 11 6 8 -1. + <_> + 8 11 3 8 2. + <_> + + <_> + 5 11 19 3 -1. + <_> + 5 12 19 1 3. + <_> + + <_> + 3 20 18 4 -1. + <_> + 9 20 6 4 3. + <_> + + <_> + 6 6 16 6 -1. + <_> + 6 8 16 2 3. + <_> + + <_> + 6 0 9 6 -1. + <_> + 9 0 3 6 3. + <_> + + <_> + 10 3 4 14 -1. + <_> + 10 10 4 7 2. + <_> + + <_> + 1 5 15 12 -1. + <_> + 1 11 15 6 2. + <_> + + <_> + 11 12 8 5 -1. + <_> + 11 12 4 5 2. + <_> + + <_> + 5 0 6 9 -1. + <_> + 7 0 2 9 3. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 5 5 12 8 -1. + <_> + 5 5 6 4 2. + <_> + 11 9 6 4 2. + <_> + + <_> + 13 12 11 6 -1. + <_> + 13 14 11 2 3. + <_> + + <_> + 0 13 21 3 -1. + <_> + 0 14 21 1 3. + <_> + + <_> + 8 1 8 12 -1. + <_> + 12 1 4 6 2. + <_> + 8 7 4 6 2. + <_> + + <_> + 1 0 6 12 -1. + <_> + 1 0 3 6 2. + <_> + 4 6 3 6 2. + <_> + + <_> + 2 2 21 2 -1. + <_> + 2 3 21 1 2. + <_> + + <_> + 2 2 19 3 -1. + <_> + 2 3 19 1 3. + <_> + + <_> + 17 10 6 14 -1. + <_> + 20 10 3 7 2. + <_> + 17 17 3 7 2. + <_> + + <_> + 1 10 6 14 -1. + <_> + 1 10 3 7 2. + <_> + 4 17 3 7 2. + <_> + + <_> + 7 6 14 14 -1. + <_> + 14 6 7 7 2. + <_> + 7 13 7 7 2. + <_> + + <_> + 0 12 9 6 -1. + <_> + 0 14 9 2 3. 
+ <_> + + <_> + 15 14 8 9 -1. + <_> + 15 17 8 3 3. + <_> + + <_> + 1 1 22 4 -1. + <_> + 1 1 11 2 2. + <_> + 12 3 11 2 2. + <_> + + <_> + 9 11 9 6 -1. + <_> + 9 13 9 2 3. + <_> + + <_> + 0 15 18 3 -1. + <_> + 0 16 18 1 3. + <_> + + <_> + 16 14 7 9 -1. + <_> + 16 17 7 3 3. + <_> + + <_> + 4 3 16 4 -1. + <_> + 12 3 8 4 2. + <_> + + <_> + 7 6 12 5 -1. + <_> + 7 6 6 5 2. + <_> + + <_> + 9 6 4 9 -1. + <_> + 11 6 2 9 2. + <_> + + <_> + 12 1 4 10 -1. + <_> + 12 1 2 10 2. + <_> + + <_> + 8 1 4 10 -1. + <_> + 10 1 2 10 2. + <_> + + <_> + 15 15 6 9 -1. + <_> + 15 18 6 3 3. + <_> + + <_> + 3 15 6 9 -1. + <_> + 3 18 6 3 3. + <_> + + <_> + 15 1 3 19 -1. + <_> + 16 1 1 19 3. + <_> + + <_> + 1 3 6 9 -1. + <_> + 3 3 2 9 3. + <_> + + <_> + 15 0 3 19 -1. + <_> + 16 0 1 19 3. + <_> + + <_> + 6 3 12 4 -1. + <_> + 12 3 6 4 2. + <_> + + <_> + 10 5 4 9 -1. + <_> + 10 5 2 9 2. + <_> + + <_> + 6 0 3 19 -1. + <_> + 7 0 1 19 3. + <_> + + <_> + 11 1 3 12 -1. + <_> + 11 7 3 6 2. + <_> + + <_> + 6 7 10 5 -1. + <_> + 11 7 5 5 2. + <_> + + <_> + 11 3 3 18 -1. + <_> + 12 3 1 18 3. + <_> + + <_> + 9 3 6 12 -1. + <_> + 11 3 2 12 3. + <_> + + <_> + 3 7 19 3 -1. + <_> + 3 8 19 1 3. + <_> + + <_> + 2 7 18 3 -1. + <_> + 2 8 18 1 3. + <_> + + <_> + 3 13 18 4 -1. + <_> + 12 13 9 2 2. + <_> + 3 15 9 2 2. + <_> + + <_> + 3 5 6 9 -1. + <_> + 5 5 2 9 3. + <_> + + <_> + 4 1 20 4 -1. + <_> + 14 1 10 2 2. + <_> + 4 3 10 2 2. + <_> + + <_> + 0 1 20 4 -1. + <_> + 0 1 10 2 2. + <_> + 10 3 10 2 2. + <_> + + <_> + 10 15 6 6 -1. + <_> + 10 15 3 6 2. + <_> + + <_> + 0 2 24 8 -1. + <_> + 8 2 8 8 3. + <_> + + <_> + 5 5 18 3 -1. + <_> + 5 6 18 1 3. + <_> + + <_> + 8 15 6 6 -1. + <_> + 11 15 3 6 2. + <_> + + <_> + 11 12 8 5 -1. + <_> + 11 12 4 5 2. + <_> + + <_> + 5 12 8 5 -1. + <_> + 9 12 4 5 2. + <_> + + <_> + 5 0 14 6 -1. + <_> + 5 2 14 2 3. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 10 7 5 12 -1. + <_> + 10 11 5 4 3. + <_> + + <_> + 7 9 8 14 -1. + <_> + 7 9 4 7 2. + <_> + 11 16 4 7 2. + <_> + + <_> + 1 5 22 6 -1. + <_> + 12 5 11 3 2. + <_> + 1 8 11 3 2. + <_> + + <_> + 0 5 6 6 -1. + <_> + 0 8 6 3 2. + <_> + + <_> + 12 17 9 4 -1. + <_> + 12 19 9 2 2. + <_> + + <_> + 2 18 19 3 -1. + <_> + 2 19 19 1 3. + <_> + + <_> + 12 17 9 4 -1. + <_> + 12 19 9 2 2. + <_> + + <_> + 1 17 18 3 -1. + <_> + 1 18 18 1 3. + <_> + + <_> + 12 17 9 4 -1. + <_> + 12 19 9 2 2. + <_> + + <_> + 0 0 24 3 -1. + <_> + 0 1 24 1 3. + <_> + + <_> + 5 0 14 4 -1. + <_> + 5 2 14 2 2. + <_> + + <_> + 6 14 9 6 -1. + <_> + 6 16 9 2 3. + <_> + + <_> + 14 13 6 9 -1. + <_> + 14 16 6 3 3. + <_> + + <_> + 5 20 13 4 -1. + <_> + 5 22 13 2 2. + <_> + + <_> + 9 9 6 12 -1. + <_> + 9 13 6 4 3. + <_> + + <_> + 1 10 21 3 -1. + <_> + 8 10 7 3 3. + <_> + + <_> + 8 8 9 6 -1. + <_> + 11 8 3 6 3. + <_> + + <_> + 3 10 9 7 -1. + <_> + 6 10 3 7 3. + <_> + + <_> + 12 10 10 8 -1. + <_> + 17 10 5 4 2. + <_> + 12 14 5 4 2. + <_> + + <_> + 0 15 24 3 -1. + <_> + 8 15 8 3 3. + <_> + + <_> + 8 5 9 6 -1. + <_> + 8 7 9 2 3. + <_> + + <_> + 4 13 6 9 -1. + <_> + 4 16 6 3 3. + <_> + + <_> + 12 17 9 4 -1. + <_> + 12 19 9 2 2. + <_> + + <_> + 9 12 6 6 -1. + <_> + 9 15 6 3 2. + <_> + + <_> + 9 9 14 10 -1. + <_> + 16 9 7 5 2. + <_> + 9 14 7 5 2. + <_> + + <_> + 1 9 14 10 -1. + <_> + 1 9 7 5 2. + <_> + 8 14 7 5 2. + <_> + + <_> + 8 7 9 17 -1. + <_> + 11 7 3 17 3. + <_> + + <_> + 3 4 6 20 -1. + <_> + 3 4 3 10 2. + <_> + 6 14 3 10 2. + <_> + + <_> + 7 8 10 4 -1. + <_> + 7 8 5 4 2. + <_> + + <_> + 10 7 4 9 -1. + <_> + 12 7 2 9 2. + <_> + + <_> + 10 15 6 9 -1. + <_> + 12 15 2 9 3. 
+ <_> + + <_> + 3 8 6 16 -1. + <_> + 3 8 3 8 2. + <_> + 6 16 3 8 2. + <_> + + <_> + 12 17 9 4 -1. + <_> + 12 19 9 2 2. + <_> + + <_> + 3 17 9 4 -1. + <_> + 3 19 9 2 2. + <_> + + <_> + 10 1 9 6 -1. + <_> + 13 1 3 6 3. + <_> + + <_> + 5 7 4 10 -1. + <_> + 5 12 4 5 2. + <_> + + <_> + 7 5 12 6 -1. + <_> + 11 5 4 6 3. + <_> + + <_> + 6 4 9 8 -1. + <_> + 9 4 3 8 3. + <_> + + <_> + 12 16 10 8 -1. + <_> + 17 16 5 4 2. + <_> + 12 20 5 4 2. + <_> + + <_> + 2 16 10 8 -1. + <_> + 2 16 5 4 2. + <_> + 7 20 5 4 2. + <_> + + <_> + 0 0 24 4 -1. + <_> + 12 0 12 2 2. + <_> + 0 2 12 2 2. + <_> + + <_> + 0 6 9 6 -1. + <_> + 0 8 9 2 3. + <_> + + <_> + 0 4 24 6 -1. + <_> + 12 4 12 3 2. + <_> + 0 7 12 3 2. + <_> + + <_> + 5 0 11 4 -1. + <_> + 5 2 11 2 2. + <_> + + <_> + 1 1 22 4 -1. + <_> + 12 1 11 2 2. + <_> + 1 3 11 2 2. + <_> + + <_> + 9 6 6 18 -1. + <_> + 9 15 6 9 2. + <_> + + <_> + 2 9 20 4 -1. + <_> + 2 11 20 2 2. + <_> + + <_> + 5 2 14 14 -1. + <_> + 5 9 14 7 2. + <_> + + <_> + 4 2 16 6 -1. + <_> + 4 5 16 3 2. + <_> + + <_> + 2 3 19 3 -1. + <_> + 2 4 19 1 3. + <_> + + <_> + 7 1 10 4 -1. + <_> + 7 3 10 2 2. + <_> + + <_> + 0 9 4 15 -1. + <_> + 0 14 4 5 3. + <_> + + <_> + 2 10 21 3 -1. + <_> + 2 11 21 1 3. + <_> + + <_> + 3 0 6 6 -1. + <_> + 6 0 3 6 2. + <_> + + <_> + 6 4 14 9 -1. + <_> + 6 7 14 3 3. + <_> + + <_> + 9 1 6 9 -1. + <_> + 11 1 2 9 3. + <_> + + <_> + 15 8 9 9 -1. + <_> + 15 11 9 3 3. + <_> + + <_> + 8 0 4 21 -1. + <_> + 8 7 4 7 3. + <_> + + <_> + 3 22 19 2 -1. + <_> + 3 23 19 1 2. + <_> + + <_> + 2 15 20 3 -1. + <_> + 2 16 20 1 3. + <_> + + <_> + 19 0 4 13 -1. + <_> + 19 0 2 13 2. + <_> + + <_> + 1 7 8 8 -1. + <_> + 1 11 8 4 2. + <_> + + <_> + 14 14 6 9 -1. + <_> + 14 17 6 3 3. + <_> + + <_> + 4 14 6 9 -1. + <_> + 4 17 6 3 3. + <_> + + <_> + 14 5 4 10 -1. + <_> + 14 5 2 10 2. + <_> + + <_> + 6 5 4 10 -1. + <_> + 8 5 2 10 2. + <_> + + <_> + 14 5 6 6 -1. + <_> + 14 8 6 3 2. + <_> + + <_> + 4 5 6 6 -1. + <_> + 4 8 6 3 2. + <_> + + <_> + 0 2 24 21 -1. + <_> + 8 2 8 21 3. + <_> + + <_> + 1 2 6 13 -1. + <_> + 3 2 2 13 3. + <_> + + <_> + 20 0 4 21 -1. + <_> + 20 0 2 21 2. + <_> + + <_> + 0 4 4 20 -1. + <_> + 2 4 2 20 2. + <_> + + <_> + 8 16 9 6 -1. + <_> + 8 18 9 2 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 16 12 7 9 -1. + <_> + 16 15 7 3 3. + <_> + + <_> + 5 21 14 3 -1. + <_> + 12 21 7 3 2. + <_> + + <_> + 11 5 6 9 -1. + <_> + 11 5 3 9 2. + <_> + + <_> + 10 5 4 10 -1. + <_> + 12 5 2 10 2. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 7 5 6 9 -1. + <_> + 10 5 3 9 2. + <_> + + <_> + 14 14 10 4 -1. + <_> + 14 16 10 2 2. + <_> + + <_> + 5 5 14 14 -1. + <_> + 5 5 7 7 2. + <_> + 12 12 7 7 2. + <_> + + <_> + 12 8 12 6 -1. + <_> + 18 8 6 3 2. + <_> + 12 11 6 3 2. + <_> + + <_> + 6 6 12 12 -1. + <_> + 6 6 6 6 2. + <_> + 12 12 6 6 2. + <_> + + <_> + 11 13 6 10 -1. + <_> + 13 13 2 10 3. + <_> + + <_> + 1 10 20 8 -1. + <_> + 1 10 10 4 2. + <_> + 11 14 10 4 2. + <_> + + <_> + 15 13 9 6 -1. + <_> + 15 15 9 2 3. + <_> + + <_> + 9 0 6 9 -1. + <_> + 9 3 6 3 3. + <_> + + <_> + 10 1 5 14 -1. + <_> + 10 8 5 7 2. + <_> + + <_> + 3 4 16 6 -1. + <_> + 3 6 16 2 3. + <_> + + <_> + 16 3 8 9 -1. + <_> + 16 6 8 3 3. + <_> + + <_> + 7 13 6 10 -1. + <_> + 9 13 2 10 3. + <_> + + <_> + 15 13 9 6 -1. + <_> + 15 15 9 2 3. + <_> + + <_> + 0 13 9 6 -1. + <_> + 0 15 9 2 3. + <_> + + <_> + 13 16 9 6 -1. + <_> + 13 18 9 2 3. + <_> + + <_> + 2 16 9 6 -1. + <_> + 2 18 9 2 3. + <_> + + <_> + 5 16 18 3 -1. + <_> + 5 17 18 1 3. + <_> + + <_> + 1 16 18 3 -1. + <_> + 1 17 18 1 3. 
+ <_> + + <_> + 5 0 18 3 -1. + <_> + 5 1 18 1 3. + <_> + + <_> + 1 1 19 2 -1. + <_> + 1 2 19 1 2. + <_> + + <_> + 14 2 6 11 -1. + <_> + 16 2 2 11 3. + <_> + + <_> + 4 15 15 6 -1. + <_> + 9 15 5 6 3. + <_> + + <_> + 14 2 6 11 -1. + <_> + 16 2 2 11 3. + <_> + + <_> + 4 2 6 11 -1. + <_> + 6 2 2 11 3. + <_> + + <_> + 18 2 6 9 -1. + <_> + 18 5 6 3 3. + <_> + + <_> + 1 2 22 4 -1. + <_> + 1 2 11 2 2. + <_> + 12 4 11 2 2. + <_> + + <_> + 2 0 21 12 -1. + <_> + 9 0 7 12 3. + <_> + + <_> + 0 12 18 3 -1. + <_> + 0 13 18 1 3. + <_> + + <_> + 12 2 6 9 -1. + <_> + 14 2 2 9 3. + <_> + + <_> + 3 10 18 3 -1. + <_> + 3 11 18 1 3. + <_> + + <_> + 16 3 8 9 -1. + <_> + 16 6 8 3 3. + <_> + + <_> + 3 7 18 3 -1. + <_> + 3 8 18 1 3. + <_> + + <_> + 9 11 6 9 -1. + <_> + 11 11 2 9 3. + <_> + + <_> + 9 8 6 9 -1. + <_> + 11 8 2 9 3. + <_> + + <_> + 15 0 2 18 -1. + <_> + 15 0 1 18 2. + <_> + + <_> + 7 0 2 18 -1. + <_> + 8 0 1 18 2. + <_> + + <_> + 17 3 7 9 -1. + <_> + 17 6 7 3 3. + <_> + + <_> + 3 18 9 6 -1. + <_> + 3 20 9 2 3. + <_> + + <_> + 3 18 21 3 -1. + <_> + 3 19 21 1 3. + <_> + + <_> + 0 3 7 9 -1. + <_> + 0 6 7 3 3. + <_> + + <_> + 2 7 22 3 -1. + <_> + 2 8 22 1 3. + <_> + + <_> + 0 3 24 16 -1. + <_> + 0 3 12 8 2. + <_> + 12 11 12 8 2. + <_> + + <_> + 13 17 9 4 -1. + <_> + 13 19 9 2 2. + <_> + + <_> + 5 5 12 8 -1. + <_> + 5 5 6 4 2. + <_> + 11 9 6 4 2. + <_> + + <_> + 5 6 14 6 -1. + <_> + 12 6 7 3 2. + <_> + 5 9 7 3 2. + <_> + + <_> + 5 16 14 6 -1. + <_> + 5 16 7 3 2. + <_> + 12 19 7 3 2. + <_> + + <_> + 18 2 6 9 -1. + <_> + 18 5 6 3 3. + <_> + + <_> + 0 2 6 9 -1. + <_> + 0 5 6 3 3. + <_> + + <_> + 3 4 20 10 -1. + <_> + 13 4 10 5 2. + <_> + 3 9 10 5 2. + <_> + + <_> + 2 13 9 8 -1. + <_> + 5 13 3 8 3. + <_> + + <_> + 2 1 21 15 -1. + <_> + 9 1 7 15 3. + <_> + + <_> + 5 12 14 8 -1. + <_> + 12 12 7 8 2. + <_> + + <_> + 6 7 12 4 -1. + <_> + 6 7 6 4 2. + <_> + + <_> + 6 5 9 6 -1. + <_> + 9 5 3 6 3. + <_> + + <_> + 13 11 6 6 -1. + <_> + 13 11 3 6 2. + <_> + + <_> + 5 11 6 6 -1. + <_> + 8 11 3 6 2. + <_> + + <_> + 6 4 18 2 -1. + <_> + 6 5 18 1 2. + <_> + + <_> + 0 2 6 11 -1. + <_> + 2 2 2 11 3. + <_> + + <_> + 18 0 6 15 -1. + <_> + 20 0 2 15 3. + <_> + + <_> + 0 0 6 13 -1. + <_> + 2 0 2 13 3. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 0 2 24 4 -1. + <_> + 8 2 8 4 3. + <_> + + <_> + 3 13 18 4 -1. + <_> + 12 13 9 4 2. + <_> + + <_> + 9 7 10 4 -1. + <_> + 9 7 5 4 2. + <_> + + <_> + 5 8 12 3 -1. + <_> + 11 8 6 3 2. + <_> + + <_> + 4 14 19 3 -1. + <_> + 4 15 19 1 3. + <_> + + <_> + 10 0 4 20 -1. + <_> + 10 10 4 10 2. + <_> + + <_> + 8 15 9 6 -1. + <_> + 8 17 9 2 3. + <_> + + <_> + 2 9 15 4 -1. + <_> + 7 9 5 4 3. + <_> + + <_> + 8 4 12 7 -1. + <_> + 12 4 4 7 3. + <_> + + <_> + 0 10 6 9 -1. + <_> + 0 13 6 3 3. + <_> + + <_> + 18 5 6 9 -1. + <_> + 18 8 6 3 3. + <_> + + <_> + 0 18 16 6 -1. + <_> + 0 18 8 3 2. + <_> + 8 21 8 3 2. + <_> + + <_> + 9 18 14 6 -1. + <_> + 16 18 7 3 2. + <_> + 9 21 7 3 2. + <_> + + <_> + 1 20 20 4 -1. + <_> + 1 20 10 2 2. + <_> + 11 22 10 2 2. + <_> + + <_> + 2 8 20 6 -1. + <_> + 12 8 10 3 2. + <_> + 2 11 10 3 2. + <_> + + <_> + 7 8 6 9 -1. + <_> + 9 8 2 9 3. + <_> + + <_> + 8 5 12 8 -1. + <_> + 12 5 4 8 3. + <_> + + <_> + 4 5 12 8 -1. + <_> + 8 5 4 8 3. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 2 0 6 16 -1. + <_> + 4 0 2 16 3. + <_> + + <_> + 15 4 6 12 -1. + <_> + 15 8 6 4 3. + <_> + + <_> + 3 4 6 12 -1. + <_> + 3 8 6 4 3. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. + <_> + + <_> + 4 0 15 22 -1. 
+ <_> + 4 11 15 11 2. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. + <_> + + <_> + 0 12 9 6 -1. + <_> + 0 14 9 2 3. + <_> + + <_> + 15 15 9 6 -1. + <_> + 15 17 9 2 3. + <_> + + <_> + 0 15 9 6 -1. + <_> + 0 17 9 2 3. + <_> + + <_> + 10 0 8 10 -1. + <_> + 14 0 4 5 2. + <_> + 10 5 4 5 2. + <_> + + <_> + 1 0 4 16 -1. + <_> + 3 0 2 16 2. + <_> + + <_> + 7 6 10 6 -1. + <_> + 7 8 10 2 3. + <_> + + <_> + 10 12 4 10 -1. + <_> + 10 17 4 5 2. + <_> + + <_> + 8 4 10 6 -1. + <_> + 8 6 10 2 3. + <_> + + <_> + 3 22 18 2 -1. + <_> + 12 22 9 2 2. + <_> + + <_> + 7 7 11 6 -1. + <_> + 7 9 11 2 3. + <_> + + <_> + 0 0 12 10 -1. + <_> + 0 0 6 5 2. + <_> + 6 5 6 5 2. + <_> + + <_> + 10 1 12 6 -1. + <_> + 16 1 6 3 2. + <_> + 10 4 6 3 2. + <_> + + <_> + 7 16 9 4 -1. + <_> + 7 18 9 2 2. + <_> + + <_> + 5 7 15 16 -1. + <_> + 10 7 5 16 3. + <_> + + <_> + 5 10 12 13 -1. + <_> + 11 10 6 13 2. + <_> + + <_> + 6 2 12 6 -1. + <_> + 12 2 6 3 2. + <_> + 6 5 6 3 2. + <_> + + <_> + 3 9 12 9 -1. + <_> + 3 12 12 3 3. + <_> + + <_> + 16 2 8 6 -1. + <_> + 16 5 8 3 2. + <_> + + <_> + 0 2 8 6 -1. + <_> + 0 5 8 3 2. + <_> + + <_> + 0 3 24 11 -1. + <_> + 0 3 12 11 2. + <_> + + <_> + 0 13 8 10 -1. + <_> + 0 13 4 5 2. + <_> + 4 18 4 5 2. + <_> + + <_> + 10 14 4 10 -1. + <_> + 10 19 4 5 2. + <_> + + <_> + 10 2 4 21 -1. + <_> + 10 9 4 7 3. + <_> + + <_> + 4 4 15 9 -1. + <_> + 4 7 15 3 3. + <_> + + <_> + 0 1 24 6 -1. + <_> + 8 1 8 6 3. + <_> + + <_> + 9 6 5 16 -1. + <_> + 9 14 5 8 2. + <_> + + <_> + 3 21 18 3 -1. + <_> + 9 21 6 3 3. + <_> + + <_> + 6 5 3 12 -1. + <_> + 6 11 3 6 2. + <_> + + <_> + 11 6 4 9 -1. + <_> + 11 6 2 9 2. + <_> + + <_> + 5 6 9 8 -1. + <_> + 8 6 3 8 3. + <_> + + <_> + 4 3 20 2 -1. + <_> + 4 4 20 1 2. + <_> + + <_> + 2 10 18 3 -1. + <_> + 8 10 6 3 3. + <_> + + <_> + 7 15 10 6 -1. + <_> + 7 17 10 2 3. + <_> + + <_> + 1 4 4 18 -1. + <_> + 1 4 2 9 2. + <_> + 3 13 2 9 2. + <_> + + <_> + 13 0 6 9 -1. + <_> + 15 0 2 9 3. + <_> + + <_> + 5 0 6 9 -1. + <_> + 7 0 2 9 3. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 6 7 9 6 -1. + <_> + 9 7 3 6 3. + <_> + + <_> + 3 0 18 2 -1. + <_> + 3 1 18 1 2. + <_> + + <_> + 0 10 20 4 -1. + <_> + 0 10 10 2 2. + <_> + 10 12 10 2 2. + <_> + + <_> + 10 2 4 12 -1. + <_> + 10 8 4 6 2. + <_> + + <_> + 6 5 6 12 -1. + <_> + 6 5 3 6 2. + <_> + 9 11 3 6 2. + <_> + + <_> + 6 0 18 22 -1. + <_> + 15 0 9 11 2. + <_> + 6 11 9 11 2. + <_> + + <_> + 0 0 18 22 -1. + <_> + 0 0 9 11 2. + <_> + 9 11 9 11 2. + <_> + + <_> + 18 2 6 11 -1. + <_> + 20 2 2 11 3. + <_> + + <_> + 0 2 6 11 -1. + <_> + 2 2 2 11 3. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 0 0 20 3 -1. + <_> + 0 1 20 1 3. + <_> + + <_> + 2 2 20 2 -1. + <_> + 2 3 20 1 2. + <_> + + <_> + 1 10 18 2 -1. + <_> + 1 11 18 1 2. + <_> + + <_> + 18 7 6 9 -1. + <_> + 18 10 6 3 3. + <_> + + <_> + 0 0 22 9 -1. + <_> + 0 3 22 3 3. + <_> + + <_> + 17 3 6 9 -1. + <_> + 17 6 6 3 3. + <_> + + <_> + 0 7 6 9 -1. + <_> + 0 10 6 3 3. + <_> + + <_> + 0 6 24 6 -1. + <_> + 0 8 24 2 3. + <_> + + <_> + 0 2 6 10 -1. + <_> + 2 2 2 10 3. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 15 0 6 9 -1. + <_> + 17 0 2 9 3. + <_> + + <_> + 3 0 6 9 -1. + <_> + 5 0 2 9 3. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 15 14 9 6 -1. + <_> + 15 16 9 2 3. + <_> + + <_> + 0 15 23 6 -1. + <_> + 0 17 23 2 3. + <_> + + <_> + 5 15 18 3 -1. + <_> + 5 16 18 1 3. + <_> + + <_> + 0 14 9 6 -1. + <_> + 0 16 9 2 3. 
+ <_> + + <_> + 9 8 8 10 -1. + <_> + 13 8 4 5 2. + <_> + 9 13 4 5 2. + <_> + + <_> + 3 7 15 6 -1. + <_> + 8 7 5 6 3. + <_> + + <_> + 9 8 8 10 -1. + <_> + 13 8 4 5 2. + <_> + 9 13 4 5 2. + <_> + + <_> + 5 0 6 12 -1. + <_> + 8 0 3 12 2. + <_> + + <_> + 9 8 8 10 -1. + <_> + 13 8 4 5 2. + <_> + 9 13 4 5 2. + <_> + + <_> + 8 5 6 9 -1. + <_> + 10 5 2 9 3. + <_> + + <_> + 10 6 4 18 -1. + <_> + 12 6 2 9 2. + <_> + 10 15 2 9 2. + <_> + + <_> + 5 7 12 4 -1. + <_> + 11 7 6 4 2. + <_> + + <_> + 9 8 8 10 -1. + <_> + 13 8 4 5 2. + <_> + 9 13 4 5 2. + <_> + + <_> + 7 8 8 10 -1. + <_> + 7 8 4 5 2. + <_> + 11 13 4 5 2. + <_> + + <_> + 11 10 6 14 -1. + <_> + 14 10 3 7 2. + <_> + 11 17 3 7 2. + <_> + + <_> + 9 5 6 19 -1. + <_> + 12 5 3 19 2. + <_> + + <_> + 6 12 12 6 -1. + <_> + 12 12 6 3 2. + <_> + 6 15 6 3 2. + <_> + + <_> + 1 9 18 6 -1. + <_> + 1 9 9 3 2. + <_> + 10 12 9 3 2. + <_> + + <_> + 16 14 8 10 -1. + <_> + 20 14 4 5 2. + <_> + 16 19 4 5 2. + <_> + + <_> + 0 9 22 8 -1. + <_> + 0 9 11 4 2. + <_> + 11 13 11 4 2. + <_> + + <_> + 8 18 12 6 -1. + <_> + 14 18 6 3 2. + <_> + 8 21 6 3 2. + <_> + + <_> + 0 6 20 18 -1. + <_> + 0 6 10 9 2. + <_> + 10 15 10 9 2. + <_> + + <_> + 3 6 20 12 -1. + <_> + 13 6 10 6 2. + <_> + 3 12 10 6 2. + <_> + + <_> + 0 16 10 8 -1. + <_> + 0 16 5 4 2. + <_> + 5 20 5 4 2. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 0 11 19 3 -1. + <_> + 0 12 19 1 3. + <_> + + <_> + 14 6 6 9 -1. + <_> + 14 9 6 3 3. + <_> + + <_> + 1 7 22 4 -1. + <_> + 1 7 11 2 2. + <_> + 12 9 11 2 2. + <_> + + <_> + 13 6 7 12 -1. + <_> + 13 10 7 4 3. + <_> + + <_> + 4 7 11 9 -1. + <_> + 4 10 11 3 3. + <_> + + <_> + 12 10 10 8 -1. + <_> + 17 10 5 4 2. + <_> + 12 14 5 4 2. + <_> + + <_> + 2 12 9 7 -1. + <_> + 5 12 3 7 3. + <_> + + <_> + 16 14 6 9 -1. + <_> + 16 17 6 3 3. + <_> + + <_> + 3 12 6 12 -1. + <_> + 3 16 6 4 3. + <_> + + <_> + 14 13 6 6 -1. + <_> + 14 16 6 3 2. + <_> + + <_> + 8 0 6 9 -1. + <_> + 10 0 2 9 3. + <_> + + <_> + 9 1 6 23 -1. + <_> + 11 1 2 23 3. + <_> + + <_> + 0 16 9 6 -1. + <_> + 0 18 9 2 3. + <_> + + <_> + 4 17 18 3 -1. + <_> + 4 18 18 1 3. + <_> + + <_> + 5 2 13 14 -1. + <_> + 5 9 13 7 2. + <_> + + <_> + 15 0 8 12 -1. + <_> + 19 0 4 6 2. + <_> + 15 6 4 6 2. + <_> + + <_> + 0 0 8 12 -1. + <_> + 0 0 4 6 2. + <_> + 4 6 4 6 2. + <_> + + <_> + 8 2 8 7 -1. + <_> + 8 2 4 7 2. + <_> + + <_> + 1 1 6 9 -1. + <_> + 3 1 2 9 3. + <_> + + <_> + 14 8 6 12 -1. + <_> + 17 8 3 6 2. + <_> + 14 14 3 6 2. + <_> + + <_> + 4 8 6 12 -1. + <_> + 4 8 3 6 2. + <_> + 7 14 3 6 2. + <_> + + <_> + 16 5 5 15 -1. + <_> + 16 10 5 5 3. + <_> + + <_> + 3 5 5 15 -1. + <_> + 3 10 5 5 3. + <_> + + <_> + 18 4 6 9 -1. + <_> + 18 7 6 3 3. + <_> + + <_> + 1 7 6 15 -1. + <_> + 1 12 6 5 3. + <_> + + <_> + 11 15 12 8 -1. + <_> + 17 15 6 4 2. + <_> + 11 19 6 4 2. + <_> + + <_> + 0 2 24 4 -1. + <_> + 0 2 12 2 2. + <_> + 12 4 12 2 2. + <_> + + <_> + 15 1 2 19 -1. + <_> + 15 1 1 19 2. + <_> + + <_> + 7 1 2 19 -1. + <_> + 8 1 1 19 2. + <_> + + <_> + 22 1 2 20 -1. + <_> + 22 1 1 20 2. + <_> + + <_> + 0 1 2 20 -1. + <_> + 1 1 1 20 2. + <_> + + <_> + 18 11 6 12 -1. + <_> + 20 11 2 12 3. + <_> + + <_> + 0 11 6 12 -1. + <_> + 2 11 2 12 3. + <_> + + <_> + 3 6 18 14 -1. + <_> + 3 13 18 7 2. + <_> + + <_> + 6 10 7 8 -1. + <_> + 6 14 7 4 2. + <_> + + <_> + 7 9 12 12 -1. + <_> + 7 13 12 4 3. + <_> + + <_> + 2 18 18 5 -1. + <_> + 11 18 9 5 2. + <_> + + <_> + 4 21 20 3 -1. + <_> + 4 22 20 1 3. + <_> + + <_> + 9 12 6 12 -1. + <_> + 9 12 3 6 2. + <_> + 12 18 3 6 2. + <_> + + <_> + 4 6 18 3 -1. + <_> + 4 7 18 1 3. 
+ <_> + + <_> + 3 6 18 3 -1. + <_> + 3 7 18 1 3. + <_> + + <_> + 18 4 6 9 -1. + <_> + 18 7 6 3 3. + <_> + + <_> + 2 12 9 6 -1. + <_> + 2 14 9 2 3. + <_> + + <_> + 4 14 18 4 -1. + <_> + 13 14 9 2 2. + <_> + 4 16 9 2 2. + <_> + + <_> + 7 7 6 14 -1. + <_> + 7 7 3 7 2. + <_> + 10 14 3 7 2. + <_> + + <_> + 7 13 12 6 -1. + <_> + 13 13 6 3 2. + <_> + 7 16 6 3 2. + <_> + + <_> + 6 7 12 9 -1. + <_> + 10 7 4 9 3. + <_> + + <_> + 12 12 6 6 -1. + <_> + 12 12 3 6 2. + <_> + + <_> + 0 2 4 10 -1. + <_> + 0 7 4 5 2. + <_> + + <_> + 8 0 9 6 -1. + <_> + 11 0 3 6 3. + <_> + + <_> + 2 9 12 6 -1. + <_> + 2 12 12 3 2. + <_> + + <_> + 13 10 6 9 -1. + <_> + 13 13 6 3 3. + <_> + + <_> + 5 10 6 9 -1. + <_> + 5 13 6 3 3. + <_> + + <_> + 9 15 9 6 -1. + <_> + 9 17 9 2 3. + <_> + + <_> + 5 16 12 6 -1. + <_> + 5 19 12 3 2. + <_> + + <_> + 3 2 20 3 -1. + <_> + 3 3 20 1 3. + <_> + + <_> + 2 5 12 6 -1. + <_> + 6 5 4 6 3. + <_> + + <_> + 11 0 3 24 -1. + <_> + 12 0 1 24 3. + <_> + + <_> + 3 16 15 4 -1. + <_> + 8 16 5 4 3. + <_> + + <_> + 9 12 6 12 -1. + <_> + 9 18 6 6 2. + <_> + + <_> + 1 15 12 8 -1. + <_> + 1 15 6 4 2. + <_> + 7 19 6 4 2. + <_> + + <_> + 15 10 8 14 -1. + <_> + 19 10 4 7 2. + <_> + 15 17 4 7 2. + <_> + + <_> + 1 9 8 14 -1. + <_> + 1 9 4 7 2. + <_> + 5 16 4 7 2. + <_> + + <_> + 9 11 9 10 -1. + <_> + 9 16 9 5 2. + <_> + + <_> + 6 7 12 6 -1. + <_> + 6 9 12 2 3. + <_> + + <_> + 10 15 6 9 -1. + <_> + 12 15 2 9 3. + <_> + + <_> + 7 8 9 7 -1. + <_> + 10 8 3 7 3. + <_> + + <_> + 10 4 8 10 -1. + <_> + 14 4 4 5 2. + <_> + 10 9 4 5 2. + <_> + + <_> + 4 6 6 9 -1. + <_> + 4 9 6 3 3. + <_> + + <_> + 0 6 24 12 -1. + <_> + 8 6 8 12 3. + <_> + + <_> + 3 7 6 14 -1. + <_> + 6 7 3 14 2. + <_> + + <_> + 19 8 5 8 -1. + <_> + 19 12 5 4 2. + <_> + + <_> + 0 8 5 8 -1. + <_> + 0 12 5 4 2. + <_> + + <_> + 17 3 6 6 -1. + <_> + 17 6 6 3 2. + <_> + + <_> + 1 3 6 6 -1. + <_> + 1 6 6 3 2. + <_> + + <_> + 18 2 6 9 -1. + <_> + 18 5 6 3 3. + <_> + + <_> + 0 2 6 9 -1. + <_> + 0 5 6 3 3. + <_> + + <_> + 3 3 18 6 -1. + <_> + 3 5 18 2 3. + <_> + + <_> + 2 3 9 6 -1. + <_> + 2 5 9 2 3. + <_> + + <_> + 9 3 10 8 -1. + <_> + 14 3 5 4 2. + <_> + 9 7 5 4 2. + <_> + + <_> + 5 3 10 8 -1. + <_> + 5 3 5 4 2. + <_> + 10 7 5 4 2. + <_> + + <_> + 10 11 6 12 -1. + <_> + 10 11 3 12 2. + <_> + + <_> + 8 11 6 11 -1. + <_> + 11 11 3 11 2. + <_> + + <_> + 7 8 10 4 -1. + <_> + 7 8 5 4 2. + <_> + + <_> + 9 6 6 7 -1. + <_> + 12 6 3 7 2. + <_> + + <_> + 5 18 18 3 -1. + <_> + 5 19 18 1 3. + <_> + + <_> + 8 4 6 9 -1. + <_> + 10 4 2 9 3. + <_> + + <_> + 8 1 9 7 -1. + <_> + 11 1 3 7 3. + <_> + + <_> + 6 11 6 6 -1. + <_> + 9 11 3 6 2. + <_> + + <_> + 14 12 4 11 -1. + <_> + 14 12 2 11 2. + <_> + + <_> + 6 12 4 11 -1. + <_> + 8 12 2 11 2. + <_> + + <_> + 8 0 12 18 -1. + <_> + 12 0 4 18 3. + <_> + + <_> + 2 12 10 5 -1. + <_> + 7 12 5 5 2. + <_> + + <_> + 2 20 22 3 -1. + <_> + 2 21 22 1 3. + <_> + + <_> + 0 4 2 20 -1. + <_> + 1 4 1 20 2. + <_> + + <_> + 0 2 24 4 -1. + <_> + 8 2 8 4 3. + <_> + + <_> + 7 8 10 4 -1. + <_> + 7 10 10 2 2. + <_> + + <_> + 6 7 8 10 -1. + <_> + 6 7 4 5 2. + <_> + 10 12 4 5 2. + <_> + + <_> + 14 0 6 14 -1. + <_> + 17 0 3 7 2. + <_> + 14 7 3 7 2. + <_> + + <_> + 4 11 5 8 -1. + <_> + 4 15 5 4 2. + <_> + + <_> + 2 0 20 9 -1. + <_> + 2 3 20 3 3. + <_> + + <_> + 6 7 12 8 -1. + <_> + 6 7 6 4 2. + <_> + 12 11 6 4 2. + <_> + + <_> + 9 17 6 6 -1. + <_> + 9 20 6 3 2. + <_> + + <_> + 7 10 10 4 -1. + <_> + 7 12 10 2 2. + <_> + + <_> + 6 5 12 9 -1. + <_> + 10 5 4 9 3. + <_> + + <_> + 5 11 6 8 -1. + <_> + 8 11 3 8 2. + <_> + + <_> + 18 4 4 17 -1. + <_> + 18 4 2 17 2. 
+ <_> + + <_> + 0 0 6 6 -1. + <_> + 3 0 3 6 2. + <_> + + <_> + 18 4 4 17 -1. + <_> + 18 4 2 17 2. + <_> + + <_> + 2 4 4 17 -1. + <_> + 4 4 2 17 2. + <_> + + <_> + 5 18 19 3 -1. + <_> + 5 19 19 1 3. + <_> + + <_> + 11 0 2 18 -1. + <_> + 11 9 2 9 2. + <_> + + <_> + 15 4 2 18 -1. + <_> + 15 13 2 9 2. + <_> + + <_> + 7 4 2 18 -1. + <_> + 7 13 2 9 2. + <_> + + <_> + 7 11 10 8 -1. + <_> + 12 11 5 4 2. + <_> + 7 15 5 4 2. + <_> + + <_> + 10 6 4 9 -1. + <_> + 12 6 2 9 2. + <_> + + <_> + 10 0 6 9 -1. + <_> + 12 0 2 9 3. + <_> + + <_> + 2 9 16 8 -1. + <_> + 2 9 8 4 2. + <_> + 10 13 8 4 2. + <_> + + <_> + 14 15 6 9 -1. + <_> + 14 18 6 3 3. + <_> + + <_> + 8 7 6 9 -1. + <_> + 10 7 2 9 3. + <_> + + <_> + 14 15 6 9 -1. + <_> + 14 18 6 3 3. + <_> + + <_> + 3 12 12 6 -1. + <_> + 3 14 12 2 3. + <_> + + <_> + 14 12 9 6 -1. + <_> + 14 14 9 2 3. + <_> + + <_> + 1 12 9 6 -1. + <_> + 1 14 9 2 3. + <_> + + <_> + 3 7 18 3 -1. + <_> + 3 8 18 1 3. + <_> + + <_> + 1 7 22 6 -1. + <_> + 1 9 22 2 3. + <_> + + <_> + 18 4 6 6 -1. + <_> + 18 7 6 3 2. + <_> + + <_> + 0 4 6 6 -1. + <_> + 0 7 6 3 2. + <_> + + <_> + 5 11 16 6 -1. + <_> + 5 14 16 3 2. + <_> + + <_> + 6 16 9 4 -1. + <_> + 6 18 9 2 2. + <_> + + <_> + 14 15 6 9 -1. + <_> + 14 18 6 3 3. + <_> + + <_> + 4 15 6 9 -1. + <_> + 4 18 6 3 3. + <_> + + <_> + 15 1 6 23 -1. + <_> + 17 1 2 23 3. + <_> + + <_> + 0 21 24 3 -1. + <_> + 8 21 8 3 3. + <_> + + <_> + 0 20 24 4 -1. + <_> + 8 20 8 4 3. + <_> + + <_> + 3 1 6 23 -1. + <_> + 5 1 2 23 3. + <_> + + <_> + 3 17 18 3 -1. + <_> + 3 18 18 1 3. + <_> + + <_> + 0 16 18 3 -1. + <_> + 0 17 18 1 3. + <_> + + <_> + 1 16 22 4 -1. + <_> + 12 16 11 2 2. + <_> + 1 18 11 2 2. + <_> + + <_> + 0 16 9 6 -1. + <_> + 0 18 9 2 3. + <_> + + <_> + 2 10 21 3 -1. + <_> + 9 10 7 3 3. + <_> + + <_> + 2 18 12 6 -1. + <_> + 2 18 6 3 2. + <_> + 8 21 6 3 2. + <_> + + <_> + 0 5 24 4 -1. + <_> + 0 7 24 2 2. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 10 7 6 12 -1. + <_> + 10 13 6 6 2. + <_> + + <_> + 6 6 6 9 -1. + <_> + 8 6 2 9 3. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 9 7 6 9 -1. + <_> + 11 7 2 9 3. + <_> + + <_> + 2 1 20 3 -1. + <_> + 2 2 20 1 3. + <_> + + <_> + 1 18 12 6 -1. + <_> + 1 18 6 3 2. + <_> + 7 21 6 3 2. + <_> + + <_> + 13 2 4 13 -1. + <_> + 13 2 2 13 2. + <_> + + <_> + 6 7 12 4 -1. + <_> + 12 7 6 4 2. + <_> + + <_> + 10 1 4 13 -1. + <_> + 10 1 2 13 2. + <_> + + <_> + 6 0 3 18 -1. + <_> + 7 0 1 18 3. + <_> + + <_> + 14 3 10 5 -1. + <_> + 14 3 5 5 2. + <_> + + <_> + 6 15 12 8 -1. + <_> + 10 15 4 8 3. + <_> + + <_> + 9 10 6 9 -1. + <_> + 11 10 2 9 3. + <_> + + <_> + 8 3 4 9 -1. + <_> + 10 3 2 9 2. + <_> + + <_> + 17 0 6 14 -1. + <_> + 20 0 3 7 2. + <_> + 17 7 3 7 2. + <_> + + <_> + 1 0 6 14 -1. + <_> + 1 0 3 7 2. + <_> + 4 7 3 7 2. + <_> + + <_> + 14 0 6 16 -1. + <_> + 17 0 3 8 2. + <_> + 14 8 3 8 2. + <_> + + <_> + 7 4 4 10 -1. + <_> + 9 4 2 10 2. + <_> + + <_> + 3 17 18 6 -1. + <_> + 12 17 9 3 2. + <_> + 3 20 9 3 2. + <_> + + <_> + 1 20 22 4 -1. + <_> + 12 20 11 4 2. + <_> + + <_> + 14 3 10 5 -1. + <_> + 14 3 5 5 2. + <_> + + <_> + 0 3 10 5 -1. + <_> + 5 3 5 5 2. + <_> + + <_> + 12 6 12 16 -1. + <_> + 16 6 4 16 3. + <_> + + <_> + 0 6 12 16 -1. + <_> + 4 6 4 16 3. + <_> + + <_> + 10 9 5 15 -1. + <_> + 10 14 5 5 3. + <_> + + <_> + 1 18 21 2 -1. + <_> + 1 19 21 1 2. + <_> + + <_> + 15 0 9 6 -1. + <_> + 15 2 9 2 3. + <_> + + <_> + 6 1 12 4 -1. + <_> + 12 1 6 4 2. + <_> + + <_> + 6 0 12 12 -1. + <_> + 12 0 6 6 2. + <_> + 6 6 6 6 2. + <_> + + <_> + 8 10 8 12 -1. + <_> + 8 10 4 6 2. 
+ <_> + 12 16 4 6 2. + <_> + + <_> + 14 16 10 8 -1. + <_> + 19 16 5 4 2. + <_> + 14 20 5 4 2. + <_> + + <_> + 0 16 10 8 -1. + <_> + 0 16 5 4 2. + <_> + 5 20 5 4 2. + <_> + + <_> + 10 12 12 5 -1. + <_> + 14 12 4 5 3. + <_> + + <_> + 6 16 10 8 -1. + <_> + 6 16 5 4 2. + <_> + 11 20 5 4 2. + <_> + + <_> + 7 6 12 6 -1. + <_> + 13 6 6 3 2. + <_> + 7 9 6 3 2. + <_> + + <_> + 9 6 4 18 -1. + <_> + 9 6 2 9 2. + <_> + 11 15 2 9 2. + <_> + + <_> + 10 9 6 14 -1. + <_> + 13 9 3 7 2. + <_> + 10 16 3 7 2. + <_> + + <_> + 8 9 6 14 -1. + <_> + 8 9 3 7 2. + <_> + 11 16 3 7 2. + <_> + + <_> + 7 4 11 12 -1. + <_> + 7 10 11 6 2. + <_> + + <_> + 4 8 6 16 -1. + <_> + 4 8 3 8 2. + <_> + 7 16 3 8 2. + <_> + + <_> + 17 3 4 21 -1. + <_> + 17 10 4 7 3. + <_> + + <_> + 3 3 4 21 -1. + <_> + 3 10 4 7 3. + <_> + + <_> + 10 1 8 18 -1. + <_> + 14 1 4 9 2. + <_> + 10 10 4 9 2. + <_> + + <_> + 2 5 16 8 -1. + <_> + 2 5 8 4 2. + <_> + 10 9 8 4 2. + <_> + + <_> + 3 6 18 12 -1. + <_> + 3 10 18 4 3. + <_> + + <_> + 4 10 16 12 -1. + <_> + 4 14 16 4 3. + <_> + + <_> + 15 4 8 20 -1. + <_> + 19 4 4 10 2. + <_> + 15 14 4 10 2. + <_> + + <_> + 7 2 9 6 -1. + <_> + 10 2 3 6 3. + <_> + + <_> + 15 4 8 20 -1. + <_> + 19 4 4 10 2. + <_> + 15 14 4 10 2. + <_> + + <_> + 1 4 8 20 -1. + <_> + 1 4 4 10 2. + <_> + 5 14 4 10 2. + <_> + + <_> + 11 8 8 14 -1. + <_> + 15 8 4 7 2. + <_> + 11 15 4 7 2. + <_> + + <_> + 5 8 8 14 -1. + <_> + 5 8 4 7 2. + <_> + 9 15 4 7 2. + <_> + + <_> + 10 13 5 8 -1. + <_> + 10 17 5 4 2. + <_> + + <_> + 4 13 7 9 -1. + <_> + 4 16 7 3 3. + <_> + + <_> + 0 13 24 10 -1. + <_> + 0 18 24 5 2. + <_> + + <_> + 4 2 8 11 -1. + <_> + 8 2 4 11 2. + <_> + + <_> + 10 2 8 16 -1. + <_> + 14 2 4 8 2. + <_> + 10 10 4 8 2. + <_> + + <_> + 0 2 24 6 -1. + <_> + 0 2 12 3 2. + <_> + 12 5 12 3 2. + <_> + + <_> + 6 0 12 9 -1. + <_> + 6 3 12 3 3. + <_> + + <_> + 1 2 12 12 -1. + <_> + 1 2 6 6 2. + <_> + 7 8 6 6 2. + <_> + + <_> + 18 5 6 9 -1. + <_> + 18 8 6 3 3. + <_> + + <_> + 4 3 8 10 -1. + <_> + 4 3 4 5 2. + <_> + 8 8 4 5 2. + <_> + + <_> + 6 21 18 3 -1. + <_> + 6 22 18 1 3. + <_> + + <_> + 1 10 18 2 -1. + <_> + 1 11 18 1 2. + <_> + + <_> + 1 10 22 3 -1. + <_> + 1 11 22 1 3. + <_> + + <_> + 2 8 12 9 -1. + <_> + 2 11 12 3 3. + <_> + + <_> + 12 8 12 6 -1. + <_> + 18 8 6 3 2. + <_> + 12 11 6 3 2. + <_> + + <_> + 0 8 12 6 -1. + <_> + 0 8 6 3 2. + <_> + 6 11 6 3 2. + <_> + + <_> + 10 15 6 9 -1. + <_> + 12 15 2 9 3. + <_> + + <_> + 7 13 9 6 -1. + <_> + 7 15 9 2 3. + <_> + + <_> + 9 8 7 12 -1. + <_> + 9 14 7 6 2. + <_> + + <_> + 4 13 9 6 -1. + <_> + 7 13 3 6 3. + <_> + + <_> + 6 15 18 4 -1. + <_> + 12 15 6 4 3. + <_> + + <_> + 5 4 4 16 -1. + <_> + 7 4 2 16 2. + <_> + + <_> + 10 15 6 9 -1. + <_> + 12 15 2 9 3. + <_> + + <_> + 8 15 6 9 -1. + <_> + 10 15 2 9 3. + <_> + + <_> + 9 11 12 10 -1. + <_> + 15 11 6 5 2. + <_> + 9 16 6 5 2. + <_> + + <_> + 3 6 14 6 -1. + <_> + 3 8 14 2 3. + <_> + + <_> + 4 2 17 8 -1. + <_> + 4 6 17 4 2. + <_> + + <_> + 6 2 12 21 -1. + <_> + 6 9 12 7 3. + <_> + + <_> + 8 1 9 9 -1. + <_> + 8 4 9 3 3. + <_> + + <_> + 0 7 24 3 -1. + <_> + 12 7 12 3 2. + <_> + + <_> + 11 6 9 10 -1. + <_> + 11 11 9 5 2. + <_> + + <_> + 2 11 18 3 -1. + <_> + 2 12 18 1 3. + <_> + + <_> + 8 16 9 4 -1. + <_> + 8 18 9 2 2. + <_> + + <_> + 0 0 9 6 -1. + <_> + 0 2 9 2 3. + <_> + + <_> + 0 11 24 6 -1. + <_> + 0 13 24 2 3. + <_> + + <_> + 2 9 20 6 -1. + <_> + 2 12 20 3 2. + <_> + + <_> + 4 5 16 12 -1. + <_> + 12 5 8 6 2. + <_> + 4 11 8 6 2. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 7 3 10 4 -1. + <_> + 7 5 10 2 2. 
+ <_> + + <_> + 9 15 6 8 -1. + <_> + 9 19 6 4 2. + <_> + + <_> + 17 0 7 10 -1. + <_> + 17 5 7 5 2. + <_> + + <_> + 0 0 7 10 -1. + <_> + 0 5 7 5 2. + <_> + + <_> + 16 1 6 12 -1. + <_> + 19 1 3 6 2. + <_> + 16 7 3 6 2. + <_> + + <_> + 1 0 19 8 -1. + <_> + 1 4 19 4 2. + <_> + + <_> + 12 2 9 4 -1. + <_> + 12 4 9 2 2. + <_> + + <_> + 3 2 9 4 -1. + <_> + 3 4 9 2 2. + <_> + + <_> + 12 2 10 6 -1. + <_> + 12 4 10 2 3. + <_> + + <_> + 3 4 18 2 -1. + <_> + 12 4 9 2 2. + <_> + + <_> + 12 1 4 9 -1. + <_> + 12 1 2 9 2. + <_> + + <_> + 8 1 4 9 -1. + <_> + 10 1 2 9 2. + <_> + + <_> + 10 5 8 10 -1. + <_> + 14 5 4 5 2. + <_> + 10 10 4 5 2. + <_> + + <_> + 6 4 12 13 -1. + <_> + 10 4 4 13 3. + <_> + + <_> + 13 5 6 6 -1. + <_> + 13 5 3 6 2. + <_> + + <_> + 1 5 12 3 -1. + <_> + 7 5 6 3 2. + <_> + + <_> + 7 5 10 6 -1. + <_> + 7 7 10 2 3. + <_> + + <_> + 2 0 21 5 -1. + <_> + 9 0 7 5 3. + <_> + + <_> + 0 8 9 9 -1. + <_> + 0 11 9 3 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 0 3 6 7 -1. + <_> + 3 3 3 7 2. + <_> + + <_> + 9 18 12 6 -1. + <_> + 15 18 6 3 2. + <_> + 9 21 6 3 2. + <_> + + <_> + 2 8 20 6 -1. + <_> + 2 8 10 3 2. + <_> + 12 11 10 3 2. + <_> + + <_> + 13 2 10 4 -1. + <_> + 13 4 10 2 2. + <_> + + <_> + 4 5 5 18 -1. + <_> + 4 11 5 6 3. + <_> + + <_> + 20 4 4 9 -1. + <_> + 20 4 2 9 2. + <_> + + <_> + 8 6 8 14 -1. + <_> + 8 13 8 7 2. + <_> + + <_> + 0 1 24 6 -1. + <_> + 12 1 12 3 2. + <_> + 0 4 12 3 2. + <_> + + <_> + 0 4 4 9 -1. + <_> + 2 4 2 9 2. + <_> + + <_> + 3 6 18 3 -1. + <_> + 3 7 18 1 3. + <_> + + <_> + 3 17 16 6 -1. + <_> + 3 19 16 2 3. + <_> + + <_> + 13 6 6 9 -1. + <_> + 13 9 6 3 3. + <_> + + <_> + 5 6 14 6 -1. + <_> + 5 6 7 3 2. + <_> + 12 9 7 3 2. + <_> + + <_> + 13 5 8 10 -1. + <_> + 17 5 4 5 2. + <_> + 13 10 4 5 2. + <_> + + <_> + 2 2 20 3 -1. + <_> + 2 3 20 1 3. + <_> + + <_> + 9 2 9 6 -1. + <_> + 12 2 3 6 3. + <_> + + <_> + 8 6 6 9 -1. + <_> + 10 6 2 9 3. + <_> + + <_> + 12 3 4 11 -1. + <_> + 12 3 2 11 2. + <_> + + <_> + 8 3 4 11 -1. + <_> + 10 3 2 11 2. + <_> + + <_> + 8 3 8 10 -1. + <_> + 12 3 4 5 2. + <_> + 8 8 4 5 2. + <_> + + <_> + 11 1 2 18 -1. + <_> + 12 1 1 18 2. + <_> + + <_> + 9 2 9 6 -1. + <_> + 12 2 3 6 3. + <_> + + <_> + 0 2 19 3 -1. + <_> + 0 3 19 1 3. + <_> + + <_> + 9 14 9 6 -1. + <_> + 9 16 9 2 3. + <_> + + <_> + 1 8 18 5 -1. + <_> + 7 8 6 5 3. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 13 6 4 15 -1. + <_> + 13 11 4 5 3. + <_> + + <_> + 1 5 18 3 -1. + <_> + 1 6 18 1 3. + <_> + + <_> + 9 7 14 6 -1. + <_> + 9 9 14 2 3. + <_> + + <_> + 2 16 18 3 -1. + <_> + 2 17 18 1 3. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 0 8 12 6 -1. + <_> + 0 8 6 3 2. + <_> + 6 11 6 3 2. + <_> + + <_> + 9 13 7 8 -1. + <_> + 9 17 7 4 2. + <_> + + <_> + 2 17 20 3 -1. + <_> + 2 18 20 1 3. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 4 0 15 4 -1. + <_> + 4 2 15 2 2. + <_> + + <_> + 17 2 6 6 -1. + <_> + 17 5 6 3 2. + <_> + + <_> + 0 3 6 9 -1. + <_> + 0 6 6 3 3. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 0 17 9 6 -1. + <_> + 0 19 9 2 3. + <_> + + <_> + 9 18 12 6 -1. + <_> + 15 18 6 3 2. + <_> + 9 21 6 3 2. + <_> + + <_> + 3 15 6 9 -1. + <_> + 3 18 6 3 3. + <_> + + <_> + 16 13 8 10 -1. + <_> + 20 13 4 5 2. + <_> + 16 18 4 5 2. + <_> + + <_> + 0 14 24 4 -1. + <_> + 8 14 8 4 3. + <_> + + <_> + 13 18 6 6 -1. + <_> + 13 18 3 6 2. + <_> + + <_> + 0 13 8 10 -1. + <_> + 0 13 4 5 2. + <_> + 4 18 4 5 2. + <_> + + <_> + 0 14 24 6 -1. + <_> + 0 17 24 3 2. 
+ <_> + + <_> + 5 2 12 8 -1. + <_> + 5 2 6 4 2. + <_> + 11 6 6 4 2. + <_> + + <_> + 8 9 9 6 -1. + <_> + 11 9 3 6 3. + <_> + + <_> + 4 3 16 4 -1. + <_> + 4 5 16 2 2. + <_> + + <_> + 10 2 4 10 -1. + <_> + 10 7 4 5 2. + <_> + + <_> + 8 4 5 8 -1. + <_> + 8 8 5 4 2. + <_> + + <_> + 11 5 9 12 -1. + <_> + 11 9 9 4 3. + <_> + + <_> + 4 5 9 12 -1. + <_> + 4 9 9 4 3. + <_> + + <_> + 14 6 6 9 -1. + <_> + 14 9 6 3 3. + <_> + + <_> + 2 4 20 12 -1. + <_> + 2 8 20 4 3. + <_> + + <_> + 4 4 17 16 -1. + <_> + 4 12 17 8 2. + <_> + + <_> + 8 7 7 6 -1. + <_> + 8 10 7 3 2. + <_> + + <_> + 1 9 23 2 -1. + <_> + 1 10 23 1 2. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 13 3 4 9 -1. + <_> + 13 3 2 9 2. + <_> + + <_> + 8 1 6 13 -1. + <_> + 10 1 2 13 3. + <_> + + <_> + 4 22 18 2 -1. + <_> + 4 23 18 1 2. + <_> + + <_> + 3 10 9 6 -1. + <_> + 6 10 3 6 3. + <_> + + <_> + 14 0 2 24 -1. + <_> + 14 0 1 24 2. + <_> + + <_> + 8 0 2 24 -1. + <_> + 9 0 1 24 2. + <_> + + <_> + 3 2 18 10 -1. + <_> + 9 2 6 10 3. + <_> + + <_> + 4 13 15 6 -1. + <_> + 9 13 5 6 3. + <_> + + <_> + 3 21 18 3 -1. + <_> + 9 21 6 3 3. + <_> + + <_> + 9 1 4 11 -1. + <_> + 11 1 2 11 2. + <_> + + <_> + 9 7 10 4 -1. + <_> + 9 7 5 4 2. + <_> + + <_> + 7 0 10 18 -1. + <_> + 12 0 5 18 2. + <_> + + <_> + 12 1 6 16 -1. + <_> + 14 1 2 16 3. + <_> + + <_> + 6 1 6 16 -1. + <_> + 8 1 2 16 3. + <_> + + <_> + 18 2 6 6 -1. + <_> + 18 5 6 3 2. + <_> + + <_> + 3 5 18 2 -1. + <_> + 3 6 18 1 2. + <_> + + <_> + 18 2 6 6 -1. + <_> + 18 5 6 3 2. + <_> + + <_> + 0 2 6 6 -1. + <_> + 0 5 6 3 2. + <_> + + <_> + 13 11 11 6 -1. + <_> + 13 13 11 2 3. + <_> + + <_> + 5 7 10 4 -1. + <_> + 10 7 5 4 2. + <_> + + <_> + 11 9 10 7 -1. + <_> + 11 9 5 7 2. + <_> + + <_> + 3 9 10 7 -1. + <_> + 8 9 5 7 2. + <_> + + <_> + 16 4 6 6 -1. + <_> + 16 4 3 6 2. + <_> + + <_> + 5 6 10 8 -1. + <_> + 5 6 5 4 2. + <_> + 10 10 5 4 2. + <_> + + <_> + 7 21 16 3 -1. + <_> + 7 21 8 3 2. + <_> + + <_> + 1 21 16 3 -1. + <_> + 9 21 8 3 2. + <_> + + <_> + 2 5 22 14 -1. + <_> + 13 5 11 7 2. + <_> + 2 12 11 7 2. + <_> + + <_> + 3 10 8 10 -1. + <_> + 3 10 4 5 2. + <_> + 7 15 4 5 2. + <_> + + <_> + 17 0 6 12 -1. + <_> + 20 0 3 6 2. + <_> + 17 6 3 6 2. + <_> + + <_> + 5 2 6 18 -1. + <_> + 7 2 2 18 3. + <_> + + <_> + 13 0 6 9 -1. + <_> + 15 0 2 9 3. + <_> + + <_> + 0 12 7 9 -1. + <_> + 0 15 7 3 3. + <_> + + <_> + 15 13 8 10 -1. + <_> + 19 13 4 5 2. + <_> + 15 18 4 5 2. + <_> + + <_> + 1 0 6 12 -1. + <_> + 1 0 3 6 2. + <_> + 4 6 3 6 2. + <_> + + <_> + 12 1 3 12 -1. + <_> + 12 7 3 6 2. + <_> + + <_> + 1 13 8 10 -1. + <_> + 1 13 4 5 2. + <_> + 5 18 4 5 2. + <_> + + <_> + 3 21 19 2 -1. + <_> + 3 22 19 1 2. + <_> + + <_> + 6 3 4 13 -1. + <_> + 8 3 2 13 2. + <_> + + <_> + 5 10 18 3 -1. + <_> + 5 11 18 1 3. + <_> + + <_> + 9 3 5 12 -1. + <_> + 9 7 5 4 3. + <_> + + <_> + 11 2 4 15 -1. + <_> + 11 7 4 5 3. + <_> + + <_> + 4 1 16 4 -1. + <_> + 4 3 16 2 2. + <_> + + <_> + 6 0 18 3 -1. + <_> + 6 1 18 1 3. + <_> + + <_> + 5 1 10 8 -1. + <_> + 5 1 5 4 2. + <_> + 10 5 5 4 2. + <_> + + <_> + 11 18 12 6 -1. + <_> + 17 18 6 3 2. + <_> + 11 21 6 3 2. + <_> + + <_> + 5 15 12 3 -1. + <_> + 11 15 6 3 2. + <_> + + <_> + 1 10 22 4 -1. + <_> + 1 10 11 4 2. + <_> + + <_> + 7 9 9 6 -1. + <_> + 10 9 3 6 3. + <_> + + <_> + 6 11 12 5 -1. + <_> + 10 11 4 5 3. + <_> + + <_> + 6 7 10 7 -1. + <_> + 11 7 5 7 2. + <_> + + <_> + 11 2 8 10 -1. + <_> + 11 2 4 10 2. + <_> + + <_> + 5 2 8 10 -1. + <_> + 9 2 4 10 2. + <_> + + <_> + 6 4 18 6 -1. + <_> + 15 4 9 3 2. + <_> + 6 7 9 3 2. + <_> + + <_> + 0 5 10 9 -1. + <_> + 0 8 10 3 3. 
+ <_> + + <_> + 2 7 21 6 -1. + <_> + 2 9 21 2 3. + <_> + + <_> + 0 4 22 16 -1. + <_> + 0 4 11 8 2. + <_> + 11 12 11 8 2. + <_> + + <_> + 9 0 6 22 -1. + <_> + 9 11 6 11 2. + <_> + + <_> + 9 1 3 12 -1. + <_> + 9 7 3 6 2. + <_> + + <_> + 12 0 12 18 -1. + <_> + 18 0 6 9 2. + <_> + 12 9 6 9 2. + <_> + + <_> + 0 0 12 18 -1. + <_> + 0 0 6 9 2. + <_> + 6 9 6 9 2. + <_> + + <_> + 1 1 22 4 -1. + <_> + 12 1 11 2 2. + <_> + 1 3 11 2 2. + <_> + + <_> + 3 0 18 4 -1. + <_> + 3 2 18 2 2. + <_> + + <_> + 2 5 22 6 -1. + <_> + 2 7 22 2 3. + <_> + + <_> + 5 0 6 9 -1. + <_> + 5 3 6 3 3. + <_> + + <_> + 10 14 6 9 -1. + <_> + 12 14 2 9 3. + <_> + + <_> + 8 14 6 9 -1. + <_> + 10 14 2 9 3. + <_> + + <_> + 5 18 18 3 -1. + <_> + 5 19 18 1 3. + <_> + + <_> + 6 0 6 13 -1. + <_> + 9 0 3 13 2. + <_> + + <_> + 7 4 12 4 -1. + <_> + 7 4 6 4 2. + <_> + + <_> + 5 2 12 6 -1. + <_> + 9 2 4 6 3. + <_> + + <_> + 4 1 18 3 -1. + <_> + 4 2 18 1 3. + <_> + + <_> + 0 8 6 12 -1. + <_> + 0 12 6 4 3. + <_> + + <_> + 9 15 6 9 -1. + <_> + 11 15 2 9 3. + <_> + + <_> + 9 10 6 13 -1. + <_> + 11 10 2 13 3. + <_> + + <_> + 6 17 18 2 -1. + <_> + 6 18 18 1 2. + <_> + + <_> + 9 4 6 9 -1. + <_> + 11 4 2 9 3. + <_> + + <_> + 10 0 6 9 -1. + <_> + 12 0 2 9 3. + <_> + + <_> + 5 6 10 8 -1. + <_> + 5 6 5 4 2. + <_> + 10 10 5 4 2. + <_> + + <_> + 14 9 5 8 -1. + <_> + 14 13 5 4 2. + <_> + + <_> + 5 9 5 8 -1. + <_> + 5 13 5 4 2. + <_> + + <_> + 14 11 9 6 -1. + <_> + 14 13 9 2 3. + <_> + + <_> + 0 2 23 15 -1. + <_> + 0 7 23 5 3. + <_> + + <_> + 16 0 8 12 -1. + <_> + 16 6 8 6 2. + <_> + + <_> + 4 15 6 9 -1. + <_> + 4 18 6 3 3. + <_> + + <_> + 8 18 9 4 -1. + <_> + 8 20 9 2 2. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 13 11 11 6 -1. + <_> + 13 13 11 2 3. + <_> + + <_> + 0 11 11 6 -1. + <_> + 0 13 11 2 3. + <_> + + <_> + 0 9 24 6 -1. + <_> + 12 9 12 3 2. + <_> + 0 12 12 3 2. + <_> + + <_> + 6 16 8 8 -1. + <_> + 6 20 8 4 2. + <_> + + <_> + 10 16 14 6 -1. + <_> + 10 18 14 2 3. + <_> + + <_> + 1 1 21 3 -1. + <_> + 1 2 21 1 3. + <_> + + <_> + 0 2 24 3 -1. + <_> + 0 2 12 3 2. + <_> + + <_> + 2 15 8 5 -1. + <_> + 6 15 4 5 2. + <_> + + <_> + 2 11 21 3 -1. + <_> + 9 11 7 3 3. + <_> + + <_> + 1 18 12 6 -1. + <_> + 1 18 6 3 2. + <_> + 7 21 6 3 2. + <_> + + <_> + 10 14 4 10 -1. + <_> + 10 19 4 5 2. + <_> + + <_> + 7 7 4 10 -1. + <_> + 7 12 4 5 2. + <_> + + <_> + 9 8 6 12 -1. + <_> + 9 12 6 4 3. + <_> + + <_> + 7 1 9 6 -1. + <_> + 10 1 3 6 3. + <_> + + <_> + 3 14 19 2 -1. + <_> + 3 15 19 1 2. + <_> + + <_> + 7 7 10 10 -1. + <_> + 7 7 5 5 2. + <_> + 12 12 5 5 2. + <_> + + <_> + 3 12 18 12 -1. + <_> + 3 12 9 12 2. + <_> + + <_> + 8 0 6 12 -1. + <_> + 10 0 2 12 3. + <_> + + <_> + 3 0 17 9 -1. + <_> + 3 3 17 3 3. + <_> + + <_> + 6 0 12 11 -1. + <_> + 10 0 4 11 3. + <_> + + <_> + 1 0 6 13 -1. + <_> + 4 0 3 13 2. + <_> + + <_> + 5 8 16 6 -1. + <_> + 5 11 16 3 2. + <_> + + <_> + 8 8 5 12 -1. + <_> + 8 14 5 6 2. + <_> + + <_> + 3 21 18 3 -1. + <_> + 9 21 6 3 3. + <_> + + <_> + 0 0 6 6 -1. + <_> + 3 0 3 6 2. + <_> + + <_> + 2 0 20 3 -1. + <_> + 2 1 20 1 3. + <_> + + <_> + 4 6 15 10 -1. + <_> + 9 6 5 10 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 14 0 6 9 -1. + <_> + 16 0 2 9 3. + <_> + + <_> + 7 16 9 6 -1. + <_> + 7 18 9 2 3. + <_> + + <_> + 14 0 6 9 -1. + <_> + 16 0 2 9 3. + <_> + + <_> + 4 0 6 9 -1. + <_> + 6 0 2 9 3. + <_> + + <_> + 17 1 6 16 -1. + <_> + 19 1 2 16 3. + <_> + + <_> + 1 1 6 16 -1. + <_> + 3 1 2 16 3. + <_> + + <_> + 14 13 6 9 -1. + <_> + 14 16 6 3 3. 
+ <_> + + <_> + 0 0 6 9 -1. + <_> + 0 3 6 3 3. + <_> + + <_> + 9 5 6 6 -1. + <_> + 9 5 3 6 2. + <_> + + <_> + 3 10 9 6 -1. + <_> + 6 10 3 6 3. + <_> + + <_> + 14 7 3 16 -1. + <_> + 14 15 3 8 2. + <_> + + <_> + 4 10 14 12 -1. + <_> + 4 10 7 6 2. + <_> + 11 16 7 6 2. + <_> + + <_> + 7 6 12 6 -1. + <_> + 7 8 12 2 3. + <_> + + <_> + 7 2 4 20 -1. + <_> + 9 2 2 20 2. + <_> + + <_> + 14 13 6 9 -1. + <_> + 14 16 6 3 3. + <_> + + <_> + 10 6 4 9 -1. + <_> + 12 6 2 9 2. + <_> + + <_> + 14 13 6 9 -1. + <_> + 14 16 6 3 3. + <_> + + <_> + 5 20 14 4 -1. + <_> + 5 22 14 2 2. + <_> + + <_> + 4 4 16 12 -1. + <_> + 4 10 16 6 2. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 3 0 21 4 -1. + <_> + 3 2 21 2 2. + <_> + + <_> + 4 13 6 9 -1. + <_> + 4 16 6 3 3. + <_> + + <_> + 16 16 5 8 -1. + <_> + 16 20 5 4 2. + <_> + + <_> + 4 0 16 16 -1. + <_> + 4 0 8 8 2. + <_> + 12 8 8 8 2. + <_> + + <_> + 6 6 14 6 -1. + <_> + 13 6 7 3 2. + <_> + 6 9 7 3 2. + <_> + + <_> + 10 5 4 15 -1. + <_> + 10 10 4 5 3. + <_> + + <_> + 9 15 12 8 -1. + <_> + 15 15 6 4 2. + <_> + 9 19 6 4 2. + <_> + + <_> + 6 7 12 4 -1. + <_> + 12 7 6 4 2. + <_> + + <_> + 5 6 14 6 -1. + <_> + 12 6 7 3 2. + <_> + 5 9 7 3 2. + <_> + + <_> + 3 6 18 10 -1. + <_> + 3 6 9 5 2. + <_> + 12 11 9 5 2. + <_> + + <_> + 6 0 18 21 -1. + <_> + 12 0 6 21 3. + <_> + + <_> + 0 0 24 21 -1. + <_> + 8 0 8 21 3. + <_> + + <_> + 6 18 18 3 -1. + <_> + 6 19 18 1 3. + <_> + + <_> + 0 15 9 6 -1. + <_> + 0 17 9 2 3. + <_> + + <_> + 4 3 19 2 -1. + <_> + 4 4 19 1 2. + <_> + + <_> + 0 3 24 2 -1. + <_> + 0 4 24 1 2. + <_> + + <_> + 15 14 9 4 -1. + <_> + 15 16 9 2 2. + <_> + + <_> + 0 14 9 4 -1. + <_> + 0 16 9 2 2. + <_> + + <_> + 6 15 18 2 -1. + <_> + 6 16 18 1 2. + <_> + + <_> + 3 17 18 3 -1. + <_> + 3 18 18 1 3. + <_> + + <_> + 12 0 3 23 -1. + <_> + 13 0 1 23 3. + <_> + + <_> + 6 0 8 6 -1. + <_> + 6 3 8 3 2. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 9 0 3 23 -1. + <_> + 10 0 1 23 3. + <_> + + <_> + 10 7 4 10 -1. + <_> + 10 12 4 5 2. + <_> + + <_> + 7 8 10 12 -1. + <_> + 7 12 10 4 3. + <_> + + <_> + 14 9 6 14 -1. + <_> + 17 9 3 7 2. + <_> + 14 16 3 7 2. + <_> + + <_> + 2 0 10 9 -1. + <_> + 2 3 10 3 3. + <_> + + <_> + 11 1 5 12 -1. + <_> + 11 7 5 6 2. + <_> + + <_> + 1 4 12 10 -1. + <_> + 1 4 6 5 2. + <_> + 7 9 6 5 2. + <_> + + <_> + 15 1 9 4 -1. + <_> + 15 3 9 2 2. + <_> + + <_> + 1 2 8 10 -1. + <_> + 1 2 4 5 2. + <_> + 5 7 4 5 2. + <_> + + <_> + 10 1 5 12 -1. + <_> + 10 5 5 4 3. + <_> + + <_> + 4 0 14 24 -1. + <_> + 11 0 7 24 2. + <_> + + <_> + 7 17 10 4 -1. + <_> + 7 19 10 2 2. + <_> + + <_> + 10 14 4 10 -1. + <_> + 10 19 4 5 2. + <_> + + <_> + 13 15 6 9 -1. + <_> + 15 15 2 9 3. + <_> + + <_> + 3 21 18 3 -1. + <_> + 3 22 18 1 3. + <_> + + <_> + 13 15 6 9 -1. + <_> + 15 15 2 9 3. + <_> + + <_> + 5 15 6 9 -1. + <_> + 7 15 2 9 3. + <_> + + <_> + 10 6 4 18 -1. + <_> + 12 6 2 9 2. + <_> + 10 15 2 9 2. + <_> + + <_> + 7 3 6 11 -1. + <_> + 9 3 2 11 3. + <_> + + <_> + 15 1 9 4 -1. + <_> + 15 3 9 2 2. + <_> + + <_> + 5 4 14 8 -1. + <_> + 5 8 14 4 2. + <_> + + <_> + 8 1 15 9 -1. + <_> + 8 4 15 3 3. + <_> + + <_> + 7 2 8 10 -1. + <_> + 7 2 4 5 2. + <_> + 11 7 4 5 2. + <_> + + <_> + 12 2 6 12 -1. + <_> + 12 2 3 12 2. + <_> + + <_> + 6 2 6 12 -1. + <_> + 9 2 3 12 2. + <_> + + <_> + 7 7 12 4 -1. + <_> + 7 7 6 4 2. + <_> + + <_> + 6 3 12 10 -1. + <_> + 10 3 4 10 3. + <_> + + <_> + 5 6 16 6 -1. + <_> + 13 6 8 3 2. + <_> + 5 9 8 3 2. + <_> + + <_> + 3 1 18 9 -1. + <_> + 9 1 6 9 3. + <_> + + <_> + 3 8 18 5 -1. + <_> + 9 8 6 5 3. + <_> + + <_> + 0 0 24 22 -1. 
+ <_> + 0 0 12 11 2. + <_> + 12 11 12 11 2. + <_> + + <_> + 14 16 9 6 -1. + <_> + 14 18 9 2 3. + <_> + + <_> + 0 16 24 8 -1. + <_> + 0 20 24 4 2. + <_> + + <_> + 1 19 22 4 -1. + <_> + 12 19 11 2 2. + <_> + 1 21 11 2 2. + <_> + + <_> + 1 16 9 6 -1. + <_> + 1 18 9 2 3. + <_> + + <_> + 7 8 10 4 -1. + <_> + 7 8 5 4 2. + <_> + + <_> + 9 15 6 9 -1. + <_> + 11 15 2 9 3. + <_> + + <_> + 10 18 12 6 -1. + <_> + 16 18 6 3 2. + <_> + 10 21 6 3 2. + <_> + + <_> + 2 18 12 6 -1. + <_> + 2 18 6 3 2. + <_> + 8 21 6 3 2. + <_> + + <_> + 8 3 16 9 -1. + <_> + 8 6 16 3 3. + <_> + + <_> + 0 5 10 6 -1. + <_> + 0 7 10 2 3. + <_> + + <_> + 5 5 18 3 -1. + <_> + 5 6 18 1 3. + <_> + + <_> + 2 6 9 6 -1. + <_> + 2 9 9 3 2. + <_> + + <_> + 14 2 10 9 -1. + <_> + 14 5 10 3 3. + <_> + + <_> + 3 6 18 3 -1. + <_> + 3 7 18 1 3. + <_> + + <_> + 9 2 15 6 -1. + <_> + 9 4 15 2 3. + <_> + + <_> + 4 8 15 6 -1. + <_> + 4 10 15 2 3. + <_> + + <_> + 0 5 24 4 -1. + <_> + 12 5 12 2 2. + <_> + 0 7 12 2 2. + <_> + + <_> + 7 8 6 12 -1. + <_> + 9 8 2 12 3. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 0 12 6 12 -1. + <_> + 0 12 3 6 2. + <_> + 3 18 3 6 2. + <_> + + <_> + 14 12 10 6 -1. + <_> + 14 14 10 2 3. + <_> + + <_> + 2 7 18 9 -1. + <_> + 2 10 18 3 3. + <_> + + <_> + 11 14 10 9 -1. + <_> + 11 17 10 3 3. + <_> + + <_> + 7 6 10 8 -1. + <_> + 7 6 5 4 2. + <_> + 12 10 5 4 2. + <_> + + <_> + 6 6 14 6 -1. + <_> + 13 6 7 3 2. + <_> + 6 9 7 3 2. + <_> + + <_> + 4 13 9 7 -1. + <_> + 7 13 3 7 3. + <_> + + <_> + 14 10 6 12 -1. + <_> + 17 10 3 6 2. + <_> + 14 16 3 6 2. + <_> + + <_> + 4 10 6 12 -1. + <_> + 4 10 3 6 2. + <_> + 7 16 3 6 2. + <_> + + <_> + 13 9 8 6 -1. + <_> + 13 9 4 6 2. + <_> + + <_> + 8 3 4 14 -1. + <_> + 10 3 2 14 2. + <_> + + <_> + 17 0 3 18 -1. + <_> + 18 0 1 18 3. + <_> + + <_> + 4 12 16 12 -1. + <_> + 12 12 8 12 2. + <_> + + <_> + 15 0 6 14 -1. + <_> + 17 0 2 14 3. + <_> + + <_> + 3 0 6 14 -1. + <_> + 5 0 2 14 3. + <_> + + <_> + 12 2 12 20 -1. + <_> + 16 2 4 20 3. + <_> + + <_> + 0 2 12 20 -1. + <_> + 4 2 4 20 3. + <_> + + <_> + 16 0 6 17 -1. + <_> + 18 0 2 17 3. + <_> + + <_> + 2 0 6 17 -1. + <_> + 4 0 2 17 3. + <_> + + <_> + 15 6 9 6 -1. + <_> + 15 8 9 2 3. + <_> + + <_> + 0 6 9 6 -1. + <_> + 0 8 9 2 3. + <_> + + <_> + 18 1 6 13 -1. + <_> + 20 1 2 13 3. + <_> + + <_> + 0 1 6 13 -1. + <_> + 2 1 2 13 3. + <_> + + <_> + 16 0 4 9 -1. + <_> + 16 0 2 9 2. + <_> + + <_> + 5 10 12 7 -1. + <_> + 9 10 4 7 3. + <_> + + <_> + 12 9 12 6 -1. + <_> + 12 11 12 2 3. + <_> + + <_> + 0 9 12 6 -1. + <_> + 0 11 12 2 3. + <_> + + <_> + 5 7 14 9 -1. + <_> + 5 10 14 3 3. + <_> + + <_> + 0 15 20 3 -1. + <_> + 0 16 20 1 3. + <_> + + <_> + 8 10 8 10 -1. + <_> + 12 10 4 5 2. + <_> + 8 15 4 5 2. + <_> + + <_> + 5 4 13 9 -1. + <_> + 5 7 13 3 3. + <_> + + <_> + 10 2 6 18 -1. + <_> + 10 8 6 6 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 6 9 12 4 -1. + <_> + 6 11 12 2 2. + <_> + + <_> + 3 2 15 12 -1. + <_> + 3 6 15 4 3. + <_> + + <_> + 12 0 12 5 -1. + <_> + 16 0 4 5 3. + <_> + + <_> + 0 15 18 3 -1. + <_> + 6 15 6 3 3. + <_> + + <_> + 0 14 24 5 -1. + <_> + 8 14 8 5 3. + <_> + + <_> + 5 1 3 18 -1. + <_> + 6 1 1 18 3. + <_> + + <_> + 10 0 4 14 -1. + <_> + 10 0 2 14 2. + <_> + + <_> + 9 3 4 9 -1. + <_> + 11 3 2 9 2. + <_> + + <_> + 8 2 12 6 -1. + <_> + 14 2 6 3 2. + <_> + 8 5 6 3 2. + <_> + + <_> + 0 4 17 4 -1. + <_> + 0 6 17 2 2. + <_> + + <_> + 16 16 5 8 -1. + <_> + 16 20 5 4 2. + <_> + + <_> + 3 16 5 8 -1. + <_> + 3 20 5 4 2. + <_> + + <_> + 6 18 18 2 -1. + <_> + 6 19 18 1 2. + <_> + + <_> + 0 0 12 5 -1. 
+ <_> + 4 0 4 5 3. + <_> + + <_> + 14 3 6 12 -1. + <_> + 17 3 3 6 2. + <_> + 14 9 3 6 2. + <_> + + <_> + 0 12 6 12 -1. + <_> + 2 12 2 12 3. + <_> + + <_> + 2 3 21 3 -1. + <_> + 2 4 21 1 3. + <_> + + <_> + 4 3 6 12 -1. + <_> + 4 3 3 6 2. + <_> + 7 9 3 6 2. + <_> + + <_> + 12 8 12 6 -1. + <_> + 18 8 6 3 2. + <_> + 12 11 6 3 2. + <_> + + <_> + 0 15 16 9 -1. + <_> + 8 15 8 9 2. + <_> + + <_> + 6 13 18 5 -1. + <_> + 6 13 9 5 2. + <_> + + <_> + 1 6 15 6 -1. + <_> + 6 6 5 6 3. + <_> + + <_> + 11 9 9 6 -1. + <_> + 14 9 3 6 3. + <_> + + <_> + 3 0 15 11 -1. + <_> + 8 0 5 11 3. + <_> + + <_> + 15 3 3 18 -1. + <_> + 15 9 3 6 3. + <_> + + <_> + 6 3 3 18 -1. + <_> + 6 9 3 6 3. + <_> + + <_> + 9 5 10 8 -1. + <_> + 14 5 5 4 2. + <_> + 9 9 5 4 2. + <_> + + <_> + 4 4 16 8 -1. + <_> + 4 4 8 4 2. + <_> + 12 8 8 4 2. + <_> + + <_> + 7 7 12 3 -1. + <_> + 7 7 6 3 2. + <_> + + <_> + 5 0 9 13 -1. + <_> + 8 0 3 13 3. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 8 1 10 9 -1. + <_> + 8 4 10 3 3. + <_> + + <_> + 0 2 18 2 -1. + <_> + 0 3 18 1 2. + <_> + + <_> + 10 13 14 6 -1. + <_> + 17 13 7 3 2. + <_> + 10 16 7 3 2. + <_> + + <_> + 0 13 14 6 -1. + <_> + 0 13 7 3 2. + <_> + 7 16 7 3 2. + <_> + + <_> + 20 2 3 21 -1. + <_> + 21 2 1 21 3. + <_> + + <_> + 0 9 5 12 -1. + <_> + 0 13 5 4 3. + <_> + + <_> + 12 6 12 6 -1. + <_> + 12 8 12 2 3. + <_> + + <_> + 1 8 20 3 -1. + <_> + 1 9 20 1 3. + <_> + + <_> + 5 7 19 3 -1. + <_> + 5 8 19 1 3. + <_> + + <_> + 1 12 9 6 -1. + <_> + 1 14 9 2 3. + <_> + + <_> + 6 10 14 12 -1. + <_> + 6 14 14 4 3. + <_> + + <_> + 5 6 14 18 -1. + <_> + 5 12 14 6 3. + <_> + + <_> + 11 12 9 7 -1. + <_> + 14 12 3 7 3. + <_> + + <_> + 1 15 18 4 -1. + <_> + 1 17 18 2 2. + <_> + + <_> + 11 14 6 9 -1. + <_> + 11 17 6 3 3. + <_> + + <_> + 0 8 18 4 -1. + <_> + 0 8 9 2 2. + <_> + 9 10 9 2 2. + <_> + + <_> + 3 10 20 6 -1. + <_> + 13 10 10 3 2. + <_> + 3 13 10 3 2. + <_> + + <_> + 1 10 20 6 -1. + <_> + 1 10 10 3 2. + <_> + 11 13 10 3 2. + <_> + + <_> + 0 9 24 2 -1. + <_> + 0 9 12 2 2. + <_> + + <_> + 1 12 20 8 -1. + <_> + 1 12 10 4 2. + <_> + 11 16 10 4 2. + <_> + + <_> + 11 12 9 7 -1. + <_> + 14 12 3 7 3. + <_> + + <_> + 4 12 9 7 -1. + <_> + 7 12 3 7 3. + <_> + + <_> + 12 12 8 5 -1. + <_> + 12 12 4 5 2. + <_> + + <_> + 4 12 8 5 -1. + <_> + 8 12 4 5 2. + <_> + + <_> + 13 10 4 10 -1. + <_> + 13 10 2 10 2. + <_> + + <_> + 1 15 20 2 -1. + <_> + 11 15 10 2 2. + <_> + + <_> + 9 10 6 6 -1. + <_> + 9 10 3 6 2. + <_> + + <_> + 0 1 21 3 -1. + <_> + 7 1 7 3 3. + <_> + + <_> + 6 4 13 9 -1. + <_> + 6 7 13 3 3. + <_> + + <_> + 6 5 12 5 -1. + <_> + 10 5 4 5 3. + <_> + + <_> + 10 10 10 6 -1. + <_> + 10 12 10 2 3. + <_> + + <_> + 6 12 5 8 -1. + <_> + 6 16 5 4 2. + <_> + + <_> + 13 0 6 9 -1. + <_> + 15 0 2 9 3. + <_> + + <_> + 2 10 18 6 -1. + <_> + 8 10 6 6 3. + <_> + + <_> + 11 2 9 4 -1. + <_> + 11 4 9 2 2. + <_> + + <_> + 1 20 21 3 -1. + <_> + 8 20 7 3 3. + <_> + + <_> + 1 10 22 2 -1. + <_> + 1 11 22 1 2. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 13 0 6 9 -1. + <_> + 15 0 2 9 3. + <_> + + <_> + 5 0 6 9 -1. + <_> + 7 0 2 9 3. + <_> + + <_> + 18 2 6 20 -1. + <_> + 20 2 2 20 3. + <_> + + <_> + 0 2 6 20 -1. + <_> + 2 2 2 20 3. + <_> + + <_> + 11 7 6 14 -1. + <_> + 14 7 3 7 2. + <_> + 11 14 3 7 2. + <_> + + <_> + 0 1 4 9 -1. + <_> + 2 1 2 9 2. + <_> + + <_> + 12 14 9 4 -1. + <_> + 12 16 9 2 2. + <_> + + <_> + 1 13 9 4 -1. + <_> + 1 15 9 2 2. + <_> + + <_> + 7 6 15 6 -1. + <_> + 7 8 15 2 3. + <_> + + <_> + 8 2 3 18 -1. + <_> + 8 8 3 6 3. 
+ <_> + + <_> + 6 6 12 6 -1. + <_> + 12 6 6 3 2. + <_> + 6 9 6 3 2. + <_> + + <_> + 2 19 20 4 -1. + <_> + 2 19 10 2 2. + <_> + 12 21 10 2 2. + <_> + + <_> + 14 15 6 9 -1. + <_> + 14 18 6 3 3. + <_> + + <_> + 3 5 18 14 -1. + <_> + 3 5 9 7 2. + <_> + 12 12 9 7 2. + <_> + + <_> + 15 6 4 18 -1. + <_> + 17 6 2 9 2. + <_> + 15 15 2 9 2. + <_> + + <_> + 5 6 4 18 -1. + <_> + 5 6 2 9 2. + <_> + 7 15 2 9 2. + <_> + + <_> + 11 0 6 9 -1. + <_> + 13 0 2 9 3. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 11 5 6 9 -1. + <_> + 13 5 2 9 3. + <_> + + <_> + 9 5 6 6 -1. + <_> + 12 5 3 6 2. + <_> + + <_> + 4 1 16 6 -1. + <_> + 12 1 8 3 2. + <_> + 4 4 8 3 2. + <_> + + <_> + 9 13 6 11 -1. + <_> + 11 13 2 11 3. + <_> + + <_> + 17 1 6 12 -1. + <_> + 20 1 3 6 2. + <_> + 17 7 3 6 2. + <_> + + <_> + 1 17 18 3 -1. + <_> + 1 18 18 1 3. + <_> + + <_> + 7 13 10 8 -1. + <_> + 7 17 10 4 2. + <_> + + <_> + 6 18 10 6 -1. + <_> + 6 20 10 2 3. + <_> + + <_> + 9 14 9 4 -1. + <_> + 9 16 9 2 2. + <_> + + <_> + 1 1 6 12 -1. + <_> + 1 1 3 6 2. + <_> + 4 7 3 6 2. + <_> + + <_> + 19 4 5 12 -1. + <_> + 19 8 5 4 3. + <_> + + <_> + 0 0 8 8 -1. + <_> + 4 0 4 8 2. + <_> + + <_> + 3 5 19 3 -1. + <_> + 3 6 19 1 3. + <_> + + <_> + 1 5 12 6 -1. + <_> + 1 5 6 3 2. + <_> + 7 8 6 3 2. + <_> + + <_> + 2 1 21 8 -1. + <_> + 9 1 7 8 3. + <_> + + <_> + 4 1 16 8 -1. + <_> + 4 5 16 4 2. + <_> + + <_> + 6 0 18 3 -1. + <_> + 6 1 18 1 3. + <_> + + <_> + 4 4 10 14 -1. + <_> + 4 11 10 7 2. + <_> + + <_> + 15 6 4 10 -1. + <_> + 15 11 4 5 2. + <_> + + <_> + 3 18 18 3 -1. + <_> + 9 18 6 3 3. + <_> + + <_> + 8 18 12 6 -1. + <_> + 12 18 4 6 3. + <_> + + <_> + 3 15 6 9 -1. + <_> + 6 15 3 9 2. + <_> + + <_> + 15 7 6 8 -1. + <_> + 15 11 6 4 2. + <_> + + <_> + 3 7 6 8 -1. + <_> + 3 11 6 4 2. + <_> + + <_> + 5 9 18 6 -1. + <_> + 14 9 9 3 2. + <_> + 5 12 9 3 2. + <_> + + <_> + 1 13 12 6 -1. + <_> + 1 15 12 2 3. + <_> + + <_> + 14 15 10 6 -1. + <_> + 14 17 10 2 3. + <_> + + <_> + 0 15 10 6 -1. + <_> + 0 17 10 2 3. + <_> + + <_> + 15 13 6 9 -1. + <_> + 15 16 6 3 3. + <_> + + <_> + 3 13 6 9 -1. + <_> + 3 16 6 3 3. + <_> + + <_> + 9 5 8 8 -1. + <_> + 9 5 4 8 2. + <_> + + <_> + 1 18 12 6 -1. + <_> + 1 18 6 3 2. + <_> + 7 21 6 3 2. + <_> + + <_> + 13 19 10 4 -1. + <_> + 13 21 10 2 2. + <_> + + <_> + 1 19 10 4 -1. + <_> + 1 21 10 2 2. + <_> + + <_> + 6 19 18 3 -1. + <_> + 6 20 18 1 3. + <_> + + <_> + 8 14 4 10 -1. + <_> + 8 19 4 5 2. + <_> + + <_> + 0 0 24 6 -1. + <_> + 0 2 24 2 3. + <_> + + <_> + 0 1 6 9 -1. + <_> + 0 4 6 3 3. + <_> + + <_> + 4 9 20 6 -1. + <_> + 14 9 10 3 2. + <_> + 4 12 10 3 2. + <_> + + <_> + 1 15 19 8 -1. + <_> + 1 19 19 4 2. + <_> + + <_> + 14 0 10 6 -1. + <_> + 14 2 10 2 3. + <_> + + <_> + 1 10 21 14 -1. + <_> + 8 10 7 14 3. + <_> + + <_> + 10 10 8 8 -1. + <_> + 10 10 4 8 2. + <_> + + <_> + 6 8 10 4 -1. + <_> + 11 8 5 4 2. + <_> + + <_> + 10 5 4 9 -1. + <_> + 10 5 2 9 2. + <_> + + <_> + 7 5 6 10 -1. + <_> + 9 5 2 10 3. + <_> + + <_> + 14 4 4 13 -1. + <_> + 14 4 2 13 2. + <_> + + <_> + 6 4 4 13 -1. + <_> + 8 4 2 13 2. + <_> + + <_> + 8 7 9 6 -1. + <_> + 11 7 3 6 3. + <_> + + <_> + 3 6 16 6 -1. + <_> + 3 6 8 3 2. + <_> + 11 9 8 3 2. + <_> + + <_> + 5 4 16 14 -1. + <_> + 13 4 8 7 2. + <_> + 5 11 8 7 2. + <_> + + <_> + 0 0 24 4 -1. + <_> + 0 0 12 2 2. + <_> + 12 2 12 2 2. + <_> + + <_> + 9 1 9 6 -1. + <_> + 12 1 3 6 3. + <_> + + <_> + 4 1 14 4 -1. + <_> + 11 1 7 4 2. + <_> + + <_> + 10 14 7 9 -1. + <_> + 10 17 7 3 3. + <_> + + <_> + 8 3 8 10 -1. + <_> + 8 3 4 5 2. + <_> + 12 8 4 5 2. + <_> + + <_> + 7 3 12 5 -1. + <_> + 11 3 4 5 3. 
+ <_> + + <_> + 8 2 4 13 -1. + <_> + 10 2 2 13 2. + <_> + + <_> + 11 2 3 19 -1. + <_> + 12 2 1 19 3. + <_> + + <_> + 7 7 9 6 -1. + <_> + 10 7 3 6 3. + <_> + + <_> + 4 22 20 2 -1. + <_> + 4 22 10 2 2. + <_> + + <_> + 0 16 24 4 -1. + <_> + 0 16 12 2 2. + <_> + 12 18 12 2 2. + <_> + + <_> + 7 3 12 5 -1. + <_> + 11 3 4 5 3. + <_> + + <_> + 1 10 8 14 -1. + <_> + 1 10 4 7 2. + <_> + 5 17 4 7 2. + <_> + + <_> + 11 16 6 6 -1. + <_> + 11 19 6 3 2. + <_> + + <_> + 6 0 10 24 -1. + <_> + 6 0 5 12 2. + <_> + 11 12 5 12 2. + <_> + + <_> + 7 5 14 14 -1. + <_> + 14 5 7 7 2. + <_> + 7 12 7 7 2. + <_> + + <_> + 7 8 10 8 -1. + <_> + 7 8 5 4 2. + <_> + 12 12 5 4 2. + <_> + + <_> + 9 1 9 6 -1. + <_> + 12 1 3 6 3. + <_> + + <_> + 0 6 24 3 -1. + <_> + 12 6 12 3 2. + <_> + + <_> + 7 3 12 5 -1. + <_> + 11 3 4 5 3. + <_> + + <_> + 1 13 22 4 -1. + <_> + 1 13 11 2 2. + <_> + 12 15 11 2 2. + <_> + + <_> + 9 12 12 6 -1. + <_> + 9 14 12 2 3. + <_> + + <_> + 0 5 9 6 -1. + <_> + 0 7 9 2 3. + <_> + + <_> + 1 5 23 6 -1. + <_> + 1 7 23 2 3. + <_> + + <_> + 1 6 19 12 -1. + <_> + 1 10 19 4 3. + <_> + + <_> + 9 1 6 21 -1. + <_> + 9 8 6 7 3. + <_> + + <_> + 3 19 18 3 -1. + <_> + 9 19 6 3 3. + <_> + + <_> + 9 14 6 9 -1. + <_> + 11 14 2 9 3. + <_> + + <_> + 9 6 4 12 -1. + <_> + 11 6 2 12 2. + <_> + + <_> + 16 0 6 9 -1. + <_> + 18 0 2 9 3. + <_> + + <_> + 2 0 6 9 -1. + <_> + 4 0 2 9 3. + <_> + + <_> + 13 1 4 22 -1. + <_> + 15 1 2 11 2. + <_> + 13 12 2 11 2. + <_> + + <_> + 1 8 8 12 -1. + <_> + 1 14 8 6 2. + <_> + + <_> + 14 7 7 9 -1. + <_> + 14 10 7 3 3. + <_> + + <_> + 3 12 18 4 -1. + <_> + 3 12 9 2 2. + <_> + 12 14 9 2 2. + <_> + + <_> + 13 1 4 22 -1. + <_> + 15 1 2 11 2. + <_> + 13 12 2 11 2. + <_> + + <_> + 7 1 4 22 -1. + <_> + 7 1 2 11 2. + <_> + 9 12 2 11 2. + <_> + + <_> + 4 7 20 4 -1. + <_> + 14 7 10 2 2. + <_> + 4 9 10 2 2. + <_> + + <_> + 9 10 6 7 -1. + <_> + 12 10 3 7 2. + <_> + + <_> + 7 7 10 4 -1. + <_> + 7 7 5 4 2. + <_> + + <_> + 0 3 4 15 -1. + <_> + 0 8 4 5 3. + <_> + + <_> + 15 0 8 12 -1. + <_> + 19 0 4 6 2. + <_> + 15 6 4 6 2. + <_> + + <_> + 1 0 8 12 -1. + <_> + 1 0 4 6 2. + <_> + 5 6 4 6 2. + <_> + + <_> + 14 5 6 16 -1. + <_> + 16 5 2 16 3. + <_> + + <_> + 4 5 6 16 -1. + <_> + 6 5 2 16 3. + <_> + + <_> + 15 0 6 16 -1. + <_> + 17 0 2 16 3. + <_> + + <_> + 3 0 6 16 -1. + <_> + 5 0 2 16 3. + <_> + + <_> + 0 2 24 3 -1. + <_> + 0 3 24 1 3. + <_> + + <_> + 7 1 10 4 -1. + <_> + 7 3 10 2 2. + <_> + + <_> + 1 0 23 8 -1. + <_> + 1 4 23 4 2. + <_> + + <_> + 1 17 19 3 -1. + <_> + 1 18 19 1 3. + <_> + + <_> + 6 18 18 2 -1. + <_> + 6 19 18 1 2. + <_> + + <_> + 1 17 9 6 -1. + <_> + 1 19 9 2 3. + <_> + + <_> + 15 15 6 9 -1. + <_> + 15 18 6 3 3. + <_> + + <_> + 3 15 6 9 -1. + <_> + 3 18 6 3 3. + <_> + + <_> + 4 14 20 6 -1. + <_> + 4 17 20 3 2. + <_> + + <_> + 0 10 6 14 -1. + <_> + 0 10 3 7 2. + <_> + 3 17 3 7 2. + <_> + + <_> + 6 18 18 3 -1. + <_> + 6 19 18 1 3. + <_> + + <_> + 4 12 9 7 -1. + <_> + 7 12 3 7 3. + <_> + + <_> + 6 10 18 5 -1. + <_> + 12 10 6 5 3. + <_> + + <_> + 0 10 18 5 -1. + <_> + 6 10 6 5 3. + <_> + + <_> + 3 2 18 9 -1. + <_> + 9 2 6 9 3. + <_> + + <_> + 4 6 10 10 -1. + <_> + 4 6 5 5 2. + <_> + 9 11 5 5 2. + <_> + + <_> + 20 14 4 9 -1. + <_> + 20 14 2 9 2. + <_> + + <_> + 0 14 4 9 -1. + <_> + 2 14 2 9 2. + <_> + + <_> + 11 1 4 20 -1. + <_> + 13 1 2 10 2. + <_> + 11 11 2 10 2. + <_> + + <_> + 6 21 12 3 -1. + <_> + 12 21 6 3 2. + <_> + + <_> + 11 1 4 20 -1. + <_> + 13 1 2 10 2. + <_> + 11 11 2 10 2. + <_> + + <_> + 1 16 10 8 -1. + <_> + 1 16 5 4 2. + <_> + 6 20 5 4 2. + <_> + + <_> + 11 1 4 20 -1. 
+ <_> + 13 1 2 10 2. + <_> + 11 11 2 10 2. + <_> + + <_> + 1 0 3 19 -1. + <_> + 2 0 1 19 3. + <_> + + <_> + 11 1 4 20 -1. + <_> + 13 1 2 10 2. + <_> + 11 11 2 10 2. + <_> + + <_> + 0 1 6 9 -1. + <_> + 2 1 2 9 3. + <_> + + <_> + 3 7 19 4 -1. + <_> + 3 9 19 2 2. + <_> + + <_> + 7 14 9 6 -1. + <_> + 7 16 9 2 3. + <_> + + <_> + 17 1 7 6 -1. + <_> + 17 4 7 3 2. + <_> + + <_> + 5 0 14 8 -1. + <_> + 5 4 14 4 2. + <_> + + <_> + 16 1 8 6 -1. + <_> + 16 4 8 3 2. + <_> + + <_> + 0 1 8 6 -1. + <_> + 0 4 8 3 2. + <_> + + <_> + 6 0 18 4 -1. + <_> + 15 0 9 2 2. + <_> + 6 2 9 2 2. + <_> + + <_> + 0 14 9 6 -1. + <_> + 0 16 9 2 3. + <_> + + <_> + 3 7 18 8 -1. + <_> + 9 7 6 8 3. + <_> + + <_> + 2 11 6 9 -1. + <_> + 4 11 2 9 3. + <_> + + <_> + 10 5 6 9 -1. + <_> + 12 5 2 9 3. + <_> + + <_> + 10 6 4 18 -1. + <_> + 10 6 2 9 2. + <_> + 12 15 2 9 2. + <_> + + <_> + 11 1 4 20 -1. + <_> + 13 1 2 10 2. + <_> + 11 11 2 10 2. + <_> + + <_> + 9 1 4 20 -1. + <_> + 9 1 2 10 2. + <_> + 11 11 2 10 2. + <_> + + <_> + 5 9 18 6 -1. + <_> + 14 9 9 3 2. + <_> + 5 12 9 3 2. + <_> + + <_> + 6 4 6 9 -1. + <_> + 8 4 2 9 3. + <_> + + <_> + 10 16 8 6 -1. + <_> + 10 16 4 6 2. + <_> + + <_> + 0 0 18 8 -1. + <_> + 0 0 9 4 2. + <_> + 9 4 9 4 2. + <_> + + <_> + 6 5 14 12 -1. + <_> + 13 5 7 6 2. + <_> + 6 11 7 6 2. + <_> + + <_> + 4 3 15 7 -1. + <_> + 9 3 5 7 3. + <_> + + <_> + 14 12 10 6 -1. + <_> + 14 14 10 2 3. + <_> + + <_> + 0 11 4 10 -1. + <_> + 0 16 4 5 2. + <_> + + <_> + 1 10 22 3 -1. + <_> + 1 11 22 1 3. + <_> + + <_> + 8 9 6 10 -1. + <_> + 10 9 2 10 3. + <_> + + <_> + 13 2 6 12 -1. + <_> + 16 2 3 6 2. + <_> + 13 8 3 6 2. + <_> + + <_> + 10 6 4 18 -1. + <_> + 10 6 2 9 2. + <_> + 12 15 2 9 2. + <_> + + <_> + 7 8 10 16 -1. + <_> + 12 8 5 8 2. + <_> + 7 16 5 8 2. + <_> + + <_> + 8 1 8 12 -1. + <_> + 8 1 4 6 2. + <_> + 12 7 4 6 2. + <_> + + <_> + 7 1 12 14 -1. + <_> + 13 1 6 7 2. + <_> + 7 8 6 7 2. + <_> + + <_> + 2 14 12 6 -1. + <_> + 2 16 12 2 3. + <_> + + <_> + 11 16 6 6 -1. + <_> + 11 19 6 3 2. + <_> + + <_> + 7 16 6 6 -1. + <_> + 7 19 6 3 2. + <_> + + <_> + 13 4 4 10 -1. + <_> + 13 4 2 10 2. + <_> + + <_> + 0 19 19 3 -1. + <_> + 0 20 19 1 3. + <_> + + <_> + 12 8 6 8 -1. + <_> + 12 12 6 4 2. + <_> + + <_> + 8 1 8 22 -1. + <_> + 8 12 8 11 2. + <_> + + <_> + 12 8 6 8 -1. + <_> + 12 12 6 4 2. + <_> + + <_> + 6 8 6 8 -1. + <_> + 6 12 6 4 2. + <_> + + <_> + 14 5 6 9 -1. + <_> + 14 8 6 3 3. + <_> + + <_> + 0 6 24 4 -1. + <_> + 0 8 24 2 2. + <_> + + <_> + 14 12 10 6 -1. + <_> + 14 14 10 2 3. + <_> + + <_> + 0 12 10 6 -1. + <_> + 0 14 10 2 3. + <_> + + <_> + 4 6 19 3 -1. + <_> + 4 7 19 1 3. + <_> + + <_> + 1 6 19 3 -1. + <_> + 1 7 19 1 3. + <_> + + <_> + 4 0 16 9 -1. + <_> + 4 3 16 3 3. + <_> + + <_> + 0 1 24 5 -1. + <_> + 8 1 8 5 3. + <_> + + <_> + 3 6 6 15 -1. + <_> + 3 11 6 5 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 6 22 18 2 -1. + <_> + 6 23 18 1 2. + <_> + + <_> + 2 12 6 9 -1. + <_> + 2 15 6 3 3. + <_> + + <_> + 18 12 6 9 -1. + <_> + 18 15 6 3 3. + <_> + + <_> + 0 12 6 9 -1. + <_> + 0 15 6 3 3. + <_> + + <_> + 11 14 4 10 -1. + <_> + 11 19 4 5 2. + <_> + + <_> + 9 6 6 16 -1. + <_> + 9 14 6 8 2. + <_> + + <_> + 7 7 10 10 -1. + <_> + 7 12 10 5 2. + <_> + + <_> + 1 3 6 13 -1. + <_> + 3 3 2 13 3. + <_> + + <_> + 18 1 6 13 -1. + <_> + 18 1 3 13 2. + <_> + + <_> + 5 1 6 9 -1. + <_> + 7 1 2 9 3. + <_> + + <_> + 18 2 6 11 -1. + <_> + 18 2 3 11 2. + <_> + + <_> + 0 2 6 11 -1. + <_> + 3 2 3 11 2. + <_> + + <_> + 9 12 15 6 -1. + <_> + 9 14 15 2 3. 
+ <_> + + <_> + 2 2 20 3 -1. + <_> + 2 3 20 1 3. + <_> + + <_> + 10 6 4 9 -1. + <_> + 10 6 2 9 2. + <_> + + <_> + 5 6 12 14 -1. + <_> + 5 6 6 7 2. + <_> + 11 13 6 7 2. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 7 0 9 6 -1. + <_> + 10 0 3 6 3. + <_> + + <_> + 10 6 6 9 -1. + <_> + 12 6 2 9 3. + <_> + + <_> + 4 1 12 20 -1. + <_> + 4 1 6 10 2. + <_> + 10 11 6 10 2. + <_> + + <_> + 6 7 18 3 -1. + <_> + 6 7 9 3 2. + <_> + + <_> + 0 7 18 3 -1. + <_> + 9 7 9 3 2. + <_> + + <_> + 3 20 18 3 -1. + <_> + 9 20 6 3 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 6 2 12 15 -1. + <_> + 10 2 4 15 3. + <_> + + <_> + 2 3 18 3 -1. + <_> + 2 4 18 1 3. + <_> + + <_> + 19 4 4 18 -1. + <_> + 21 4 2 9 2. + <_> + 19 13 2 9 2. + <_> + + <_> + 0 1 19 3 -1. + <_> + 0 2 19 1 3. + <_> + + <_> + 5 0 15 4 -1. + <_> + 5 2 15 2 2. + <_> + + <_> + 5 2 14 5 -1. + <_> + 12 2 7 5 2. + <_> + + <_> + 1 2 22 14 -1. + <_> + 1 2 11 14 2. + <_> + + <_> + 8 15 6 9 -1. + <_> + 10 15 2 9 3. + <_> + + <_> + 6 17 18 3 -1. + <_> + 6 18 18 1 3. + <_> + + <_> + 9 6 3 18 -1. + <_> + 9 12 3 6 3. + <_> + + <_> + 2 0 20 3 -1. + <_> + 2 1 20 1 3. + <_> + + <_> + 5 4 5 12 -1. + <_> + 5 8 5 4 3. + <_> + + <_> + 8 6 12 5 -1. + <_> + 12 6 4 5 3. + <_> + + <_> + 9 12 6 12 -1. + <_> + 9 12 3 6 2. + <_> + 12 18 3 6 2. + <_> + + <_> + 14 14 8 10 -1. + <_> + 18 14 4 5 2. + <_> + 14 19 4 5 2. + <_> + + <_> + 2 14 8 10 -1. + <_> + 2 14 4 5 2. + <_> + 6 19 4 5 2. + <_> + + <_> + 10 18 12 6 -1. + <_> + 16 18 6 3 2. + <_> + 10 21 6 3 2. + <_> + + <_> + 1 3 6 9 -1. + <_> + 1 6 6 3 3. + <_> + + <_> + 11 3 3 20 -1. + <_> + 12 3 1 20 3. + <_> + + <_> + 4 6 14 6 -1. + <_> + 4 6 7 3 2. + <_> + 11 9 7 3 2. + <_> + + <_> + 6 5 12 13 -1. + <_> + 10 5 4 13 3. + <_> + + <_> + 5 4 4 15 -1. + <_> + 5 9 4 5 3. + <_> + + <_> + 9 16 15 4 -1. + <_> + 14 16 5 4 3. + <_> + + <_> + 7 8 6 14 -1. + <_> + 7 8 3 7 2. + <_> + 10 15 3 7 2. + <_> + + <_> + 7 6 10 6 -1. + <_> + 7 8 10 2 3. + <_> + + <_> + 2 5 18 3 -1. + <_> + 2 6 18 1 3. + <_> + + <_> + 5 1 15 8 -1. + <_> + 5 5 15 4 2. + <_> + + <_> + 7 1 8 18 -1. + <_> + 7 10 8 9 2. + <_> + + <_> + 0 10 24 3 -1. + <_> + 0 11 24 1 3. + <_> + + <_> + 0 2 6 13 -1. + <_> + 2 2 2 13 3. + <_> + + <_> + 16 0 8 10 -1. + <_> + 20 0 4 5 2. + <_> + 16 5 4 5 2. + <_> + + <_> + 5 1 10 9 -1. + <_> + 5 4 10 3 3. + <_> + + <_> + 5 6 18 3 -1. + <_> + 5 7 18 1 3. + <_> + + <_> + 0 1 24 3 -1. + <_> + 0 2 24 1 3. + <_> + + <_> + 11 4 6 11 -1. + <_> + 13 4 2 11 3. + <_> + + <_> + 0 0 8 10 -1. + <_> + 0 0 4 5 2. + <_> + 4 5 4 5 2. + <_> + + <_> + 4 16 18 3 -1. + <_> + 4 17 18 1 3. + <_> + + <_> + 2 16 18 3 -1. + <_> + 2 17 18 1 3. + <_> + + <_> + 3 0 18 10 -1. + <_> + 12 0 9 5 2. + <_> + 3 5 9 5 2. + <_> + + <_> + 2 3 20 21 -1. + <_> + 12 3 10 21 2. + <_> + + <_> + 6 7 14 3 -1. + <_> + 6 7 7 3 2. + <_> + + <_> + 0 9 12 6 -1. + <_> + 0 9 6 3 2. + <_> + 6 12 6 3 2. + <_> + + <_> + 3 14 21 4 -1. + <_> + 10 14 7 4 3. + <_> + + <_> + 0 14 21 4 -1. + <_> + 7 14 7 4 3. + <_> + + <_> + 5 21 18 3 -1. + <_> + 11 21 6 3 3. + <_> + + <_> + 1 21 18 3 -1. + <_> + 7 21 6 3 3. + <_> + + <_> + 19 4 4 18 -1. + <_> + 21 4 2 9 2. + <_> + 19 13 2 9 2. + <_> + + <_> + 3 7 18 3 -1. + <_> + 3 8 18 1 3. + <_> + + <_> + 19 4 4 18 -1. + <_> + 21 4 2 9 2. + <_> + 19 13 2 9 2. + <_> + + <_> + 7 15 10 6 -1. + <_> + 7 17 10 2 3. + <_> + + <_> + 9 13 11 9 -1. + <_> + 9 16 11 3 3. + <_> + + <_> + 0 6 4 10 -1. + <_> + 0 11 4 5 2. + <_> + + <_> + 15 16 9 6 -1. + <_> + 15 18 9 2 3. + <_> + + <_> + 1 5 4 18 -1. + <_> + 1 5 2 9 2. + <_> + 3 14 2 9 2. 
+ <_> + + <_> + 9 8 8 10 -1. + <_> + 13 8 4 5 2. + <_> + 9 13 4 5 2. + <_> + + <_> + 7 8 8 10 -1. + <_> + 7 8 4 5 2. + <_> + 11 13 4 5 2. + <_> + + <_> + 9 8 12 5 -1. + <_> + 13 8 4 5 3. + <_> + + <_> + 7 8 9 7 -1. + <_> + 10 8 3 7 3. + <_> + + <_> + 9 8 12 5 -1. + <_> + 13 8 4 5 3. + <_> + + <_> + 7 6 9 7 -1. + <_> + 10 6 3 7 3. + <_> + + <_> + 9 8 12 5 -1. + <_> + 13 8 4 5 3. + <_> + + <_> + 10 5 4 18 -1. + <_> + 10 11 4 6 3. + <_> + + <_> + 5 5 14 12 -1. + <_> + 5 11 14 6 2. + <_> + + <_> + 0 1 11 4 -1. + <_> + 0 3 11 2 2. + <_> + + <_> + 9 10 6 10 -1. + <_> + 11 10 2 10 3. + <_> + + <_> + 2 17 11 6 -1. + <_> + 2 19 11 2 3. + <_> + + <_> + 15 16 9 6 -1. + <_> + 15 18 9 2 3. + <_> + + <_> + 1 10 18 2 -1. + <_> + 1 11 18 1 2. + <_> + + <_> + 6 4 12 13 -1. + <_> + 10 4 4 13 3. + <_> + + <_> + 0 18 18 3 -1. + <_> + 0 19 18 1 3. + <_> + + <_> + 6 18 18 3 -1. + <_> + 6 19 18 1 3. + <_> + + <_> + 0 16 9 6 -1. + <_> + 0 18 9 2 3. + <_> + + <_> + 13 15 9 6 -1. + <_> + 13 17 9 2 3. + <_> + + <_> + 2 15 9 6 -1. + <_> + 2 17 9 2 3. + <_> + + <_> + 13 1 6 16 -1. + <_> + 13 1 3 16 2. + <_> + + <_> + 5 1 6 16 -1. + <_> + 8 1 3 16 2. + <_> + + <_> + 11 5 6 10 -1. + <_> + 13 5 2 10 3. + <_> + + <_> + 7 5 6 10 -1. + <_> + 9 5 2 10 3. + <_> + + <_> + 10 0 6 24 -1. + <_> + 12 0 2 24 3. + <_> + + <_> + 3 4 4 20 -1. + <_> + 3 4 2 10 2. + <_> + 5 14 2 10 2. + <_> + + <_> + 14 0 6 9 -1. + <_> + 16 0 2 9 3. + <_> + + <_> + 4 0 6 9 -1. + <_> + 6 0 2 9 3. + <_> + + <_> + 4 5 18 5 -1. + <_> + 10 5 6 5 3. + <_> + + <_> + 5 6 6 9 -1. + <_> + 7 6 2 9 3. + <_> + + <_> + 7 2 15 8 -1. + <_> + 12 2 5 8 3. + <_> + + <_> + 2 2 15 8 -1. + <_> + 7 2 5 8 3. + <_> + + <_> + 10 0 4 9 -1. + <_> + 10 0 2 9 2. + <_> + + <_> + 3 4 6 12 -1. + <_> + 3 4 3 6 2. + <_> + 6 10 3 6 2. + <_> + + <_> + 16 0 8 18 -1. + <_> + 16 0 4 18 2. + <_> + + <_> + 0 0 8 18 -1. + <_> + 4 0 4 18 2. + <_> + + <_> + 0 7 24 6 -1. + <_> + 0 9 24 2 3. + <_> + + <_> + 4 7 14 3 -1. + <_> + 11 7 7 3 2. + <_> + + <_> + 10 8 8 15 -1. + <_> + 10 8 4 15 2. + <_> + + <_> + 7 0 10 14 -1. + <_> + 12 0 5 14 2. + <_> + + <_> + 13 10 8 10 -1. + <_> + 17 10 4 5 2. + <_> + 13 15 4 5 2. + <_> + + <_> + 3 0 4 9 -1. + <_> + 5 0 2 9 2. + <_> + + <_> + 16 1 6 8 -1. + <_> + 16 1 3 8 2. + <_> + + <_> + 2 1 6 8 -1. + <_> + 5 1 3 8 2. + <_> + + <_> + 3 6 18 12 -1. + <_> + 3 10 18 4 3. + <_> + + <_> + 4 12 16 4 -1. + <_> + 4 14 16 2 2. + <_> + + <_> + 4 9 16 15 -1. + <_> + 4 14 16 5 3. + <_> + + <_> + 3 10 8 10 -1. + <_> + 3 10 4 5 2. + <_> + 7 15 4 5 2. + <_> + + <_> + 8 18 16 6 -1. + <_> + 16 18 8 3 2. + <_> + 8 21 8 3 2. + <_> + + <_> + 2 16 12 5 -1. + <_> + 6 16 4 5 3. + <_> + + <_> + 14 14 9 4 -1. + <_> + 14 16 9 2 2. + <_> + + <_> + 7 14 9 6 -1. + <_> + 7 16 9 2 3. + <_> + + <_> + 4 10 16 12 -1. + <_> + 4 14 16 4 3. + <_> + + <_> + 0 13 19 6 -1. + <_> + 0 15 19 2 3. + <_> + + <_> + 10 13 9 6 -1. + <_> + 10 15 9 2 3. + <_> + + <_> + 5 0 3 23 -1. + <_> + 6 0 1 23 3. + <_> + + <_> + 0 8 24 6 -1. + <_> + 0 10 24 2 3. + <_> + + <_> + 0 5 5 12 -1. + <_> + 0 9 5 4 3. + <_> + + <_> + 3 0 19 18 -1. + <_> + 3 9 19 9 2. + <_> + + <_> + 9 11 6 12 -1. + <_> + 9 11 3 6 2. + <_> + 12 17 3 6 2. + <_> + + <_> + 0 5 24 8 -1. + <_> + 12 5 12 4 2. + <_> + 0 9 12 4 2. + <_> + + <_> + 6 18 9 4 -1. + <_> + 6 20 9 2 2. + <_> + + <_> + 8 8 10 6 -1. + <_> + 8 10 10 2 3. + <_> + + <_> + 2 7 20 3 -1. + <_> + 2 8 20 1 3. + <_> + + <_> + 12 0 7 20 -1. + <_> + 12 10 7 10 2. + <_> + + <_> + 5 0 7 20 -1. + <_> + 5 10 7 10 2. + <_> + + <_> + 14 2 2 18 -1. + <_> + 14 11 2 9 2. + <_> + + <_> + 5 8 10 12 -1. 
+ <_> + 10 8 5 12 2. + <_> + + <_> + 6 9 12 8 -1. + <_> + 12 9 6 4 2. + <_> + 6 13 6 4 2. + <_> + + <_> + 7 7 3 14 -1. + <_> + 7 14 3 7 2. + <_> + + <_> + 11 2 12 16 -1. + <_> + 17 2 6 8 2. + <_> + 11 10 6 8 2. + <_> + + <_> + 7 0 6 9 -1. + <_> + 9 0 2 9 3. + <_> + + <_> + 13 14 9 4 -1. + <_> + 13 16 9 2 2. + <_> + + <_> + 0 12 22 4 -1. + <_> + 0 12 11 2 2. + <_> + 11 14 11 2 2. + <_> + + <_> + 1 12 22 6 -1. + <_> + 12 12 11 3 2. + <_> + 1 15 11 3 2. + <_> + + <_> + 6 6 9 6 -1. + <_> + 9 6 3 6 3. + <_> + + <_> + 10 0 4 9 -1. + <_> + 10 0 2 9 2. + <_> + + <_> + 3 8 18 7 -1. + <_> + 9 8 6 7 3. + <_> + + <_> + 0 6 24 6 -1. + <_> + 0 8 24 2 3. + <_> + + <_> + 0 11 24 10 -1. + <_> + 8 11 8 10 3. + <_> + + <_> + 3 3 18 21 -1. + <_> + 9 3 6 21 3. + <_> + + <_> + 7 12 4 10 -1. + <_> + 9 12 2 10 2. + <_> + + <_> + 10 16 10 8 -1. + <_> + 15 16 5 4 2. + <_> + 10 20 5 4 2. + <_> + + <_> + 8 6 6 9 -1. + <_> + 10 6 2 9 3. + <_> + + <_> + 12 10 6 12 -1. + <_> + 15 10 3 6 2. + <_> + 12 16 3 6 2. + <_> + + <_> + 6 10 6 12 -1. + <_> + 6 10 3 6 2. + <_> + 9 16 3 6 2. + <_> + + <_> + 16 12 6 12 -1. + <_> + 19 12 3 6 2. + <_> + 16 18 3 6 2. + <_> + + <_> + 2 12 6 12 -1. + <_> + 2 12 3 6 2. + <_> + 5 18 3 6 2. + <_> + + <_> + 10 15 6 9 -1. + <_> + 12 15 2 9 3. + <_> + + <_> + 8 15 6 9 -1. + <_> + 10 15 2 9 3. + <_> + + <_> + 14 20 10 4 -1. + <_> + 14 20 5 4 2. + <_> + + <_> + 0 20 10 4 -1. + <_> + 5 20 5 4 2. + <_> + + <_> + 11 17 9 6 -1. + <_> + 11 19 9 2 3. + <_> + + <_> + 3 2 14 4 -1. + <_> + 3 4 14 2 2. + <_> + + <_> + 10 1 10 4 -1. + <_> + 10 3 10 2 2. + <_> + + <_> + 0 15 10 4 -1. + <_> + 5 15 5 4 2. + <_> + + <_> + 19 2 3 19 -1. + <_> + 20 2 1 19 3. + <_> + + <_> + 4 12 9 8 -1. + <_> + 7 12 3 8 3. + <_> + + <_> + 4 7 5 12 -1. + <_> + 4 11 5 4 3. + <_> + + <_> + 0 1 24 3 -1. + <_> + 8 1 8 3 3. + <_> + + <_> + 6 8 12 4 -1. + <_> + 6 10 12 2 2. + <_> + + <_> + 19 3 4 10 -1. + <_> + 19 3 2 10 2. + <_> + + <_> + 0 6 9 6 -1. + <_> + 3 6 3 6 3. + <_> + + <_> + 18 0 6 22 -1. + <_> + 20 0 2 22 3. + <_> + + <_> + 0 0 6 22 -1. + <_> + 2 0 2 22 3. + <_> + + <_> + 5 15 19 3 -1. + <_> + 5 16 19 1 3. + <_> + + <_> + 10 7 4 15 -1. + <_> + 10 12 4 5 3. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 0 21 18 3 -1. + <_> + 0 22 18 1 3. + <_> + + <_> + 7 3 10 15 -1. + <_> + 7 8 10 5 3. + <_> + + <_> + 1 7 18 3 -1. + <_> + 1 8 18 1 3. + <_> + + <_> + 8 2 9 6 -1. + <_> + 11 2 3 6 3. + <_> + + <_> + 0 10 24 14 -1. + <_> + 0 17 24 7 2. + <_> + + <_> + 13 9 8 10 -1. + <_> + 17 9 4 5 2. + <_> + 13 14 4 5 2. + <_> + + <_> + 10 5 4 9 -1. + <_> + 12 5 2 9 2. + <_> + + <_> + 13 9 8 10 -1. + <_> + 17 9 4 5 2. + <_> + 13 14 4 5 2. + <_> + + <_> + 7 11 10 10 -1. + <_> + 7 11 5 5 2. + <_> + 12 16 5 5 2. + <_> + + <_> + 4 13 18 4 -1. + <_> + 13 13 9 2 2. + <_> + 4 15 9 2 2. + <_> + + <_> + 0 0 19 2 -1. + <_> + 0 1 19 1 2. + <_> + + <_> + 0 18 24 6 -1. + <_> + 8 18 8 6 3. + <_> + + <_> + 6 4 8 16 -1. + <_> + 6 12 8 8 2. + <_> + + <_> + 7 8 10 4 -1. + <_> + 7 10 10 2 2. + <_> + + <_> + 0 3 6 9 -1. + <_> + 0 6 6 3 3. + <_> + + <_> + 13 15 7 9 -1. + <_> + 13 18 7 3 3. + <_> + + <_> + 3 18 12 6 -1. + <_> + 3 18 6 3 2. + <_> + 9 21 6 3 2. + <_> + + <_> + 12 14 6 9 -1. + <_> + 12 17 6 3 3. + <_> + + <_> + 2 15 15 8 -1. + <_> + 2 19 15 4 2. + <_> + + <_> + 9 6 6 16 -1. + <_> + 9 14 6 8 2. + <_> + + <_> + 6 6 7 12 -1. + <_> + 6 10 7 4 3. + <_> + + <_> + 14 6 6 9 -1. + <_> + 14 9 6 3 3. + <_> + + <_> + 5 14 6 9 -1. + <_> + 5 17 6 3 3. + <_> + + <_> + 10 8 6 9 -1. + <_> + 12 8 2 9 3. + <_> + + <_> + 6 6 4 18 -1. + <_> + 6 6 2 9 2. 
+ <_> + 8 15 2 9 2. + <_> + + <_> + 14 9 6 12 -1. + <_> + 17 9 3 6 2. + <_> + 14 15 3 6 2. + <_> + + <_> + 4 9 6 12 -1. + <_> + 4 9 3 6 2. + <_> + 7 15 3 6 2. + <_> + + <_> + 14 15 9 6 -1. + <_> + 14 17 9 2 3. + <_> + + <_> + 0 20 18 4 -1. + <_> + 0 20 9 2 2. + <_> + 9 22 9 2 2. + <_> + + <_> + 13 18 9 6 -1. + <_> + 13 20 9 2 3. + <_> + + <_> + 2 18 9 6 -1. + <_> + 2 20 9 2 3. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 0 16 18 3 -1. + <_> + 0 17 18 1 3. + <_> + + <_> + 19 2 4 22 -1. + <_> + 21 2 2 11 2. + <_> + 19 13 2 11 2. + <_> + + <_> + 1 2 4 22 -1. + <_> + 1 2 2 11 2. + <_> + 3 13 2 11 2. + <_> + + <_> + 15 0 2 24 -1. + <_> + 15 0 1 24 2. + <_> + + <_> + 3 20 16 4 -1. + <_> + 11 20 8 4 2. + <_> + + <_> + 11 6 4 18 -1. + <_> + 13 6 2 9 2. + <_> + 11 15 2 9 2. + <_> + + <_> + 7 9 10 14 -1. + <_> + 7 9 5 7 2. + <_> + 12 16 5 7 2. + <_> + + <_> + 14 6 6 9 -1. + <_> + 14 9 6 3 3. + <_> + + <_> + 3 6 7 9 -1. + <_> + 3 9 7 3 3. + <_> + + <_> + 20 4 4 20 -1. + <_> + 22 4 2 10 2. + <_> + 20 14 2 10 2. + <_> + + <_> + 7 6 6 9 -1. + <_> + 7 9 6 3 3. + <_> + + <_> + 7 0 10 14 -1. + <_> + 12 0 5 7 2. + <_> + 7 7 5 7 2. + <_> + + <_> + 2 1 18 6 -1. + <_> + 11 1 9 6 2. + <_> + + <_> + 15 0 2 24 -1. + <_> + 15 0 1 24 2. + <_> + + <_> + 7 0 2 24 -1. + <_> + 8 0 1 24 2. + <_> + + <_> + 13 12 6 7 -1. + <_> + 13 12 3 7 2. + <_> + + <_> + 5 12 6 7 -1. + <_> + 8 12 3 7 2. + <_> + + <_> + 3 5 18 19 -1. + <_> + 9 5 6 19 3. + <_> + + <_> + 5 6 9 6 -1. + <_> + 8 6 3 6 3. + <_> + + <_> + 9 5 9 6 -1. + <_> + 12 5 3 6 3. + <_> + + <_> + 3 16 10 8 -1. + <_> + 3 16 5 4 2. + <_> + 8 20 5 4 2. + <_> + + <_> + 19 8 5 15 -1. + <_> + 19 13 5 5 3. + <_> + + <_> + 0 8 5 15 -1. + <_> + 0 13 5 5 3. + <_> + + <_> + 20 4 4 20 -1. + <_> + 22 4 2 10 2. + <_> + 20 14 2 10 2. + <_> + + <_> + 0 4 4 20 -1. + <_> + 0 4 2 10 2. + <_> + 2 14 2 10 2. + <_> + + <_> + 7 7 10 4 -1. + <_> + 7 7 5 4 2. + <_> + + <_> + 4 19 14 4 -1. + <_> + 11 19 7 4 2. + <_> + + <_> + 10 11 12 3 -1. + <_> + 10 11 6 3 2. + <_> + + <_> + 0 1 24 3 -1. + <_> + 0 2 24 1 3. + <_> + + <_> + 7 2 14 20 -1. + <_> + 14 2 7 10 2. + <_> + 7 12 7 10 2. + <_> + + <_> + 0 13 6 9 -1. + <_> + 2 13 2 9 3. + <_> + + <_> + 13 0 4 19 -1. + <_> + 13 0 2 19 2. + <_> + + <_> + 1 11 14 3 -1. + <_> + 8 11 7 3 2. + <_> + + <_> + 7 1 16 20 -1. + <_> + 15 1 8 10 2. + <_> + 7 11 8 10 2. + <_> + + <_> + 0 10 21 9 -1. + <_> + 7 10 7 9 3. + <_> + + <_> + 6 19 15 5 -1. + <_> + 11 19 5 5 3. + <_> + + <_> + 8 10 6 6 -1. + <_> + 11 10 3 6 2. + <_> + + <_> + 7 1 16 20 -1. + <_> + 15 1 8 10 2. + <_> + 7 11 8 10 2. + <_> + + <_> + 1 1 16 20 -1. + <_> + 1 1 8 10 2. + <_> + 9 11 8 10 2. + <_> + + <_> + 16 4 3 12 -1. + <_> + 16 10 3 6 2. + <_> + + <_> + 5 4 3 12 -1. + <_> + 5 10 3 6 2. + <_> + + <_> + 7 6 10 8 -1. + <_> + 12 6 5 4 2. + <_> + 7 10 5 4 2. + <_> + + <_> + 4 9 6 6 -1. + <_> + 4 12 6 3 2. + <_> + + <_> + 6 5 12 4 -1. + <_> + 6 7 12 2 2. + <_> + + <_> + 9 2 5 15 -1. + <_> + 9 7 5 5 3. + <_> + + <_> + 15 0 9 6 -1. + <_> + 15 2 9 2 3. + <_> + + <_> + 6 0 11 10 -1. + <_> + 6 5 11 5 2. + <_> + + <_> + 12 7 4 12 -1. + <_> + 12 13 4 6 2. + <_> + + <_> + 7 2 9 4 -1. + <_> + 7 4 9 2 2. + <_> + + <_> + 6 0 13 6 -1. + <_> + 6 2 13 2 3. + <_> + + <_> + 10 6 4 18 -1. + <_> + 10 6 2 9 2. + <_> + 12 15 2 9 2. + <_> + + <_> + 10 8 6 9 -1. + <_> + 12 8 2 9 3. + <_> + + <_> + 3 18 10 6 -1. + <_> + 3 20 10 2 3. + <_> + + <_> + 4 14 20 3 -1. + <_> + 4 15 20 1 3. + <_> + + <_> + 2 15 9 6 -1. + <_> + 2 17 9 2 3. + <_> + + <_> + 13 0 4 19 -1. + <_> + 13 0 2 19 2. 
+ <_> + + <_> + 7 0 4 19 -1. + <_> + 9 0 2 19 2. + <_> + + <_> + 1 4 22 2 -1. + <_> + 1 5 22 1 2. + <_> + + <_> + 0 0 9 6 -1. + <_> + 0 2 9 2 3. + <_> + + <_> + 0 0 24 18 -1. + <_> + 0 9 24 9 2. + <_> + + <_> + 3 2 16 8 -1. + <_> + 3 6 16 4 2. + <_> + + <_> + 3 6 18 6 -1. + <_> + 3 8 18 2 3. + <_> + + <_> + 3 1 6 10 -1. + <_> + 5 1 2 10 3. + <_> + + <_> + 13 0 9 6 -1. + <_> + 16 0 3 6 3. + <_> + + <_> + 2 0 9 6 -1. + <_> + 5 0 3 6 3. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 6 0 7 10 -1. + <_> + 6 5 7 5 2. + <_> + + <_> + 2 2 20 4 -1. + <_> + 12 2 10 2 2. + <_> + 2 4 10 2 2. + <_> + + <_> + 2 11 19 3 -1. + <_> + 2 12 19 1 3. + <_> + + <_> + 10 8 6 9 -1. + <_> + 12 8 2 9 3. + <_> + + <_> + 8 8 6 9 -1. + <_> + 10 8 2 9 3. + <_> + + <_> + 13 8 4 9 -1. + <_> + 13 8 2 9 2. + <_> + + <_> + 3 11 9 9 -1. + <_> + 6 11 3 9 3. + <_> + + <_> + 3 9 18 5 -1. + <_> + 9 9 6 5 3. + <_> + + <_> + 2 4 2 20 -1. + <_> + 2 14 2 10 2. + <_> + + <_> + 14 17 8 6 -1. + <_> + 14 20 8 3 2. + <_> + + <_> + 3 21 18 2 -1. + <_> + 3 22 18 1 2. + <_> + + <_> + 5 4 15 6 -1. + <_> + 10 4 5 6 3. + <_> + + <_> + 2 15 12 6 -1. + <_> + 2 17 12 2 3. + <_> + + <_> + 17 8 6 9 -1. + <_> + 17 11 6 3 3. + <_> + + <_> + 2 12 20 4 -1. + <_> + 2 12 10 2 2. + <_> + 12 14 10 2 2. + <_> + + <_> + 0 17 24 6 -1. + <_> + 0 19 24 2 3. + <_> + + <_> + 7 16 9 4 -1. + <_> + 7 18 9 2 2. + <_> + + <_> + 15 1 4 22 -1. + <_> + 17 1 2 11 2. + <_> + 15 12 2 11 2. + <_> + + <_> + 5 1 4 22 -1. + <_> + 5 1 2 11 2. + <_> + 7 12 2 11 2. + <_> + + <_> + 11 13 8 9 -1. + <_> + 11 16 8 3 3. + <_> + + <_> + 6 1 6 9 -1. + <_> + 8 1 2 9 3. + <_> + + <_> + 11 4 3 18 -1. + <_> + 11 10 3 6 3. + <_> + + <_> + 5 8 12 6 -1. + <_> + 5 8 6 3 2. + <_> + 11 11 6 3 2. + <_> + + <_> + 15 7 5 8 -1. + <_> + 15 11 5 4 2. + <_> + + <_> + 4 7 5 8 -1. + <_> + 4 11 5 4 2. + <_> + + <_> + 12 6 6 12 -1. + <_> + 15 6 3 6 2. + <_> + 12 12 3 6 2. + <_> + + <_> + 6 6 6 12 -1. + <_> + 6 6 3 6 2. + <_> + 9 12 3 6 2. + <_> + + <_> + 5 9 14 8 -1. + <_> + 12 9 7 4 2. + <_> + 5 13 7 4 2. + <_> + + <_> + 9 1 3 14 -1. + <_> + 9 8 3 7 2. + <_> + + <_> + 12 6 6 12 -1. + <_> + 12 10 6 4 3. + <_> + + <_> + 4 5 4 18 -1. + <_> + 4 5 2 9 2. + <_> + 6 14 2 9 2. + <_> + + <_> + 4 6 16 18 -1. + <_> + 4 12 16 6 3. + <_> + + <_> + 5 4 7 20 -1. + <_> + 5 14 7 10 2. + <_> + + <_> + 14 8 8 12 -1. + <_> + 14 14 8 6 2. + <_> + + <_> + 9 10 6 14 -1. + <_> + 9 10 3 7 2. + <_> + 12 17 3 7 2. + <_> + + <_> + 9 5 9 6 -1. + <_> + 12 5 3 6 3. + <_> + + <_> + 9 4 3 18 -1. + <_> + 10 4 1 18 3. + <_> + + <_> + 1 4 22 14 -1. + <_> + 12 4 11 7 2. + <_> + 1 11 11 7 2. + <_> + + <_> + 2 7 18 2 -1. + <_> + 2 8 18 1 2. + <_> + + <_> + 12 6 6 12 -1. + <_> + 12 10 6 4 3. + <_> + + <_> + 6 5 9 7 -1. + <_> + 9 5 3 7 3. + <_> + + <_> + 12 7 4 12 -1. + <_> + 12 13 4 6 2. + <_> + + <_> + 8 7 4 12 -1. + <_> + 8 13 4 6 2. + <_> + + <_> + 7 2 10 22 -1. + <_> + 7 13 10 11 2. + <_> + + <_> + 0 1 3 20 -1. + <_> + 1 1 1 20 3. + <_> + + <_> + 4 13 18 4 -1. + <_> + 13 13 9 2 2. + <_> + 4 15 9 2 2. + <_> + + <_> + 2 13 18 4 -1. + <_> + 2 13 9 2 2. + <_> + 11 15 9 2 2. + <_> + + <_> + 15 15 9 6 -1. + <_> + 15 17 9 2 3. + <_> + + <_> + 0 15 9 6 -1. + <_> + 0 17 9 2 3. + <_> + + <_> + 6 0 18 24 -1. + <_> + 15 0 9 12 2. + <_> + 6 12 9 12 2. + <_> + + <_> + 6 6 6 12 -1. + <_> + 6 10 6 4 3. + <_> + + <_> + 8 7 10 4 -1. + <_> + 8 9 10 2 2. + <_> + + <_> + 1 9 18 6 -1. + <_> + 1 9 9 3 2. + <_> + 10 12 9 3 2. + <_> + + <_> + 6 6 18 3 -1. + <_> + 6 7 18 1 3. + <_> + + <_> + 7 7 9 8 -1. + <_> + 10 7 3 8 3. 
+ <_> + + <_> + 10 12 6 12 -1. + <_> + 12 12 2 12 3. + <_> + + <_> + 3 14 18 3 -1. + <_> + 3 15 18 1 3. + <_> + + <_> + 15 17 9 7 -1. + <_> + 18 17 3 7 3. + <_> + + <_> + 1 12 10 6 -1. + <_> + 1 14 10 2 3. + <_> + + <_> + 15 17 9 7 -1. + <_> + 18 17 3 7 3. + <_> + + <_> + 10 3 3 19 -1. + <_> + 11 3 1 19 3. + <_> + + <_> + 15 17 9 7 -1. + <_> + 18 17 3 7 3. + <_> + + <_> + 6 1 11 9 -1. + <_> + 6 4 11 3 3. + <_> + + <_> + 15 17 9 7 -1. + <_> + 18 17 3 7 3. + <_> + + <_> + 6 5 11 6 -1. + <_> + 6 8 11 3 2. + <_> + + <_> + 16 7 8 5 -1. + <_> + 16 7 4 5 2. + <_> + + <_> + 2 4 20 19 -1. + <_> + 12 4 10 19 2. + <_> + + <_> + 2 1 21 6 -1. + <_> + 9 1 7 6 3. + <_> + + <_> + 6 5 12 14 -1. + <_> + 6 5 6 7 2. + <_> + 12 12 6 7 2. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 2 11 8 5 -1. + <_> + 6 11 4 5 2. + <_> + + <_> + 16 7 8 5 -1. + <_> + 16 7 4 5 2. + <_> + + <_> + 0 7 8 5 -1. + <_> + 4 7 4 5 2. + <_> + + <_> + 15 17 9 7 -1. + <_> + 18 17 3 7 3. + <_> + + <_> + 8 6 8 10 -1. + <_> + 8 6 4 5 2. + <_> + 12 11 4 5 2. + <_> + + <_> + 15 15 9 9 -1. + <_> + 18 15 3 9 3. + <_> + + <_> + 0 15 9 9 -1. + <_> + 3 15 3 9 3. + <_> + + <_> + 12 10 9 7 -1. + <_> + 15 10 3 7 3. + <_> + + <_> + 3 10 9 7 -1. + <_> + 6 10 3 7 3. + <_> + + <_> + 13 15 10 8 -1. + <_> + 18 15 5 4 2. + <_> + 13 19 5 4 2. + <_> + + <_> + 0 1 6 12 -1. + <_> + 0 1 3 6 2. + <_> + 3 7 3 6 2. + <_> + + <_> + 10 0 6 12 -1. + <_> + 13 0 3 6 2. + <_> + 10 6 3 6 2. + <_> + + <_> + 7 0 10 12 -1. + <_> + 7 0 5 6 2. + <_> + 12 6 5 6 2. + <_> + + <_> + 4 1 16 8 -1. + <_> + 4 1 8 8 2. + <_> + + <_> + 0 21 19 3 -1. + <_> + 0 22 19 1 3. + <_> + + <_> + 6 9 18 4 -1. + <_> + 15 9 9 2 2. + <_> + 6 11 9 2 2. + <_> + + <_> + 3 4 9 6 -1. + <_> + 3 6 9 2 3. + <_> + + <_> + 9 1 6 15 -1. + <_> + 9 6 6 5 3. + <_> + + <_> + 5 9 6 6 -1. + <_> + 8 9 3 6 2. + <_> + + <_> + 5 1 14 9 -1. + <_> + 5 4 14 3 3. + <_> + + <_> + 3 0 8 20 -1. + <_> + 3 0 4 10 2. + <_> + 7 10 4 10 2. + <_> + + <_> + 5 0 7 9 -1. + <_> + 5 3 7 3 3. + <_> + + <_> + 6 6 12 5 -1. + <_> + 10 6 4 5 3. + <_> + + <_> + 0 1 8 14 -1. + <_> + 4 1 4 14 2. + <_> + + <_> + 2 12 22 4 -1. + <_> + 2 14 22 2 2. + <_> + + <_> + 8 17 6 6 -1. + <_> + 8 20 6 3 2. + <_> + + <_> + 18 1 6 7 -1. + <_> + 18 1 3 7 2. + <_> + + <_> + 0 0 6 6 -1. + <_> + 3 0 3 6 2. + <_> + + <_> + 4 6 17 18 -1. + <_> + 4 12 17 6 3. + <_> + + <_> + 6 0 12 6 -1. + <_> + 6 0 6 3 2. + <_> + 12 3 6 3 2. + <_> + + <_> + 4 7 18 4 -1. + <_> + 13 7 9 2 2. + <_> + 4 9 9 2 2. + <_> + + <_> + 4 12 10 6 -1. + <_> + 4 14 10 2 3. + <_> + + <_> + 7 9 10 12 -1. + <_> + 12 9 5 6 2. + <_> + 7 15 5 6 2. + <_> + + <_> + 0 1 24 3 -1. + <_> + 8 1 8 3 3. + <_> + + <_> + 13 11 6 6 -1. + <_> + 13 11 3 6 2. + <_> + + <_> + 5 11 6 6 -1. + <_> + 8 11 3 6 2. + <_> + + <_> + 3 10 19 3 -1. + <_> + 3 11 19 1 3. + <_> + + <_> + 0 2 6 9 -1. + <_> + 0 5 6 3 3. + <_> + + <_> + 14 16 10 6 -1. + <_> + 14 18 10 2 3. + <_> + + <_> + 0 16 10 6 -1. + <_> + 0 18 10 2 3. + <_> + + <_> + 14 13 9 6 -1. + <_> + 14 15 9 2 3. + <_> + + <_> + 0 16 18 3 -1. + <_> + 0 17 18 1 3. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 0 18 9 6 -1. + <_> + 0 20 9 2 3. + <_> + + <_> + 14 13 9 6 -1. + <_> + 14 15 9 2 3. + <_> + + <_> + 6 2 6 9 -1. + <_> + 8 2 2 9 3. + <_> + + <_> + 15 8 4 12 -1. + <_> + 15 8 2 12 2. + <_> + + <_> + 8 13 8 8 -1. + <_> + 8 17 8 4 2. + <_> + + <_> + 4 20 18 3 -1. + <_> + 10 20 6 3 3. + <_> + + <_> + 5 8 4 12 -1. + <_> + 7 8 2 12 2. + <_> + + <_> + 7 7 12 3 -1. + <_> + 7 7 6 3 2. + <_> + + <_> + 10 6 4 9 -1. + <_> + 12 6 2 9 2. 
+ <_> + + <_> + 5 20 18 3 -1. + <_> + 11 20 6 3 3. + <_> + + <_> + 1 20 18 3 -1. + <_> + 7 20 6 3 3. + <_> + + <_> + 18 1 6 20 -1. + <_> + 21 1 3 10 2. + <_> + 18 11 3 10 2. + <_> + + <_> + 0 1 6 20 -1. + <_> + 0 1 3 10 2. + <_> + 3 11 3 10 2. + <_> + + <_> + 13 3 4 18 -1. + <_> + 15 3 2 9 2. + <_> + 13 12 2 9 2. + <_> + + <_> + 0 2 6 12 -1. + <_> + 0 6 6 4 3. + <_> + + <_> + 12 9 12 6 -1. + <_> + 18 9 6 3 2. + <_> + 12 12 6 3 2. + <_> + + <_> + 7 3 4 18 -1. + <_> + 7 3 2 9 2. + <_> + 9 12 2 9 2. + <_> + + <_> + 14 0 6 9 -1. + <_> + 16 0 2 9 3. + <_> + + <_> + 0 9 12 6 -1. + <_> + 0 9 6 3 2. + <_> + 6 12 6 3 2. + <_> + + <_> + 14 4 8 20 -1. + <_> + 18 4 4 10 2. + <_> + 14 14 4 10 2. + <_> + + <_> + 2 4 8 20 -1. + <_> + 2 4 4 10 2. + <_> + 6 14 4 10 2. + <_> + + <_> + 14 13 9 6 -1. + <_> + 14 15 9 2 3. + <_> + + <_> + 1 13 9 6 -1. + <_> + 1 15 9 2 3. + <_> + + <_> + 3 15 18 3 -1. + <_> + 9 15 6 3 3. + <_> + + <_> + 5 13 9 6 -1. + <_> + 5 15 9 2 3. + <_> + + <_> + 5 0 18 3 -1. + <_> + 5 1 18 1 3. + <_> + + <_> + 8 2 6 7 -1. + <_> + 11 2 3 7 2. + <_> + + <_> + 9 1 9 6 -1. + <_> + 12 1 3 6 3. + <_> + + <_> + 6 1 9 6 -1. + <_> + 9 1 3 6 3. + <_> + + <_> + 5 6 14 6 -1. + <_> + 12 6 7 3 2. + <_> + 5 9 7 3 2. + <_> + + <_> + 8 2 6 13 -1. + <_> + 10 2 2 13 3. + <_> + + <_> + 6 11 12 6 -1. + <_> + 12 11 6 3 2. + <_> + 6 14 6 3 2. + <_> + + <_> + 3 1 18 15 -1. + <_> + 9 1 6 15 3. + <_> + + <_> + 13 0 6 7 -1. + <_> + 13 0 3 7 2. + <_> + + <_> + 3 3 16 6 -1. + <_> + 3 6 16 3 2. + <_> + + <_> + 12 1 3 12 -1. + <_> + 12 7 3 6 2. + <_> + + <_> + 7 7 6 9 -1. + <_> + 9 7 2 9 3. + <_> + + <_> + 13 0 4 24 -1. + <_> + 13 0 2 24 2. + <_> + + <_> + 7 0 4 24 -1. + <_> + 9 0 2 24 2. + <_> + + <_> + 11 9 5 12 -1. + <_> + 11 13 5 4 3. + <_> + + <_> + 7 15 9 6 -1. + <_> + 7 17 9 2 3. + <_> + + <_> + 5 7 18 6 -1. + <_> + 5 9 18 2 3. + <_> + + <_> + 8 9 5 12 -1. + <_> + 8 13 5 4 3. + <_> + + <_> + 4 17 17 6 -1. + <_> + 4 19 17 2 3. + <_> + + <_> + 0 3 18 14 -1. + <_> + 0 3 9 7 2. + <_> + 9 10 9 7 2. + <_> + + <_> + 0 1 24 2 -1. + <_> + 0 2 24 1 2. + <_> + + <_> + 0 15 18 3 -1. + <_> + 0 16 18 1 3. + <_> + + <_> + 9 0 6 9 -1. + <_> + 11 0 2 9 3. + <_> + + <_> + 3 3 14 12 -1. + <_> + 3 9 14 6 2. + <_> + + <_> + 12 1 3 12 -1. + <_> + 12 7 3 6 2. + <_> + + <_> + 8 0 6 9 -1. + <_> + 10 0 2 9 3. + <_> + + <_> + 10 6 6 10 -1. + <_> + 12 6 2 10 3. + <_> + + <_> + 5 0 6 9 -1. + <_> + 7 0 2 9 3. + <_> + + <_> + 2 0 21 7 -1. + <_> + 9 0 7 7 3. + <_> + + <_> + 6 11 12 5 -1. + <_> + 10 11 4 5 3. + <_> + + <_> + 8 7 9 8 -1. + <_> + 11 7 3 8 3. + <_> + + <_> + 9 6 6 18 -1. + <_> + 9 6 3 9 2. + <_> + 12 15 3 9 2. + <_> + + <_> + 15 14 8 10 -1. + <_> + 19 14 4 5 2. + <_> + 15 19 4 5 2. + <_> + + <_> + 1 14 8 10 -1. + <_> + 1 14 4 5 2. + <_> + 5 19 4 5 2. + <_> + + <_> + 11 0 8 10 -1. + <_> + 15 0 4 5 2. + <_> + 11 5 4 5 2. + <_> + + <_> + 5 0 8 10 -1. + <_> + 5 0 4 5 2. + <_> + 9 5 4 5 2. + <_> + + <_> + 6 1 12 5 -1. + <_> + 6 1 6 5 2. + <_> + + <_> + 1 12 18 2 -1. + <_> + 10 12 9 2 2. + <_> + + <_> + 2 8 20 6 -1. + <_> + 12 8 10 3 2. + <_> + 2 11 10 3 2. + <_> + + <_> + 7 6 9 7 -1. + <_> + 10 6 3 7 3. + <_> + + <_> + 10 5 8 16 -1. + <_> + 14 5 4 8 2. + <_> + 10 13 4 8 2. + <_> + + <_> + 3 9 16 8 -1. + <_> + 3 9 8 4 2. + <_> + 11 13 8 4 2. + <_> + + <_> + 7 8 10 4 -1. + <_> + 7 8 5 4 2. + <_> + + <_> + 7 12 10 8 -1. + <_> + 7 12 5 4 2. + <_> + 12 16 5 4 2. + <_> + + <_> + 9 19 15 4 -1. + <_> + 14 19 5 4 3. + <_> + + <_> + 1 0 18 9 -1. + <_> + 7 0 6 9 3. + <_> + + <_> + 13 4 10 8 -1. + <_> + 18 4 5 4 2. + <_> + 13 8 5 4 2. 
+ <_> + + <_> + 3 16 18 4 -1. + <_> + 9 16 6 4 3. + <_> + + <_> + 8 7 10 12 -1. + <_> + 13 7 5 6 2. + <_> + 8 13 5 6 2. + <_> + + <_> + 6 7 10 12 -1. + <_> + 6 7 5 6 2. + <_> + 11 13 5 6 2. + <_> + + <_> + 4 6 18 7 -1. + <_> + 10 6 6 7 3. + <_> + + <_> + 0 17 18 3 -1. + <_> + 0 18 18 1 3. + <_> + + <_> + 3 17 18 3 -1. + <_> + 3 18 18 1 3. + <_> + + <_> + 2 4 6 10 -1. + <_> + 4 4 2 10 3. + <_> + + <_> + 16 0 8 24 -1. + <_> + 16 0 4 24 2. + <_> + + <_> + 4 0 8 15 -1. + <_> + 8 0 4 15 2. + <_> + + <_> + 16 0 8 24 -1. + <_> + 16 0 4 24 2. + <_> + + <_> + 1 4 18 9 -1. + <_> + 7 4 6 9 3. + <_> + + <_> + 15 12 9 6 -1. + <_> + 15 14 9 2 3. + <_> + + <_> + 3 9 18 6 -1. + <_> + 3 9 9 3 2. + <_> + 12 12 9 3 2. + <_> + + <_> + 18 5 6 9 -1. + <_> + 18 8 6 3 3. + <_> + + <_> + 0 5 6 9 -1. + <_> + 0 8 6 3 3. + <_> + + <_> + 4 7 18 4 -1. + <_> + 13 7 9 2 2. + <_> + 4 9 9 2 2. + <_> + + <_> + 2 1 12 20 -1. + <_> + 2 1 6 10 2. + <_> + 8 11 6 10 2. + <_> + + <_> + 17 0 6 23 -1. + <_> + 17 0 3 23 2. + <_> + + <_> + 1 6 2 18 -1. + <_> + 1 15 2 9 2. + <_> + + <_> + 8 8 10 6 -1. + <_> + 8 10 10 2 3. + <_> + + <_> + 0 6 20 6 -1. + <_> + 0 6 10 3 2. + <_> + 10 9 10 3 2. + <_> + + <_> + 11 12 12 5 -1. + <_> + 15 12 4 5 3. + <_> + + <_> + 0 4 3 19 -1. + <_> + 1 4 1 19 3. + <_> + + <_> + 19 1 3 18 -1. + <_> + 20 1 1 18 3. + <_> + + <_> + 2 1 3 18 -1. + <_> + 3 1 1 18 3. + <_> + + <_> + 3 10 18 3 -1. + <_> + 9 10 6 3 3. + <_> + + <_> + 4 4 10 9 -1. + <_> + 9 4 5 9 2. + <_> + + <_> + 7 13 14 7 -1. + <_> + 7 13 7 7 2. + <_> + + <_> + 3 13 14 7 -1. + <_> + 10 13 7 7 2. + <_> + + <_> + 8 15 9 6 -1. + <_> + 11 15 3 6 3. + <_> + + <_> + 4 14 8 10 -1. + <_> + 4 14 4 5 2. + <_> + 8 19 4 5 2. + <_> + + <_> + 10 14 4 10 -1. + <_> + 10 19 4 5 2. + <_> + + <_> + 3 8 5 16 -1. + <_> + 3 16 5 8 2. + <_> + + <_> + 15 10 9 6 -1. + <_> + 15 12 9 2 3. + <_> + + <_> + 0 10 9 6 -1. + <_> + 0 12 9 2 3. + <_> + + <_> + 6 7 12 9 -1. + <_> + 6 10 12 3 3. + <_> + + <_> + 9 10 5 8 -1. + <_> + 9 14 5 4 2. + <_> + + <_> + 12 1 3 12 -1. + <_> + 12 7 3 6 2. + <_> + + <_> + 8 15 6 9 -1. + <_> + 10 15 2 9 3. + <_> + + <_> + 16 6 7 6 -1. + <_> + 16 9 7 3 2. + <_> + + <_> + 8 1 4 22 -1. + <_> + 10 1 2 22 2. + <_> + + <_> + 6 6 14 3 -1. + <_> + 6 6 7 3 2. + <_> + + <_> + 0 18 19 3 -1. + <_> + 0 19 19 1 3. + <_> + + <_> + 17 0 6 24 -1. + <_> + 17 0 3 24 2. + <_> + + <_> + 0 13 15 6 -1. + <_> + 5 13 5 6 3. + <_> + + <_> + 9 6 10 14 -1. + <_> + 14 6 5 7 2. + <_> + 9 13 5 7 2. + <_> + + <_> + 1 6 8 10 -1. + <_> + 1 6 4 5 2. + <_> + 5 11 4 5 2. + <_> + + <_> + 7 6 12 5 -1. + <_> + 7 6 6 5 2. + <_> + + <_> + 7 7 9 6 -1. + <_> + 10 7 3 6 3. + <_> + + <_> + 7 8 14 14 -1. + <_> + 14 8 7 7 2. + <_> + 7 15 7 7 2. + <_> + + <_> + 3 8 14 14 -1. + <_> + 3 8 7 7 2. + <_> + 10 15 7 7 2. + <_> + + <_> + 9 8 13 4 -1. + <_> + 9 10 13 2 2. + <_> + + <_> + 3 2 6 12 -1. + <_> + 3 2 3 6 2. + <_> + 6 8 3 6 2. + <_> + + <_> + 6 10 17 6 -1. + <_> + 6 13 17 3 2. + <_> + + <_> + 1 10 17 6 -1. + <_> + 1 13 17 3 2. + <_> + + <_> + 16 7 8 9 -1. + <_> + 16 10 8 3 3. + <_> + + <_> + 0 7 8 9 -1. + <_> + 0 10 8 3 3. + <_> + + <_> + 0 9 24 10 -1. + <_> + 12 9 12 5 2. + <_> + 0 14 12 5 2. + <_> + + <_> + 3 2 15 8 -1. + <_> + 8 2 5 8 3. + <_> + + <_> + 4 2 18 8 -1. + <_> + 10 2 6 8 3. + <_> + + <_> + 0 1 18 4 -1. + <_> + 0 1 9 2 2. + <_> + 9 3 9 2 2. + <_> + + <_> + 20 2 3 18 -1. + <_> + 21 2 1 18 3. + <_> + + <_> + 1 3 3 19 -1. + <_> + 2 3 1 19 3. + <_> + + <_> + 18 8 6 16 -1. + <_> + 20 8 2 16 3. + <_> + + <_> + 0 8 6 16 -1. + <_> + 2 8 2 16 3. + <_> + + <_> + 8 18 11 6 -1. 
+ <_> + 8 20 11 2 3. + <_> + + <_> + 4 6 12 5 -1. + <_> + 8 6 4 5 3. + <_> + + <_> + 7 6 12 5 -1. + <_> + 11 6 4 5 3. + <_> + + <_> + 6 3 9 6 -1. + <_> + 9 3 3 6 3. + <_> + + <_> + 7 6 12 5 -1. + <_> + 7 6 6 5 2. + <_> + + <_> + 9 8 6 7 -1. + <_> + 12 8 3 7 2. + <_> + + <_> + 8 2 9 6 -1. + <_> + 11 2 3 6 3. + <_> + + <_> + 8 14 6 9 -1. + <_> + 8 17 6 3 3. + <_> + + <_> + 8 2 9 6 -1. + <_> + 11 2 3 6 3. + <_> + + <_> + 4 3 16 20 -1. + <_> + 4 3 8 10 2. + <_> + 12 13 8 10 2. + <_> + + <_> + 7 6 10 12 -1. + <_> + 12 6 5 6 2. + <_> + 7 12 5 6 2. + <_> + + <_> + 0 2 7 12 -1. + <_> + 0 6 7 4 3. + <_> + + <_> + 12 17 11 6 -1. + <_> + 12 19 11 2 3. + <_> + + <_> + 4 7 12 8 -1. + <_> + 4 7 6 4 2. + <_> + 10 11 6 4 2. + <_> + + <_> + 8 11 8 10 -1. + <_> + 12 11 4 5 2. + <_> + 8 16 4 5 2. + <_> + + <_> + 9 1 4 9 -1. + <_> + 11 1 2 9 2. + <_> + + <_> + 14 0 3 22 -1. + <_> + 15 0 1 22 3. + <_> + + <_> + 7 0 3 22 -1. + <_> + 8 0 1 22 3. + <_> + + <_> + 4 7 18 4 -1. + <_> + 13 7 9 2 2. + <_> + 4 9 9 2 2. + <_> + + <_> + 10 2 4 15 -1. + <_> + 10 7 4 5 3. + <_> + + <_> + 12 1 3 12 -1. + <_> + 12 7 3 6 2. + <_> + + <_> + 0 0 18 13 -1. + <_> + 9 0 9 13 2. + <_> + + <_> + 16 0 3 24 -1. + <_> + 17 0 1 24 3. + <_> + + <_> + 5 0 3 24 -1. + <_> + 6 0 1 24 3. + <_> + + <_> + 10 15 5 8 -1. + <_> + 10 19 5 4 2. + <_> + + <_> + 2 18 18 2 -1. + <_> + 2 19 18 1 2. + <_> + + <_> + 2 8 20 3 -1. + <_> + 2 9 20 1 3. + <_> + + <_> + 7 6 9 6 -1. + <_> + 7 8 9 2 3. + <_> + + <_> + 3 2 19 10 -1. + <_> + 3 7 19 5 2. + <_> + + <_> + 2 7 19 3 -1. + <_> + 2 8 19 1 3. + <_> + + <_> + 15 6 9 4 -1. + <_> + 15 8 9 2 2. + <_> + + <_> + 2 2 18 8 -1. + <_> + 8 2 6 8 3. + <_> + + <_> + 10 9 14 4 -1. + <_> + 10 9 7 4 2. + <_> + + <_> + 4 4 6 16 -1. + <_> + 7 4 3 16 2. + <_> + + <_> + 15 8 9 16 -1. + <_> + 18 8 3 16 3. + <_> + + <_> + 0 8 9 16 -1. + <_> + 3 8 3 16 3. + <_> + + <_> + 18 0 6 14 -1. + <_> + 20 0 2 14 3. + <_> + + <_> + 0 0 6 14 -1. + <_> + 2 0 2 14 3. + <_> + + <_> + 15 0 6 22 -1. + <_> + 17 0 2 22 3. + <_> + + <_> + 3 0 6 22 -1. + <_> + 5 0 2 22 3. + <_> + + <_> + 12 2 12 20 -1. + <_> + 16 2 4 20 3. + <_> + + <_> + 0 2 12 20 -1. + <_> + 4 2 4 20 3. + <_> + + <_> + 11 6 4 9 -1. + <_> + 11 6 2 9 2. + <_> + + <_> + 9 0 6 16 -1. + <_> + 12 0 3 16 2. + <_> + + <_> + 12 1 3 12 -1. + <_> + 12 7 3 6 2. + <_> + + <_> + 3 4 18 6 -1. + <_> + 3 4 9 3 2. + <_> + 12 7 9 3 2. + <_> + + <_> + 5 5 16 8 -1. + <_> + 13 5 8 4 2. + <_> + 5 9 8 4 2. + <_> + + <_> + 0 13 10 6 -1. + <_> + 0 15 10 2 3. + <_> + + <_> + 8 14 9 6 -1. + <_> + 8 16 9 2 3. + <_> + + <_> + 6 2 9 6 -1. + <_> + 9 2 3 6 3. + <_> + + <_> + 14 1 10 8 -1. + <_> + 19 1 5 4 2. + <_> + 14 5 5 4 2. + <_> + + <_> + 9 1 3 12 -1. + <_> + 9 7 3 6 2. + <_> + + <_> + 6 4 12 9 -1. + <_> + 6 7 12 3 3. + <_> + + <_> + 6 5 12 6 -1. + <_> + 10 5 4 6 3. + <_> + + <_> + 1 1 8 5 -1. + <_> + 5 1 4 5 2. + <_> + + <_> + 12 12 6 8 -1. + <_> + 12 16 6 4 2. + <_> + + <_> + 3 12 12 6 -1. + <_> + 3 14 12 2 3. + <_> + + <_> + 9 18 12 6 -1. + <_> + 15 18 6 3 2. + <_> + 9 21 6 3 2. + <_> + + <_> + 4 13 6 6 -1. + <_> + 4 16 6 3 2. + <_> + + <_> + 11 3 7 18 -1. + <_> + 11 12 7 9 2. + <_> + + <_> + 3 9 18 3 -1. + <_> + 9 9 6 3 3. + <_> + + <_> + 5 3 19 2 -1. + <_> + 5 4 19 1 2. + <_> + + <_> + 4 2 12 6 -1. + <_> + 4 2 6 3 2. + <_> + 10 5 6 3 2. + <_> + + <_> + 9 6 6 9 -1. + <_> + 11 6 2 9 3. + <_> + + <_> + 8 6 6 9 -1. + <_> + 10 6 2 9 3. + <_> + + <_> + 16 9 5 15 -1. + <_> + 16 14 5 5 3. + <_> + + <_> + 3 9 5 15 -1. + <_> + 3 14 5 5 3. + <_> + + <_> + 6 6 14 6 -1. + <_> + 13 6 7 3 2. + <_> + 6 9 7 3 2. 
+ <_> + + <_> + 8 6 3 14 -1. + <_> + 8 13 3 7 2. + <_> + + <_> + 0 16 24 5 -1. + <_> + 8 16 8 5 3. + <_> + + <_> + 0 20 20 3 -1. + <_> + 10 20 10 3 2. + <_> + + <_> + 5 10 18 2 -1. + <_> + 5 11 18 1 2. + <_> + + <_> + 0 6 6 10 -1. + <_> + 2 6 2 10 3. + <_> + + <_> + 2 1 20 3 -1. + <_> + 2 2 20 1 3. + <_> + + <_> + 9 13 6 11 -1. + <_> + 11 13 2 11 3. + <_> + + <_> + 9 15 6 8 -1. + <_> + 9 19 6 4 2. + <_> + + <_> + 9 12 6 9 -1. + <_> + 9 15 6 3 3. + <_> + + <_> + 5 11 18 2 -1. + <_> + 5 12 18 1 2. + <_> + + <_> + 2 6 15 6 -1. + <_> + 2 8 15 2 3. + <_> + + <_> + 6 0 18 3 -1. + <_> + 6 1 18 1 3. + <_> + + <_> + 5 0 3 18 -1. + <_> + 6 0 1 18 3. + <_> + + <_> + 18 3 6 10 -1. + <_> + 20 3 2 10 3. + <_> + + <_> + 0 3 6 10 -1. + <_> + 2 3 2 10 3. + <_> + + <_> + 10 5 8 9 -1. + <_> + 10 5 4 9 2. + <_> + + <_> + 6 5 8 9 -1. + <_> + 10 5 4 9 2. + <_> + + <_> + 3 2 20 3 -1. + <_> + 3 3 20 1 3. + <_> + + <_> + 5 2 13 4 -1. + <_> + 5 4 13 2 2. + <_> + + <_> + 17 0 7 14 -1. + <_> + 17 7 7 7 2. + <_> + + <_> + 0 0 7 14 -1. + <_> + 0 7 7 7 2. + <_> + + <_> + 9 11 10 6 -1. + <_> + 9 11 5 6 2. + <_> + + <_> + 5 11 10 6 -1. + <_> + 10 11 5 6 2. + <_> + + <_> + 11 6 3 18 -1. + <_> + 11 12 3 6 3. + <_> + + <_> + 0 16 18 3 -1. + <_> + 0 17 18 1 3. + <_> + + <_> + 6 16 18 3 -1. + <_> + 6 17 18 1 3. + <_> + + <_> + 4 6 9 10 -1. + <_> + 4 11 9 5 2. + <_> + + <_> + 9 7 15 4 -1. + <_> + 9 9 15 2 2. + <_> + + <_> + 5 6 12 6 -1. + <_> + 5 6 6 3 2. + <_> + 11 9 6 3 2. + <_> + + <_> + 6 1 12 9 -1. + <_> + 6 4 12 3 3. + <_> + + <_> + 7 9 6 12 -1. + <_> + 7 9 3 6 2. + <_> + 10 15 3 6 2. + <_> + + <_> + 11 5 13 6 -1. + <_> + 11 7 13 2 3. + <_> + + <_> + 1 11 22 13 -1. + <_> + 12 11 11 13 2. + <_> + + <_> + 18 8 6 6 -1. + <_> + 18 11 6 3 2. + <_> + + <_> + 0 8 6 6 -1. + <_> + 0 11 6 3 2. + <_> + + <_> + 0 6 24 3 -1. + <_> + 0 7 24 1 3. + <_> + + <_> + 0 5 10 6 -1. + <_> + 0 7 10 2 3. + <_> + + <_> + 6 7 18 3 -1. + <_> + 6 8 18 1 3. + <_> + + <_> + 0 0 10 6 -1. + <_> + 0 2 10 2 3. + <_> + + <_> + 19 0 3 19 -1. + <_> + 20 0 1 19 3. + <_> + + <_> + 4 6 12 16 -1. + <_> + 4 6 6 8 2. + <_> + 10 14 6 8 2. + <_> + + <_> + 19 6 4 18 -1. + <_> + 21 6 2 9 2. + <_> + 19 15 2 9 2. + <_> + + <_> + 1 6 4 18 -1. + <_> + 1 6 2 9 2. + <_> + 3 15 2 9 2. + <_> + + <_> + 3 21 18 3 -1. + <_> + 3 22 18 1 3. + <_> + + <_> + 0 19 9 4 -1. + <_> + 0 21 9 2 2. + <_> + + <_> + 12 18 12 6 -1. + <_> + 18 18 6 3 2. + <_> + 12 21 6 3 2. + <_> + + <_> + 7 18 9 4 -1. + <_> + 7 20 9 2 2. + <_> + + <_> + 12 16 10 8 -1. + <_> + 17 16 5 4 2. + <_> + 12 20 5 4 2. + <_> + + <_> + 2 16 10 8 -1. + <_> + 2 16 5 4 2. + <_> + 7 20 5 4 2. + <_> + + <_> + 14 0 10 12 -1. + <_> + 19 0 5 6 2. + <_> + 14 6 5 6 2. + <_> + + <_> + 0 0 10 12 -1. + <_> + 0 0 5 6 2. + <_> + 5 6 5 6 2. + <_> + + <_> + 15 14 9 6 -1. + <_> + 15 16 9 2 3. + <_> + + <_> + 0 14 9 6 -1. + <_> + 0 16 9 2 3. + <_> + + <_> + 14 14 10 6 -1. + <_> + 14 16 10 2 3. + <_> + + <_> + 0 14 10 6 -1. + <_> + 0 16 10 2 3. + <_> + + <_> + 5 18 18 2 -1. + <_> + 5 19 18 1 2. + <_> + + <_> + 0 18 18 3 -1. + <_> + 0 19 18 1 3. + <_> + + <_> + 3 5 18 12 -1. + <_> + 12 5 9 6 2. + <_> + 3 11 9 6 2. + <_> + + <_> + 5 3 7 9 -1. + <_> + 5 6 7 3 3. + <_> + + <_> + 4 0 19 15 -1. + <_> + 4 5 19 5 3. + <_> + + <_> + 3 0 16 4 -1. + <_> + 3 2 16 2 2. + <_> + + <_> + 4 12 16 12 -1. + <_> + 4 12 8 12 2. + <_> + + <_> + 4 3 12 15 -1. + <_> + 10 3 6 15 2. + <_> + + <_> + 16 4 2 19 -1. + <_> + 16 4 1 19 2. + <_> + + <_> + 6 4 2 19 -1. + <_> + 7 4 1 19 2. + <_> + + <_> + 13 14 8 10 -1. + <_> + 17 14 4 5 2. + <_> + 13 19 4 5 2. 
+ <_> + + <_> + 3 14 8 10 -1. + <_> + 3 14 4 5 2. + <_> + 7 19 4 5 2. + <_> + + <_> + 12 6 3 18 -1. + <_> + 12 12 3 6 3. + <_> + + <_> + 5 11 12 6 -1. + <_> + 5 11 6 3 2. + <_> + 11 14 6 3 2. + <_> + + <_> + 10 5 8 10 -1. + <_> + 14 5 4 5 2. + <_> + 10 10 4 5 2. + <_> + + <_> + 6 4 12 10 -1. + <_> + 6 4 6 5 2. + <_> + 12 9 6 5 2. + <_> + + <_> + 6 8 18 10 -1. + <_> + 15 8 9 5 2. + <_> + 6 13 9 5 2. + <_> + + <_> + 0 8 18 10 -1. + <_> + 0 8 9 5 2. + <_> + 9 13 9 5 2. + <_> + + <_> + 12 6 3 18 -1. + <_> + 12 12 3 6 3. + <_> + + <_> + 0 14 18 3 -1. + <_> + 0 15 18 1 3. + <_> + + <_> + 12 6 3 18 -1. + <_> + 12 12 3 6 3. + <_> + + <_> + 9 6 3 18 -1. + <_> + 9 12 3 6 3. + <_> + + <_> + 6 14 18 3 -1. + <_> + 6 15 18 1 3. + <_> + + <_> + 0 5 18 3 -1. + <_> + 0 6 18 1 3. + <_> + + <_> + 2 5 22 3 -1. + <_> + 2 6 22 1 3. + <_> + + <_> + 0 0 21 10 -1. + <_> + 7 0 7 10 3. + <_> + + <_> + 6 3 18 17 -1. + <_> + 12 3 6 17 3. + <_> + + <_> + 0 3 18 17 -1. + <_> + 6 3 6 17 3. + <_> + + <_> + 0 12 24 11 -1. + <_> + 8 12 8 11 3. + <_> + + <_> + 4 10 16 6 -1. + <_> + 4 13 16 3 2. + <_> + + <_> + 12 8 6 8 -1. + <_> + 12 12 6 4 2. + <_> + + <_> + 6 14 8 7 -1. + <_> + 10 14 4 7 2. + <_> + + <_> + 15 10 6 14 -1. + <_> + 18 10 3 7 2. + <_> + 15 17 3 7 2. + <_> + + <_> + 3 10 6 14 -1. + <_> + 3 10 3 7 2. + <_> + 6 17 3 7 2. + <_> + + <_> + 6 12 18 2 -1. + <_> + 6 13 18 1 2. + <_> + + <_> + 5 8 10 6 -1. + <_> + 5 10 10 2 3. + <_> + + <_> + 12 11 9 4 -1. + <_> + 12 13 9 2 2. + <_> + + <_> + 0 11 9 6 -1. + <_> + 0 13 9 2 3. + <_> + + <_> + 11 2 3 18 -1. + <_> + 12 2 1 18 3. + <_> + + <_> + 10 2 3 18 -1. + <_> + 11 2 1 18 3. + <_> + + <_> + 9 12 6 10 -1. + <_> + 11 12 2 10 3. + <_> + + <_> + 1 10 6 9 -1. + <_> + 1 13 6 3 3. + <_> + + <_> + 6 9 16 6 -1. + <_> + 14 9 8 3 2. + <_> + 6 12 8 3 2. + <_> + + <_> + 1 8 9 6 -1. + <_> + 1 10 9 2 3. + <_> + + <_> + 7 7 16 6 -1. + <_> + 7 9 16 2 3. + <_> + + <_> + 0 0 18 3 -1. + <_> + 0 1 18 1 3. + <_> + + <_> + 10 0 6 9 -1. + <_> + 12 0 2 9 3. + <_> + + <_> + 9 5 6 6 -1. + <_> + 12 5 3 6 2. + <_> + + <_> + 10 6 4 18 -1. + <_> + 12 6 2 9 2. + <_> + 10 15 2 9 2. + <_> + + <_> + 8 0 6 9 -1. + <_> + 10 0 2 9 3. + <_> + + <_> + 9 1 6 9 -1. + <_> + 9 4 6 3 3. + <_> + + <_> + 1 0 18 9 -1. + <_> + 1 3 18 3 3. + <_> + + <_> + 0 3 24 3 -1. + <_> + 0 4 24 1 3. + <_> + + <_> + 6 14 9 4 -1. + <_> + 6 16 9 2 2. + <_> + + <_> + 8 9 8 10 -1. + <_> + 12 9 4 5 2. + <_> + 8 14 4 5 2. + <_> + + <_> + 5 2 13 9 -1. + <_> + 5 5 13 3 3. + <_> + + <_> + 4 4 16 9 -1. + <_> + 4 7 16 3 3. + <_> + + <_> + 4 4 14 9 -1. + <_> + 4 7 14 3 3. + <_> + + <_> + 8 5 9 6 -1. + <_> + 8 7 9 2 3. + <_> + + <_> + 1 7 16 6 -1. + <_> + 1 9 16 2 3. + <_> + + <_> + 10 5 13 9 -1. + <_> + 10 8 13 3 3. + <_> + + <_> + 1 5 13 9 -1. + <_> + 1 8 13 3 3. + <_> + + <_> + 0 4 24 6 -1. + <_> + 12 4 12 3 2. + <_> + 0 7 12 3 2. + <_> + + <_> + 1 14 10 9 -1. + <_> + 1 17 10 3 3. + <_> + + <_> + 5 17 18 3 -1. + <_> + 5 18 18 1 3. + <_> + + <_> + 0 16 18 3 -1. + <_> + 0 17 18 1 3. + <_> + + <_> + 9 17 9 6 -1. + <_> + 9 19 9 2 3. + <_> + + <_> + 1 20 22 4 -1. + <_> + 1 20 11 2 2. + <_> + 12 22 11 2 2. + <_> + + <_> + 8 14 8 6 -1. + <_> + 8 17 8 3 2. + <_> + + <_> + 8 6 8 15 -1. + <_> + 8 11 8 5 3. + <_> + + <_> + 5 4 18 3 -1. + <_> + 5 5 18 1 3. + <_> + + <_> + 9 3 5 10 -1. + <_> + 9 8 5 5 2. + <_> + + <_> + 6 8 12 3 -1. + <_> + 6 8 6 3 2. + <_> + + <_> + 2 6 18 6 -1. + <_> + 2 6 9 3 2. + <_> + 11 9 9 3 2. + <_> + + <_> + 10 6 4 18 -1. + <_> + 12 6 2 9 2. + <_> + 10 15 2 9 2. + <_> + + <_> + 7 5 6 6 -1. + <_> + 10 5 3 6 2. 
+ <_> + + <_> + 14 5 2 18 -1. + <_> + 14 14 2 9 2. + <_> + + <_> + 8 5 2 18 -1. + <_> + 8 14 2 9 2. + <_> + + <_> + 9 2 10 6 -1. + <_> + 9 2 5 6 2. + <_> + + <_> + 3 1 18 12 -1. + <_> + 12 1 9 12 2. + <_> + + <_> + 5 2 17 22 -1. + <_> + 5 13 17 11 2. + <_> + + <_> + 4 0 12 6 -1. + <_> + 4 2 12 2 3. + <_> + + <_> + 6 9 16 6 -1. + <_> + 14 9 8 3 2. + <_> + 6 12 8 3 2. + <_> + + <_> + 9 0 5 18 -1. + <_> + 9 9 5 9 2. + <_> + + <_> + 12 0 6 9 -1. + <_> + 14 0 2 9 3. + <_> + + <_> + 6 0 6 9 -1. + <_> + 8 0 2 9 3. + <_> + + <_> + 9 1 6 12 -1. + <_> + 11 1 2 12 3. + <_> + + <_> + 5 9 13 4 -1. + <_> + 5 11 13 2 2. + <_> + + <_> + 5 8 19 3 -1. + <_> + 5 9 19 1 3. + <_> + + <_> + 9 9 6 8 -1. + <_> + 9 13 6 4 2. + <_> + + <_> + 11 9 4 15 -1. + <_> + 11 14 4 5 3. + <_> + + <_> + 2 0 6 14 -1. + <_> + 2 0 3 7 2. + <_> + 5 7 3 7 2. + <_> + + <_> + 15 1 6 14 -1. + <_> + 18 1 3 7 2. + <_> + 15 8 3 7 2. + <_> + + <_> + 3 1 6 14 -1. + <_> + 3 1 3 7 2. + <_> + 6 8 3 7 2. + <_> + + <_> + 3 20 18 4 -1. + <_> + 12 20 9 2 2. + <_> + 3 22 9 2 2. + <_> + + <_> + 5 0 4 20 -1. + <_> + 5 0 2 10 2. + <_> + 7 10 2 10 2. + <_> + + <_> + 16 8 8 12 -1. + <_> + 20 8 4 6 2. + <_> + 16 14 4 6 2. + <_> + + <_> + 0 8 8 12 -1. + <_> + 0 8 4 6 2. + <_> + 4 14 4 6 2. + <_> + + <_> + 13 13 10 8 -1. + <_> + 18 13 5 4 2. + <_> + 13 17 5 4 2. + <_> + + <_> + 1 13 10 8 -1. + <_> + 1 13 5 4 2. + <_> + 6 17 5 4 2. + <_> + + <_> + 15 8 4 15 -1. + <_> + 15 13 4 5 3. + <_> + + <_> + 5 8 4 15 -1. + <_> + 5 13 4 5 3. + <_> + + <_> + 6 11 16 12 -1. + <_> + 6 15 16 4 3. + <_> + + <_> + 2 11 16 12 -1. + <_> + 2 15 16 4 3. + <_> + + <_> + 14 12 7 9 -1. + <_> + 14 15 7 3 3. + <_> + + <_> + 10 1 3 21 -1. + <_> + 10 8 3 7 3. + <_> + + <_> + 13 11 9 4 -1. + <_> + 13 13 9 2 2. + <_> + + <_> + 3 10 17 9 -1. + <_> + 3 13 17 3 3. + <_> + + <_> + 13 8 8 15 -1. + <_> + 13 13 8 5 3. + <_> + + <_> + 3 8 8 15 -1. + <_> + 3 13 8 5 3. + <_> + + <_> + 11 14 10 8 -1. + <_> + 16 14 5 4 2. + <_> + 11 18 5 4 2. + <_> + + <_> + 0 18 22 6 -1. + <_> + 0 18 11 3 2. + <_> + 11 21 11 3 2. + <_> + + <_> + 0 16 24 4 -1. + <_> + 0 16 12 4 2. + <_> + + <_> + 6 20 12 3 -1. + <_> + 12 20 6 3 2. + <_> + + <_> + 18 12 6 12 -1. + <_> + 21 12 3 6 2. + <_> + 18 18 3 6 2. + <_> + + <_> + 0 12 6 12 -1. + <_> + 0 12 3 6 2. + <_> + 3 18 3 6 2. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 1 6 22 10 -1. + <_> + 1 6 11 5 2. + <_> + 12 11 11 5 2. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 0 18 18 2 -1. + <_> + 0 19 18 1 2. + <_> + + <_> + 3 15 19 3 -1. + <_> + 3 16 19 1 3. + <_> + + <_> + 0 13 18 3 -1. + <_> + 0 14 18 1 3. + <_> + + <_> + 15 17 9 6 -1. + <_> + 15 19 9 2 3. + <_> + + <_> + 0 17 9 6 -1. + <_> + 0 19 9 2 3. + <_> + + <_> + 12 17 9 6 -1. + <_> + 12 19 9 2 3. + <_> + + <_> + 3 17 9 6 -1. + <_> + 3 19 9 2 3. + <_> + + <_> + 16 2 3 20 -1. + <_> + 17 2 1 20 3. + <_> + + <_> + 0 13 24 8 -1. + <_> + 0 17 24 4 2. + <_> + + <_> + 9 1 6 22 -1. + <_> + 12 1 3 11 2. + <_> + 9 12 3 11 2. + diff --git a/projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation/inference_demo.py b/projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation/inference_demo.py new file mode 100644 index 0000000000..c4e00493d0 --- /dev/null +++ b/projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation/inference_demo.py @@ -0,0 +1,274 @@ +""" +Demo script of the image-based facial emotion/expression estimation framework. 
+ +It has three main features: +Image: recognizes facial expressions in images. +Video: recognizes facial expressions in videos in a frame-based approach. +Webcam: connects to a webcam and recognizes facial expressions of the closest face detected +by a face detection algorithm. + +Adapted from: +https://github.com/siqueira-hc/Efficient-Facial-Feature-Learning-with-Wide-Ensemble-based-Convolutional-Neural-Networks +""" + +# Standard Libraries +import argparse +from argparse import RawTextHelpFormatter +import numpy as np +from torchvision import transforms +import PIL +import cv2 + +# OpenDR Modules +from opendr.perception.facial_expression_recognition import FacialEmotionLearner, image_processing + +INPUT_IMAGE_SIZE = (96, 96) +INPUT_IMAGE_NORMALIZATION_MEAN = [0.0, 0.0, 0.0] +INPUT_IMAGE_NORMALIZATION_STD = [1.0, 1.0, 1.0] + + +def is_none(x): + """ + Verifies if the string 'x' is None or empty. + :param x: (string) + :return: (bool) + """ + if (x is None) or ((type(x) == str) and (x.strip() == "")): + return True + else: + return False + + +def detect_face(image): + """ + Detects faces in an image. + :param image: (ndarray) Raw input image. + :return: (list) Tuples with coordinates of a detected face. + """ + + # Converts to greyscale + greyscale_image = image_processing.convert_bgr_to_grey(image) + + # Runs haar cascade classifiers + _FACE_DETECTOR_HAAR_CASCADE = cv2.CascadeClassifier("./face_detector/frontal_face.xml") + faces = _FACE_DETECTOR_HAAR_CASCADE.detectMultiScale(greyscale_image, scaleFactor=1.2, minNeighbors=9, + minSize=(60, 60)) + face_coordinates = [[[x, y], [x + w, y + h]] for (x, y, w, h) in faces] if not (faces is None) else [] + face_coordinates = np.array(face_coordinates) + + # Returns None if no face is detected + return face_coordinates[0] if (len(face_coordinates) > 0 and (np.sum(face_coordinates[0]) > 0)) else None + + +def _pre_process_input_image(image): + """ + Pre-processes an image for ESR-9. + :param image: (ndarray) + :return: (ndarray) image + """ + + image = image_processing.resize(image, INPUT_IMAGE_SIZE) + image = PIL.Image.fromarray(image) + image = transforms.Normalize(mean=INPUT_IMAGE_NORMALIZATION_MEAN, + std=INPUT_IMAGE_NORMALIZATION_STD)(transforms.ToTensor()(image)).unsqueeze(0) + return image.numpy() + + +def _predict(learner, input_face): + """ + Facial emotion/expression estimation. Classifies the pre-processed input image with FacialEmotionLearner. + + :param learner: (FacialEmotionLearner) learner that runs the classification. + :param input_face: (ndarray) pre-processed input face image. + :return: Lists of emotions and affect values including the ensemble predictions based on plurality. + """ + + # Recognizes facial expression + emotion, affect = learner.infer(input_face) + # Converts from Tensor to ndarray + affect = np.array([a.cpu().detach().numpy() for a in affect]) + to_return_affect = affect[0] # a numpy array of valence and arousal values + to_return_emotion = emotion[0] # the emotion class with confidence tensor + + return to_return_emotion, to_return_affect + + +def recognize_facial_expression(learner, image, display): + """ + Detects a face in the input image. + If more than one face is detected, the biggest one is used. + The detected face is fed to the _predict function which runs FacialEmotionLearner for facial emotion/expression + estimation. + :param image: (ndarray) input image.
+ """ + + # Detect face + face_coordinates = detect_face(image) + + if face_coordinates is None: + print("No face detected.") + else: + face = image[face_coordinates[0][1]:face_coordinates[1][1], face_coordinates[0][0]:face_coordinates[1][0], :] + # Pre_process detected face + input_face = _pre_process_input_image(face) + # Recognize facial expression + emotion, affect = _predict(learner, input_face=input_face) + + # display + if display: + image = cv2.putText(image, "Valence: %.2f" % affect[0], (10, 40 + 0 * 30), cv2.FONT_HERSHEY_SIMPLEX, + 1, (0, 255, 255), 2, ) + image = cv2.putText(image, "Arousal: %.2f" % affect[1], (10, 40 + 1 * 30), cv2.FONT_HERSHEY_SIMPLEX, + 1, (0, 255, 255), 2, ) + image = cv2.putText(image, emotion.description, (10, 40 + 2 * 30), cv2.FONT_HERSHEY_SIMPLEX, + 1, (0, 255, 255), 2, ) + else: + print('emotion:', emotion) + print('valence, arousal:', affect) + + return image + + +def webcam(learner, camera_id, display, frames): + """ + Receives images from a camera and recognizes + facial expressions of the closets face in a frame-based approach. + """ + + if not image_processing.initialize_video_capture(camera_id): + raise RuntimeError("Error on initializing video capture." + + "\nCheck whether a webcam is working or not.") + + image_processing.set_fps(frames) + + try: + # Loop to process each frame from a VideoCapture object. + while image_processing.is_video_capture_open(): + # Get a frame + img, _ = image_processing.get_frame() + img = None if (img is None) else recognize_facial_expression(learner, img, display) + if display and img is not None: + cv2.imshow('Result', img) + cv2.waitKey(1) + + except Exception as e: + print("Error raised during video mode.") + raise e + except KeyboardInterrupt: + print("Keyboard interrupt event raised.") + finally: + image_processing.release_video_capture() + if display: + cv2.destroyAllWindows() + + +def image(learner, input_image_path, display): + """ + Receives the full path to an image file and recognizes + facial expressions of the closets face in a frame-based approach. + """ + + img = image_processing.read(input_image_path) + img = recognize_facial_expression(learner, img, display) + if display: + cv2.imshow('Result', img) + cv2.waitKey(0) + + +def video(learner, input_video_path, display, frames): + """ + Receives the full path to a video file and recognizes + facial expressions of the closets face in a frame-based approach. + """ + + if not image_processing.initialize_video_capture(input_video_path): + raise RuntimeError("Error on initializing video capture." + + "\nCheck whether working versions of ffmpeg or gstreamer is installed." + + "\nSupported file format: MPEG-4 (*.mp4).") + image_processing.set_fps(frames) + + try: + # Loop to process each frame from a VideoCapture object. 
+ while image_processing.is_video_capture_open(): + # Get a frame + img, timestamp = image_processing.get_frame() + # Video has been processed + if img is None: + break + else: # Process frame + img = None if (img is None) else recognize_facial_expression(learner, img, display) + if display and img is not None: + cv2.imshow('Result', img) + cv2.waitKey(33) + + except Exception as e: + print("Error raised during video mode.") + raise e + finally: + image_processing.release_video_capture() + if display: + cv2.destroyAllWindows() + + +def main(): + # Parser + parser = argparse.ArgumentParser(description='test', formatter_class=RawTextHelpFormatter) + parser.add_argument("mode", help="select a method among 'image', 'video' or 'webcam' to run ESR-9.", + type=str, choices=["image", "video", "webcam"]) + parser.add_argument("-d", "--display", help="display the output of ESR-9.", + action="store_true") + parser.add_argument("-i", "--input", help="define the full path to an image or video.", + type=str, default='') + parser.add_argument("-es", "--ensemble_size", + help="define the size of the ensemble, the number of branches in the model", + type=int, default=9) + parser.add_argument("--device", help="device to run on, either \'cpu\' or \'cuda\', defaults to \'cuda\'.", + default="cuda") + parser.add_argument("-w", "--webcam_id", + help="define the webcam by 'id' to capture images in the webcam mode. " + + "If none is selected, the default camera of the OS is used.", + type=int, default=-1) + parser.add_argument("-f", "--frames", help="define the fps used for video and webcam captures.", + type=int, default=5) + + args = parser.parse_args() + + learner = FacialEmotionLearner(device=args.device, ensemble_size=args.ensemble_size, dimensional_finetune=False, + categorical_train=False) + learner.init_model(num_branches=args.ensemble_size) + model_path = learner.download(mode="pretrained") + learner.load(args.ensemble_size, path_to_saved_network=model_path) + + # Calls to main methods + if args.mode == "image": + try: + if is_none(args.input): + args.input = learner.download(mode="demo_image") + if is_none(args.input): + raise RuntimeError("Error: 'input' is not valid.
The argument 'input' is a mandatory " + "field when image or video mode is chosen.") + video(learner, args.input, args.display, args.frames) + except RuntimeError as e: + print(e) + elif args.mode == "webcam": + try: + webcam(learner, args.webcam_id, args.display, args.frames) + except RuntimeError as e: + print(e) + + +if __name__ == "__main__": + print("Processing...") + main() + print("Process has finished!") diff --git a/projects/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/README.md b/projects/python/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/README.md similarity index 100% rename from projects/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/README.md rename to projects/python/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/README.md diff --git a/projects/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/benchmark/benchmark_pstbln.py b/projects/python/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/benchmark/benchmark_pstbln.py similarity index 100% rename from projects/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/benchmark/benchmark_pstbln.py rename to projects/python/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/benchmark/benchmark_pstbln.py diff --git a/projects/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/demo.py b/projects/python/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/demo.py similarity index 100% rename from projects/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/demo.py rename to projects/python/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/demo.py diff --git a/projects/perception/fall_detection/README.md b/projects/python/perception/fall_detection/README.md similarity index 100% rename from projects/perception/fall_detection/README.md rename to projects/python/perception/fall_detection/README.md diff --git a/projects/perception/fall_detection/demos/eval_demo.py b/projects/python/perception/fall_detection/demos/eval_demo.py similarity index 100% rename from projects/perception/fall_detection/demos/eval_demo.py rename to projects/python/perception/fall_detection/demos/eval_demo.py diff --git a/projects/perception/fall_detection/demos/inference_demo.py b/projects/python/perception/fall_detection/demos/inference_demo.py similarity index 100% rename from projects/perception/fall_detection/demos/inference_demo.py rename to projects/python/perception/fall_detection/demos/inference_demo.py diff --git a/projects/perception/fall_detection/demos/inference_tutorial.ipynb b/projects/python/perception/fall_detection/demos/inference_tutorial.ipynb similarity index 100% rename from projects/perception/fall_detection/demos/inference_tutorial.ipynb rename to projects/python/perception/fall_detection/demos/inference_tutorial.ipynb diff --git a/projects/perception/fall_detection/demos/webcam_demo.py b/projects/python/perception/fall_detection/demos/webcam_demo.py similarity index 100% rename from projects/perception/fall_detection/demos/webcam_demo.py rename to projects/python/perception/fall_detection/demos/webcam_demo.py diff --git a/projects/perception/heart_anomaly_detection/README.MD 
b/projects/python/perception/heart_anomaly_detection/README.MD similarity index 100% rename from projects/perception/heart_anomaly_detection/README.MD rename to projects/python/perception/heart_anomaly_detection/README.MD diff --git a/projects/perception/heart_anomaly_detection/demo.py b/projects/python/perception/heart_anomaly_detection/demo.py similarity index 100% rename from projects/perception/heart_anomaly_detection/demo.py rename to projects/python/perception/heart_anomaly_detection/demo.py diff --git a/projects/perception/multimodal_human_centric/audiovisual_emotion_recognition/README.MD b/projects/python/perception/multimodal_human_centric/audiovisual_emotion_recognition/README.MD similarity index 100% rename from projects/perception/multimodal_human_centric/audiovisual_emotion_recognition/README.MD rename to projects/python/perception/multimodal_human_centric/audiovisual_emotion_recognition/README.MD diff --git a/projects/perception/multimodal_human_centric/audiovisual_emotion_recognition/audiovisual_emotion_recognition_demo.py b/projects/python/perception/multimodal_human_centric/audiovisual_emotion_recognition/audiovisual_emotion_recognition_demo.py similarity index 100% rename from projects/perception/multimodal_human_centric/audiovisual_emotion_recognition/audiovisual_emotion_recognition_demo.py rename to projects/python/perception/multimodal_human_centric/audiovisual_emotion_recognition/audiovisual_emotion_recognition_demo.py diff --git a/projects/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/README.MD b/projects/python/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/README.MD similarity index 100% rename from projects/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/README.MD rename to projects/python/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/README.MD diff --git a/projects/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/gesture_recognition_demo.py b/projects/python/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/gesture_recognition_demo.py similarity index 100% rename from projects/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/gesture_recognition_demo.py rename to projects/python/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/gesture_recognition_demo.py diff --git a/projects/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/input_depth.png b/projects/python/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/input_depth.png similarity index 100% rename from projects/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/input_depth.png rename to projects/python/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/input_depth.png diff --git a/projects/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/input_rgb.png b/projects/python/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/input_rgb.png similarity index 100% rename from projects/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/input_rgb.png rename to projects/python/perception/multimodal_human_centric/rgbd_hand_gesture_recognition/input_rgb.png diff --git a/projects/perception/object_detection_2d/centernet/README.md b/projects/python/perception/object_detection_2d/centernet/README.md similarity index 100% rename from projects/perception/object_detection_2d/centernet/README.md rename to projects/python/perception/object_detection_2d/centernet/README.md diff --git 
a/projects/perception/object_detection_2d/centernet/eval_demo.py b/projects/python/perception/object_detection_2d/centernet/eval_demo.py similarity index 100% rename from projects/perception/object_detection_2d/centernet/eval_demo.py rename to projects/python/perception/object_detection_2d/centernet/eval_demo.py diff --git a/projects/perception/object_detection_2d/centernet/inference_demo.py b/projects/python/perception/object_detection_2d/centernet/inference_demo.py similarity index 100% rename from projects/perception/object_detection_2d/centernet/inference_demo.py rename to projects/python/perception/object_detection_2d/centernet/inference_demo.py diff --git a/projects/perception/object_detection_2d/centernet/inference_tutorial.ipynb b/projects/python/perception/object_detection_2d/centernet/inference_tutorial.ipynb similarity index 100% rename from projects/perception/object_detection_2d/centernet/inference_tutorial.ipynb rename to projects/python/perception/object_detection_2d/centernet/inference_tutorial.ipynb diff --git a/projects/perception/object_detection_2d/centernet/train_demo.py b/projects/python/perception/object_detection_2d/centernet/train_demo.py similarity index 100% rename from projects/perception/object_detection_2d/centernet/train_demo.py rename to projects/python/perception/object_detection_2d/centernet/train_demo.py diff --git a/projects/perception/object_detection_2d/detr/README.md b/projects/python/perception/object_detection_2d/detr/README.md similarity index 100% rename from projects/perception/object_detection_2d/detr/README.md rename to projects/python/perception/object_detection_2d/detr/README.md diff --git a/projects/perception/object_detection_2d/detr/eval_demo.py b/projects/python/perception/object_detection_2d/detr/eval_demo.py similarity index 100% rename from projects/perception/object_detection_2d/detr/eval_demo.py rename to projects/python/perception/object_detection_2d/detr/eval_demo.py diff --git a/projects/perception/object_detection_2d/detr/inference_demo.py b/projects/python/perception/object_detection_2d/detr/inference_demo.py similarity index 100% rename from projects/perception/object_detection_2d/detr/inference_demo.py rename to projects/python/perception/object_detection_2d/detr/inference_demo.py diff --git a/projects/perception/object_detection_2d/detr/inference_tutorial.ipynb b/projects/python/perception/object_detection_2d/detr/inference_tutorial.ipynb similarity index 100% rename from projects/perception/object_detection_2d/detr/inference_tutorial.ipynb rename to projects/python/perception/object_detection_2d/detr/inference_tutorial.ipynb diff --git a/projects/perception/object_detection_2d/detr/train_demo.py b/projects/python/perception/object_detection_2d/detr/train_demo.py similarity index 100% rename from projects/perception/object_detection_2d/detr/train_demo.py rename to projects/python/perception/object_detection_2d/detr/train_demo.py diff --git a/projects/perception/object_detection_2d/gem/README.md b/projects/python/perception/object_detection_2d/gem/README.md similarity index 100% rename from projects/perception/object_detection_2d/gem/README.md rename to projects/python/perception/object_detection_2d/gem/README.md diff --git a/projects/perception/object_detection_2d/gem/inference_demo.py b/projects/python/perception/object_detection_2d/gem/inference_demo.py similarity index 100% rename from projects/perception/object_detection_2d/gem/inference_demo.py rename to 
projects/python/perception/object_detection_2d/gem/inference_demo.py diff --git a/projects/perception/object_detection_2d/gem/inference_tutorial.ipynb b/projects/python/perception/object_detection_2d/gem/inference_tutorial.ipynb similarity index 100% rename from projects/perception/object_detection_2d/gem/inference_tutorial.ipynb rename to projects/python/perception/object_detection_2d/gem/inference_tutorial.ipynb diff --git a/projects/python/perception/object_detection_2d/nanodet/README.md b/projects/python/perception/object_detection_2d/nanodet/README.md new file mode 100644 index 0000000000..92c456c235 --- /dev/null +++ b/projects/python/perception/object_detection_2d/nanodet/README.md @@ -0,0 +1,18 @@ +# NanoDet Demos + +This folder contains minimal code usage examples that showcase the basic functionality of the NanodetLearner +provided by OpenDR. Specifically, the following examples are provided: +1. inference_demo.py: Perform inference on a single image in a directory. Setting `--device cpu` performs inference on CPU. +2. eval_demo.py: Perform evaluation on the `COCO dataset`, implemented in OpenDR format. The user must first download + the dataset and provide the path to the dataset root via `--data-root /path/to/coco_dataset`. + Setting `--device cpu` performs evaluation on CPU. + +3. train_demo.py: Fit the learner to a dataset. PASCAL VOC and COCO datasets are supported via the `ExternalDataset` class. + An example of training on the `COCO dataset` is provided. The user must set the dataset type using the `--dataset` + argument and provide the dataset root path with the `--data-root` argument. Setting the config file for the specific + model is done with `--model "wanted model name"`. Setting `--device cpu` performs training on CPU. Additional command + line arguments can be set to overwrite various training hyperparameters from the provided config file, and running + `python3 train_demo.py -h` prints information about them on stdout. + + Example usage: + `python3 train_demo.py --model plus-m_416 --dataset coco --data-root /path/to/coco_dataset` \ No newline at end of file diff --git a/projects/python/perception/object_detection_2d/nanodet/eval_demo.py b/projects/python/perception/object_detection_2d/nanodet/eval_demo.py new file mode 100644 index 0000000000..759c6aa4bd --- /dev/null +++ b/projects/python/perception/object_detection_2d/nanodet/eval_demo.py @@ -0,0 +1,34 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
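The train_demo.py script described in the README above is not included in this excerpt of the diff. As an illustrative sketch only, a minimal training entry point might look roughly like the following; it assumes NanodetLearner exposes `fit()` and `save()` methods analogous to the `eval()` and `infer()` calls used in eval_demo.py and inference_demo.py below, and the argument defaults and save path are hypothetical rather than taken from the pull request.

```python
# Illustrative sketch (not part of this pull request): a minimal training
# entry point for the NanodetLearner described in the README above.
# fit()/save() signatures are assumed, not confirmed by this diff.
import argparse

from opendr.engine.datasets import ExternalDataset
from opendr.perception.object_detection_2d import NanodetLearner

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("--dataset", help="Dataset type, 'coco' or 'voc'", type=str, default="coco")
    parser.add_argument("--data-root", help="Dataset root folder", type=str)
    parser.add_argument("--model", help="Model whose config file will be used", type=str, default="plus_m_416")
    parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"])
    args = parser.parse_args()

    # Wrap the pre-downloaded dataset in OpenDR's ExternalDataset format,
    # mirroring what eval_demo.py does below.
    train_dataset = ExternalDataset(args.data_root, args.dataset)
    val_dataset = ExternalDataset(args.data_root, args.dataset)

    nanodet = NanodetLearner(model_to_use=args.model, device=args.device)
    nanodet.fit(train_dataset, val_dataset)  # assumed signature: fit(dataset, val_dataset)
    nanodet.save("./saved_model")            # hypothetical output path
```

Under those assumptions, such a script would be invoked the same way as the README's example usage, e.g. with `--dataset coco --data-root /path/to/coco_dataset`.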
+ +import argparse + +from opendr.perception.object_detection_2d import NanodetLearner +from opendr.engine.datasets import ExternalDataset + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("--data-root", help="Dataset root folder", type=str) + parser.add_argument("--model", help="Model whose config file will be used", type=str) + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) + + args = parser.parse_args() + + val_dataset = ExternalDataset(args.data_root, 'coco') + nanodet = NanodetLearner(model_to_use=args.model, device=args.device) + + nanodet.download("./predefined_examples", mode="pretrained") + nanodet.load("./predefined_examples/nanodet-{}/nanodet-{}.ckpt".format(args.model, args.model), verbose=True) + nanodet.eval(val_dataset) diff --git a/projects/python/perception/object_detection_2d/nanodet/inference_demo.py b/projects/python/perception/object_detection_2d/nanodet/inference_demo.py new file mode 100644 index 0000000000..71e95b15fb --- /dev/null +++ b/projects/python/perception/object_detection_2d/nanodet/inference_demo.py @@ -0,0 +1,34 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +from opendr.perception.object_detection_2d import NanodetLearner +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--model", help="Model whose config file will be used", type=str, default='m') + args = parser.parse_args() + + nanodet = NanodetLearner(model_to_use=args.model, device=args.device) + nanodet.download("./predefined_examples", mode="pretrained") + nanodet.load("./predefined_examples/nanodet_{}".format(args.model), verbose=True) + nanodet.download("./predefined_examples", mode="images") + img = Image.open("./predefined_examples/000000000036.jpg") + boxes = nanodet.infer(input=img) + + draw_bounding_boxes(img.opencv(), boxes, class_names=nanodet.classes, show=True) diff --git a/projects/python/perception/object_detection_2d/nanodet/inference_tutorial.ipynb b/projects/python/perception/object_detection_2d/nanodet/inference_tutorial.ipynb new file mode 100644 index 0000000000..96af81257c --- /dev/null +++ b/projects/python/perception/object_detection_2d/nanodet/inference_tutorial.ipynb @@ -0,0 +1,790 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "f8b84e11-4e6b-40f6-807b-ec27281659e9", + "metadata": { + "tags": [] + }, + "source": [ + "# Nanodet Tutorial\n", + "\n", + "This notebook provides a tutorial for running inference on a static image in order to detect objects.\n", + "The implementation of the [NanodetLearner](../../../../docs/reference/nanodet.md) is largely copied from the [Nanodet github](https://github.com/RangiLyu/nanodet).\n", +
"More information on modifications and license can be found\n", + "[here](https://github.com/opendr-eu/opendr/blob/master/src/opendr/perception/object_detection_2d/nanodet/README.md)." + ] + }, + { + "cell_type": "markdown", + "id": "b671ddd9-583b-418a-870e-69dd3c3db718", + "metadata": {}, + "source": [ + "First, we need to import the learner and initialize it:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "b6f3d99a-b702-472b-b8d0-95a551e7b9ba", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/manos/new_opendr/opendr/venv/lib/python3.8/site-packages/tqdm/auto.py:22: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n", + "/home/manos/new_opendr/opendr/venv/lib/python3.8/site-packages/gluoncv/__init__.py:40: UserWarning: Both `mxnet==1.8.0` and `torch==1.9.0+cu111` are installed. You might encounter increased GPU memory footprint if both framework are used at the same time.\n", + " warnings.warn(f'Both `mxnet=={mx.__version__}` and `torch=={torch.__version__}` are installed. '\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "model size is 1.5x\n", + "init weights...\n", + "Finish initialize NanoDet-Plus Head.\n" + ] + } + ], + "source": [ + "from opendr.perception.object_detection_2d import NanodetLearner\n", + "\n", + "model=\"plus_m_1.5x_416\"\n", + "\n", + "nanodet = NanodetLearner(model_to_use=model, device=\"cuda\")" + ] + }, + { + "cell_type": "markdown", + "id": "4ef5ce70-8294-446a-8cc2-b3eba5e1037b", + "metadata": {}, + "source": [ + "Note that we can alter the device (e.g., 'cpu', 'cuda', etc.), on which the model runs, as well as the model from a variety of options included a custom you can make (\"EfficientNet_Lite0_320\", \"EfficientNet_Lite1_416\", \"EfficientNet_Lite2_512\",\n", + " \"RepVGG_A0_416\", \"t\", \"g\", \"m\", \"m_416\", \"m_0.5x\", \"m_1.5x\", \"m_1.5x_416\",\n", + " \"plus_m_320\", \"plus_m_1.5x_320\", \"plus_m_416\", \"plus_m_1.5x_416\", \"custom\")." + ] + }, + { + "cell_type": "markdown", + "id": "10c74615-61ec-43ed-a1ae-57dceedfe938", + "metadata": {}, + "source": [ + "After creating our model, we need to download pre-trained weights." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "8a680c28-8f42-4b4a-8c6e-2580b7be2da5", + "metadata": {}, + "outputs": [], + "source": [ + "save_path = \"./predefined_examples\"\n", + "nanodet.download(path=save_path, mode=\"pretrained\")\n", + "\n", + "load_model_weights=\"./predefined_examples/nanodet_{}\".format(model)" + ] + }, + { + "cell_type": "markdown", + "id": "0e63e7a9-4310-4633-a2ac-052e94ad3ea0", + "metadata": {}, + "source": [ + "and load our weights:" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "e12f582b-c001-4b9d-b396-4260e23139f6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Model name: plus_m_1.5x_416 --> ./predefined_examples/nanodet_plus_m_1.5x_416/plus_m_1.5x_416.json\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:root:No param aux_fpn.reduce_layers.0.conv.weight.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.0.conv.weight.\u001b[0m\n", + "INFO:root:No param aux_fpn.reduce_layers.0.bn.weight.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.0.bn.weight.\u001b[0m\n", + "INFO:root:No param aux_fpn.reduce_layers.0.bn.bias.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.0.bn.bias.\u001b[0m\n", + "INFO:root:No param aux_fpn.reduce_layers.0.bn.running_mean.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.0.bn.running_mean.\u001b[0m\n", + "INFO:root:No param aux_fpn.reduce_layers.0.bn.running_var.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.0.bn.running_var.\u001b[0m\n", + "INFO:root:No param aux_fpn.reduce_layers.0.bn.num_batches_tracked.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.0.bn.num_batches_tracked.\u001b[0m\n", + "INFO:root:No param aux_fpn.reduce_layers.1.conv.weight.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.1.conv.weight.\u001b[0m\n", + "INFO:root:No param aux_fpn.reduce_layers.1.bn.weight.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.1.bn.weight.\u001b[0m\n", + "INFO:root:No param aux_fpn.reduce_layers.1.bn.bias.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.1.bn.bias.\u001b[0m\n", + "INFO:root:No param aux_fpn.reduce_layers.1.bn.running_mean.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.1.bn.running_mean.\u001b[0m\n", + "INFO:root:No param aux_fpn.reduce_layers.1.bn.running_var.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_fpn.reduce_layers.1.bn.running_var.\u001b[0m\n", + "INFO:root:No param aux_fpn.reduce_layers.1.bn.num_batches_tracked.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 
18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_head.gfl_cls.bias.\u001b[0m\n", + "INFO:root:No param aux_head.gfl_reg.weight.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_head.gfl_reg.weight.\u001b[0m\n", + "INFO:root:No param aux_head.gfl_reg.bias.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_head.gfl_reg.bias.\u001b[0m\n", + "INFO:root:No param aux_head.scales.0.scale.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_head.scales.0.scale.\u001b[0m\n", + "INFO:root:No param aux_head.scales.1.scale.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_head.scales.1.scale.\u001b[0m\n", + "INFO:root:No param aux_head.scales.2.scale.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_head.scales.2.scale.\u001b[0m\n", + "INFO:root:No param aux_head.scales.3.scale.\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mNo param aux_head.scales.3.scale.\u001b[0m\n", + "INFO:root:Loaded model weight from ./predefined_examples/nanodet_plus_m_1.5x_416\n", + "\u001b[1m\u001b[35m[root]\u001b[0m\u001b[34m[09-01 18:10:13]\u001b[0m\u001b[32mINFO:\u001b[0m\u001b[37mLoaded model weight from ./predefined_examples/nanodet_plus_m_1.5x_416\u001b[0m\n" + ] + } + ], + "source": [ + "nanodet.load(path=load_model_weights, verbose=True)" + ] + }, + { + "cell_type": "markdown", + "id": "4e3ce347-391f-45a1-baf8-91d8a9ce04a7", + "metadata": {}, + "source": [ + "We will also download one sample image and load it, so we can use it in OpenDR for testing:" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "9efba6eb-5235-4e31-a002-1bcb6e311704", + "metadata": {}, + "outputs": [], + "source": [ + "nanodet.download(path=save_path, mode=\"images\")\n", + "\n", + "from opendr.engine.data import Image\n", + "image_path = \"./predefined_examples/000000000036.jpg\"\n", + "img = Image.open(image_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "9f083566-3d57-4db6-baa5-0fefdf8fa8ea", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAMsAAAD8CAYAAADZhFAmAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8qNh9FAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9Z6xlWXbfCf7W3vucc+993oSPyEgT6W1l+SKrKJJFSqIIseUIanpkBjOjLy1gHDAjzJf5KqCBAWYwGCOgGyN2qyVREtmkqCqaIllFsrxlmsrMykgXGd68ePaac/beaz6sfe6LLFZmFWjUMUAeMirjvXjm3nP22nut//r//0tUlfev96/3rx9+uf+pX8D71/vX/79c7wfL+9f71494vR8s71/vXz/i9X6wvH+9f/2I1/vB8v71/vUjXu8Hy/vX+9ePeP2lBIuI/DUReUVEzovIP/vL+B3vX+9f/7kv+Yvus4iIB74H/AxwEfg68PdV9bt/ob/o/ev96z/z9ZdxsnwEOK+qr6tqC/wb4Bf+En7P+9f713/WK/wl/MxTwNt3fHwR+Oh7fcNgeVmXjhyZfywi9t/ysaIIgpR/c3L4d9DylYqqfaTl75R/VVXyHR/PvwVQBRHK1ysiggI52w/LqvPvt98J4gTK73KCvZ7ymu88qeevQUDu+J39986/vvwMJ3e+a+b3QOlfx+HPVVXekRPoO9/3ne/t+++r3vH1/RdWRJy397XYVDgRbu9PyDHjvGNhWLGxuIgTd8czsStrZne8R8r58LV93+vR8nDe8b6/7z7NH0z5xDs+zeHrFYTvz4j0+75Hvu93AGj+vo9RuOO+CjAdj4lt+86HUK6/jGD5kS4R+SfAPwFY2Nzk7/zX/zVOIKktwKEPVN6TcqLLGYctSAc03rFU19TBzRdNcI4uKwddR8rQpURVBTIw6yLTlOmSPa0qeFLOAHQxA0LOGeTwIeQEk5iYxsQsJjTZom6Cp6k8qkrMMKg8o9ojAuM20nYJ7xziIKdMlxXnPEEcKgBKzkpMGRXwInjnCN4RRPDeHpoTR3AOsN+z13ZMJh1dhpSVnJQYlZwzKkLO9tD7oJovlDve0zxWy3rUbO8dzRwPOywMFMnCg0eW+ejZE/w/f+cbjG9P8aI8+ugx/umnP8HK6Ag5JyCXRZZREl999evcPNin6+x+zbpE22ViTHQx0cVM0vJ6s5KyotnuRcq2Ockdrz1rRlFyVMSV95ft+wVIOdlO0G9qOYHappqylkDAniuC5oSIkNU2xJwSqhlRSCmX3wff/I3Pvuua/csIlkvAmTs+Pl0+945LVf8F8C8Ajpx7QBXw3kPWcnrYDUg5I+IQgZgyIuAVYs6Q7MY23lM7h2pm5D3ZCyl4u7HOoy4zjQlHpgqBKgiqni4DGYIPzLpImxMxZergqYLgg6NJnmmXiCmhKLUPLNQVPjgOZi1kxffbt4otThFb7MGRY8I5QcRyXucCHRGnQi6Lm5xxTsjOkZISnODLaZMVZrEjxhLMGVJSNJcbKRaUdhrO1847TmcXPDllcllsqWzgwXlElJSgTcIoK2jm9mTG2miEDw7nhdRFdsYtu5Mxy8OymeRsiy4r3nuevf9DpBQtdHIipsg0dqSszLqWNiYmaUbXRtqUmcaOWdsxix1tTEzbjpgS01lHlxMpKzEl21hiIqVk7zPbexXV+UnWH/sxJgDc/KRQUky2SWV7zd5ePSD281SR8l8FxL97ZfKXESxfBx4UkfuwIPkl4H/2Xt/gRFisAk0IFvkI3lkguOxAbOd3IYDaYqacQpQ3mQHvBHGh7FaZDoeIMPAeapjmDGpflxOgGXGOREIEGh9wksquLvOUrw5+HqhV6ANTCU1FzJaGxKwkmOc9OWeCc/jg5qeXE0Fyst2/3/GVcvqlkioKbUzMRAg+E1NmPI1M20hSIXYJ57w9WCe4LKizHdMyGFvEKKgo4hyC3BFIJd0rp1xKtuC6BMSMVIH9WYd3wqgJTEKgG0+ZtImdyZhTZaFpSYudt3tc+xpfj2zxlTMsZdsoNGt5XSDiSoqWcXKYAivl1EGJ5R7lnIk50sZEl+zPLEXaLtKVz086C7bJbGb/3kWmbccsRrqYaduOlEsQxkgCUpeIXSTnbL9T7YQj2/16t+svPFhUNYrIPwV+G/DAf6uqL77X9whiN1XtiK3KQk2qVM5TeU/MEUWoJNhuKkLt7OPKOXuEJcF3KMF7glemyW5ApaCuHNsIs2zHsPeB2EbbwXLCe0cdLFjsDQnee9TbjqbZdrfgHOo9KopXR9RI0gwozjmc2ILKyRaqYgVEyhHnHd45HH3NIsSc7QSLma6zlKGvy1QhRktdcrZFmFRxTgjOkRSi7RwA5V7aDelPGO/tvStSvkzAKaL2cUtNzi1OIWaljYmVuuKWjC2Y2sjN/Ql2pHmc2EkevCcle70xTXHiyOWERSCVe5XVNgdxYvWbOtvkkh2R4hziSioqubwPZ5tk2SxVFBFLTb335JTKBqHz1xJTn27l8ppSOU2VLkdsM2oZzyJJk21MMTJLieks8uKv//q7rtO/lJpFVT8DfOZH/XoB3Lzihi7bg3diN0/Ebrgg1N7RpUQQR+M9Hj3cncoOZbWOoiq0KbHXRUpNbvWEKt4J5HIzUTuZvLfCsKTywTsklBqBsnAFomr5upIKZMULDEJFTInK2e6ZxRZuU1co0HYRRCxtKydcTNDGhCq0baKdRVv4YnWY6zeOlHHe246bLCBVLSVLKZNSLsEBsSspGxl1jlAKIe9cSf0s8L048EqSTNSAZsWJBd72rOPY8iJvXt3BIcRZ5Or2Lil1VFVFjKksZrvvbTzguTe/jQO8D7ZpOU8VKpwTahdAPFmhCjWVDzhxeBFC+XsIVdloPK48H4eQEERcWSN5Xl+CpaMignOBjD1zVUt3wWpC55VKhAG1BWzdsDa0n2VBb+vNO8/yoHnXdfo/WYF/5yUCA+9tt8iZaYw456i8wzl7kXYTwSP4UJW6VHHBoznPA8ayCzulskLlHANvu9g0RlLKBG8/KzuhwpEy2NlkKURGDWjIGUthS36LWA3UJZJYEe5E6USZtpk2Wu0BWDGrlGJSmXWRruTGOTLf+VJSYrIdfzqzE07KiaACmbITi6Vzin0eVZy3955zxntXUthDBJBSUyRsV9asiHi886W8st/rnUNdYJJqarF7d313j6Ori4Ag3tHNOq7vT5ilDu+z/QxLPAvAUjNuE/uzsW1+zqOa52iTqBBztGJe9bD2MHiRGDPeCZW31wdW44lzOOepgyd4A1K8eOqqxjtH7QODuib4QBV8+beKKlTkZJmDc4alOnF4H+aYmqXwqV+FfwpN/P7r7ggWhBAcPb7rnaMOgarshnam6Dy9cOUUEedAsVQgZzq1OkYKTisoToRRCHRZSc52ZF/SHsFg4UqssGtLXUK2oHElpbKdyxuogO3enWZmbTK0J1tgWqblmHaZmJLVVEBWoW2zBUZMpBRJZYGrKqmkiikZiiNlIYsKQixQ6TyDtEVYAhjstTtxSI+COZ3D3s4JMd
viDCHYz51D7n3NIOQQ2JcVlnQPEcfVvX0e3lhDRAghEGeJW3sTZt2MUbVkwVeQJe+FnD23u2W+fXFGE5TKKbUXanEEDyIZLwGnGS92IqiU+jEbaqZkvHMl3Swbl/bghZJSmteo/f2ISckpU1UeL0Jd26lUe0cm2/FePl85hzhPJcKwrkg509SVrbfK0NdJO3nXdXp3BEvpQ+RSeFbi5whTShm8I0i5iRQIEcHnSHSCRmWaM7NsJ0uTHaEAAIlyFIuwUAWcc8ySFZ4OxeHIFeSYqdR29FnMdJpw4iHa75x2kTbp4emgzF9P5QOaEgllNjPkSrCUx1KkkioVuDapolnoCgyqCLlLlmSopVtkcNmCNWXIKSEF8QNL75A8v4e5gBWqUhZynzYKYgUMKSVC5axuKL2KnGzRO4FcXnMlcGV3nw+fPoZTW4zdbMb+tGNvesDG0lFyTogoObfl9dQ0zSrfvn7LTsRyGpbfhA8BUUunbRnbSRIceLE0FlECSu2wjzVTeUVzZFh50IQn07WdIYgKXewgCcMBLDSe6TjhvIEjqvZzU874qSfnRC4f9/fHTnhnrQRVdsfTd12nd0WwAPNcOZd8fpKSwYMKqe1Yqiu8E/s4KyrQxUhXCtJZTJZSCDTB4UtalrDejBdhEDwuZ7qYSECXM6ijS1aTJLXaoC2omWKplgrMorI37Zi2cd5EVBWcg5Q6K7Jjpou2MLUUn7n0EFK20yyX0yeLAo6shvH3KVPwVveollMtp/mDBex1ZSWTcV5Kv0StHik7MjCHmRxC0mwBUtI+QRGxmlCd/bdPVbI6lI7d/RmVF05sLPLWpEMnymySuLW/xz2bttn0J6H1jhL3rq/gHXRZcN5BFrKUzSBr2Qz6QPJIcuRorw3sXjkpOK9qCX77Db4gcKVFiy+njhcDh5gmfCcFfFEqsRPMoVSubEmS8WSCQOUy3jA4NGcqB2gmf19T+M7rrgmWrMI4RsYxkVWYtB3Be1uUGQ6iMgyl8DMAlEnMzLrEtIs48QSsV9HGhCv1R1JDxpwTaxCWIjBmJZZiN5edJnMI/4bgygOBLmXGbcdk1jGd2cJ2UtJH75hptr5AabzhBC8OLUV3ViWW06hfzP1OVjZ9vDeUJxUok7KjS0F7XFk4Tg6RuZzsNTPvRJfUSvteuMxrIICYkxXrJTidy/NGHdlStqQOdY42tmQJHDu6zNu395FxoG0T13b3rcEXGkRqnHbzlPjYYsOoqdidxr4Ut/RQFMXNU0OrD2yDlFKQi+YSKFo2Iz9POz2HGLMFtKfViBdHwtGp1WIkQXFoLKks9t48zFsSIoc9Iiv9CoKIrYVxeveQuEuCxR7gTGHc6Rwrz7mlrqzYY+poKsegsgBqoxL7RZpsF2kVFEfGehFSKv4qKeKg7eHacnrF0vgylMZOk6haivOMrwJ15YmzyKxTZq3SdXleEjoRQmB+Cqnaw9FYimqFlCNgu6aWNDFLaYZRHpwTC57SXBNk/hoLsms7bAa87YQu+L63Nk+pDukspfcUDUZVT/lCKcFrv9eVU1ecfX+XleRt4YJjazzl6NLiHHDIMXJ194CYIt7VBUQoAYyw2DQ8cnyRN25PSj2RSVhHPWZ7DymXQJjXZpYGIW7enZcSam5+H9QWfYGNDSlzh51456zJW9K5nO3UAbVA4xAttyUh83vUI2Gqti7eo76/O4Kly5nb045pNhh1POvKzRW6tt9dM9NO2KElOLHjUsuDBirX56AJQZi2EVWDf1ux5e3dYb0hqoi3tEvQebDk8mA0BAY1kJWDWcfBpLUivdxoESFT+hsFWtaC0OX5wsjz3avfUYHy5Kzuct42A4NtbTH0iz5rxoklIF4cSS21cyGgapwt0dIiFHeYOjohluLelr07TOsoiworeVRAe+qNZpx0djJ6z429A06tLuGDp2oq4iyxtT9j0o6pwsAQuOwBB6Ue+T/+xDN2X3IiqaW8MVtqO4uJWcxMYmK/bVER9mcdUZWDaWtNV4VJl0gI0zYx6SKzLjFLiVlS2sJkaKMzmko2aDnnXHo54J1aOSfQo1xSnpPzDs2Wvln5128gpUn6Huv0rgiWnJXbk66gIEKXhVnbkZLivcMHX47yktM6R1OCRMTNu71zMl3pyKoKrSbbkUSYdFYLhAIP55nxtmKyIjVHO1WqYA3NLiqT3HJrd8p4GhGsCYna7hhzJql1/HOyRmFfVOdshbohOFIa+2WXy2Vxlycjd8DBttXb53zfTRZb4OLL9xfY2QppxamlcbGkdk4cTvvTJ/dr5nAXxWo4VbWUrZQJjkQlBn87cVzb3eXx40epgqMNjnbacXu/ZdJNWC3QsGpfD9hrHVah/Dzrxmud5ycfpS4SzfONxZXTrieUSqkFRbSwJsQKc5RU2gGqBrS0sSublHDQtcy6XJ4JTGOmTRiNRmGWIvuzyDQaWDNLmVkXmbQWhLMu0aXM6+/Bw78rgiWpsjeJpXCXeec5aUaSImRC5ea4vCvQn6jtFClbTRCzzgl7/WLUkg6UUEMQZjGRk/18K+ns4E+5z6cV1yZC8IwnifE0lqTAfr93gko2ACIpKlqImSX/vyMNopwYiswL8pwN+fIFPIhlp5PS40D6PpFS2RkwJwAaK3qOSeMLb86IiPZpW5f2/u00YY74OO9xWjhV/cIou7AtxL55qdyetiwNrQ45CMDUOvs70ynHczK0S+7ci7Xs0HZv7TRj3gBzUnovzuo5A8xcobn06VePghYmtvNWG5Zg7xu0i7Ur96sHNLLx5nIk+HqOGh5+jaWM2t+gHg1US8i6aA3jv/ffLLzrOr0rggUsdZikTDuzws1yTMtlVaxTbbCnI5cFPwiehTowrAJtUm4eTImizJ9fyfd76nnsj+0stJ3xj1K52d55Yk+BSd5Qqax0uWD4mXla5ZzDBY9zdmqkwimyoLBCX0oH3U6Dw76G966kbbawREvdIIbkiHPz/zrNFoQ94lN21UL5Kz9W56eG9jtzATekP8GwfzOEquzWBRpwau8n5gQSMHDWnsf+LKLA6uKQ29tjWjpms8jN/QMePJpKemdfm3MujUil66aA4FxdTv50x6K1brw40GxESftA59Qe1TwHNA5rMZ1/7Nz3U/TteFQSTsL8+w1lvANG1x75sAUizuFSuddByuZy16NhhioNVAx+ba04yxm0ApydJsE5BsF2lC4lYwd7RyWA9wTv0Lbvc1int7+xRpo7PD1S34cpjb2UKRwnK34PphHvfQEPHFVtcgGQkvMWNq9hzkg+3CUpi9V5b6dE4TeJ2Ody6RHkUtC7fov3BjQ4J/O+SqnzgUPkRkSgvK95vBR0By3UndIZL+URKcY5DO6yIn29UxanL2lUxqPZ6paUlZ3plGPLQy4EBw7aNnJtdw8oCzqn+WtLyRay9wOk0EjyHazxw0VrR5n34Y4AOPx6792cb3b4vg9P1pwPuWN9ym20IAc4kFy+381h/qyHbANLS63/pNKz2zt7ge9R4d8VwdLvFsPG08ZMTBFUCF6om0AdoPaeEDyFD0hdBRzCrFDasyrDEEgDy
Noa69fbA+8K92q+cFLpFMeIc37OvO1vvoEGyqRN88ZjSqkkCWrNU+whZQVfCT67eRpTVR5NliIWvvHhg895TutBe+C3MHV7ohXKcpqy54b02E1/uuTSY3Jg2GsJOMWatf3OmAvC05M1je0guKSoE5w1fOapKqWE6tSDxPkuf3P3gOMri1h8OuKsY2tvTJdmJSis2WmbUiHDviM9kxI4+Z1pE4cwuv09FTaABe0h/Z/5++6vd6Z+9twQayf0sLAPbl4voj2MnOcBZ6/Zzb9f1VPQkne97opgMc2GErxSBYeLgkdYHNUsDyoa78gos5QOxVJIKdAiHmtWxmx1RFMFgy1TKrtrqSXU9vySuRC88a16oVZfE/W7eRfTXCQmUoiVCqH0Jpwwf5gueERNdNY/15QyXoVIBnFFCWnpCv2D7RnJhY0r3lnnfuca7sj9aMwlGOYdFQ6bFFYIzxeP7xeLFc9auF99+6IgExZYWtI+kcOUBaEtYKuKNVyvHYw5tWzBgnekaeTG3oxpZ7SWvtnZp3u2wN08fdKebVxOhD5g+t+rpb48hHAL2sDhur2z/rCaqKg1lTmQgh72T6xHZahoKQHLqWYnXErpcMMqm4r31TtAmB903RXBoiiTmPClYF0aVAyqwLD2NMEz8H6ebyomtupiZm8WcV2mTXkOJ9viUKNVFNw8eutQx9boDqFypKgQAlmiUTr6Ard0w1UtWBSd70CqlvfnlKhLo7PFCJQpG9sYLbRzMd5Tykooik7EBFf9CdAvUoeBFCKKpszCeMKZ62/TbJzhTWBPArEwW6TQ1KXHifv8uyB8Asa7cgXxsht8mKbBXJat5XDJqvNF1RbNTBM8oFzfPeCJY0cMmRMhtpGDacf+bMzKcI7PkgvTYN48fceu3QcH83/rUylgnnId1gsONBVticwl3fba+2dRckgois/DE8eCz4LSeZn/u9qeZZILNZp/jKkEd1FbvkcedlcES68/TykhzqS6lbOUp0ux5AnYYhPHpIuMW5OvOqfzWrcvbh1CFYz51cVM5RyJjKsddWl+RZeZdZYy6HyHMXg2Fqo6WB2iQIoJUQekObrVJRNn5VTK5dAXsHJ4ErlCcdF5hjV/LySdQ8YB2JjOePL6Hhtv3GKnm/C/+oBnZzbl5e2bvJQy55sR15sR+6JE70pDT+ZgSH9S9EpTAw3Krq6HxMS56qpk8NaoBVA69XQxUyfTD+1OZywNaoaDimnd0k5aJrPE7fEBp9cEEY9z+o5dui/4gTtOmDuL9cOFfWdgzU8Kd1jbWV1WWAAlGA8rD0t1+5/TAwjzrrzrG51u/poMKRQQI8E68eXkyX8qvfv+664IFu8cy01taZQ4muBwmNjJcuo0h2YVYdpZRzgUfUdMhoD5sku6yjGqKrpcmMF9Z7gIpbTkxh7Bt7EUAJbn5oKkdF1nhbceImpJI5rEAhT7XTHnUqQbFb9fqFUdLA00IgWh8vNUKCWTu7oghKSc2rnNR6/s8NQOLNKwP1rlc/vX6V6fsDEI/Fja4EOuY3v/gG7rFvvH1nm7ajjvHRdUuJFgP2c6Eut1xThibIZ8CEP3geOLXGG+iO+gflha5625V+79dDojoywPG3arCeKEru24vntAOmHy6P7n9Tu2oVH+HfVJn0qp5nfIEOCO9Anm+pJUOGOHwVZ+jv2Qd6R7fT3q3lET9Tw546kdBqUFmYjJPwyw0TmSedenYSIwqD0pmTCqqkzxltV6GlKO4jYlVIVh7Rmq7SjTLpVOSa9FEKqSy3cx4pwwGtSklBjHVDr2Quk7W09ApBSIMKxrg4bLC9Oy+xtxsE8XHJX3zLr4jlPbSVFOeof3jq4rLIKC5okzxSNZWcuRs1du8MyVHR6c1iz4RfxCgMoTYiRMYHdnTL01oGsjbuBZZ4CrHflCx33NlJ8gk2phNqzYrSuuxcTNvTGvpAlXxDEJFV1VMw2O7CtaFDQUuNwQtR6Onuf6QErWB/GxI2pmZzxjY3HAJbOyMdXk3piYZtShoqfpOyd0XcT7wGS6XWTF3qj5/QkIpWazms4VJrUraZ4g5DjmYHyVqlpiMNgsdUbEeYOFe5HaIe/NPlYtiGTqg/FPF+xWm9nv6yXPcud9eI91encEC+bYosHktgNv6ISqzAVg5hACVZH9xqKAbHMmdj0d2xntu2hTrH7J1AGqusJ3jtksMZsluoKS9Z1y50renpXgIQxrpm2HlhzZO6EpNBMtxboPgsRyYkXD9L0D7gjUrFjTMmZql7lnZ4+HLt7gsa0ZJ9wig/oIBHChIhcOl8sVq9WIg9iy3jZopyTtkOAtVZllGEGOgiwIo1ZpmHLM20L8uFYcMGWax/j9TDcyjwCNgekgM2sqoxalwHgQmFaBaVXROmFQVay5inCwz7fPf4/81GNc39vj1PoSL7x1w5gCs8i1nTExdzQFUcuZwmh2qCa2D66xtXetbDwFHSuLed4rUpNce+8NGSyAQHAOzS1Jb4G7Vhqv9v11VRk5E1egdYd33tSZCMF5C7xSz7k5+GCbGfRlni/B5nBZ5ifdeyDHd0ewQDGC8LBQVQy8n58UitI4Tz2ygn2WMtOUGHfCuMvFC0rMvKEUiV1K5GxsYc1qELRz1E6I3pFyoo2ZLKZ7QQvBz1kRWgc7PUJBR7TURDkr3gV7XaW2TXUpNrWHS3MxaLAACwLLkwlnr9zgoWt7nJtVrI2WqRbXoKBiRgMxtWZO4JyyQM12N+FkGlkRHg2ByJLJLhOyp51GfHakoOAFXbLTbyEGRguLOHXE0OKqgE6t+WaNyoALoJXlL3m/iN40IZrQBC+/fp6NB9dYbJa5sLvPsdVVO42DJ7WRrf0p427GQnOY7tjWnmjjGESsd5Uz5dCwfkcXCyXfIkxFScl6Hrnc40mxQbKGZmc/f96Mtd+VkjVs+7NAysY0azsGtcmUTX7s5kiGat8Bu6Nno0oVKlADXyaz8buu0bsmWLwThsHTeF9MC+zmh0J3KBkAXvpueaT2wuKgNhZyF0vdYuKmaUyQhar2DKtAJULlHLW3NGhnYgtlUFvn3ajxgvdKVdKo0RyNsa78LGWK7AEJUDvbnbqYkWBNVAQqEVamU87c3ObMpS3OHignF9cYDk/CYs8xkfL/zux3SuItyeg6I6nZjhODjDOIL/l7y5wi4xpP9rbgnAhxlgnBW13WCjkqOYFuRVN/BiWPM7JnAqkedNCk5FmCGpwhK1zI8JGj93OkWeLopW1uLqwUXp6QxpmDScfu+IDNRePX5dQjh55AjfhV/rvn3ySqbVLeYW0BgdrbJuIKDy14Ry3gJZtZCZa6BmcsYu968w7MP6FsYoI3LwXUMgJxVKFC1aFOiiWVMdp9yQb6nlDSWNjSmVnbkTHmd8zpB65PuIuCxcwlIGFalIH3LPhC1sOOeFdIk5qVYRUYhMAmihfPzfHMRFYl72y8p6szTTGvi7m36MmEIHZ6YPLTUHnqENAsiMdUeHNBlBJcoMuZg2ImkcmmdxFHVye8eLTNLO1uc3LrNqdv7HF2omxWCywMjlBtVGjfOBNfGAB9gNru
Z+4lFMcVWHINb812IBQGQrTdP9c9gRTSIMFUELXAIAodEfGCBMEFOewhzRRXFZi4Nf+0LtvXpi4TxKOtErvMdJC4eqThiFvE3YgcGS1x+1vnOZUy570QkylCt/bH3HfUoG9xHjSaDFgdg1Cx09Zc3TPKjHH+eupODyf7eTvALJu8MQLmcDjYv5pvgvW2fEGMde6B4AR8UVwGqezfvOlggoOU7L/BWWZQl39zRUDn8KXOFSbx3dfoXREsWWF31tqp4u2N9QvClX/3KLWz9CaIMvCBhRBQlHFMrA0q2pQZF/Zo5RxDZ7lw9460SFhowhz3aIJjVAUGVUDUdr3Z7pjR0rA8RKPhH7SRXHvaqEhwNKljsL/HmStbLF/fZmOn5WRbsTocMRqcILoOsFopOXAFlqagN9YvdSTJhNIw05QtmFxkwVfszCZIfWeDEVwCvEOnoFPBV26estjR5qgXAtklJLhy8ggMhdxlSCbK6g3mNFuwxRTRDKFx/MnkCk/de9qK7dag14dPH+dvvXWF/7R3nefIpDZxbXePlDp8VZmIq+gZnVMWmiH3rC9yee+2Fe5zlrj0gFRB0XrFZ1FMllNWc0ktVTD+nSGh1lS2gGsLgKA5I/EQklYxgqShmYfOPJQ6CQA1eyZXKEUp2r3f6+5yNAyKnt2b/BZMB49aUFin3PBwpxDEaC+CMomR/S6iYrBvyqZ2NG6VmdFF1eIEIjRNzTDn0kcR1pqaQSguMk6onafd3UF3rrOyeZQ0neGnUyY39ohbu9S7Y+pbB6xOMiupZrlaYKHexC8EdJjNKc2ZS0mctdZ/cUW91z8or+CsPhGE7Aopxh1aM9VVzX5qzWtYHFp2Ze16aowabOvUsD1R49B5iF3ExZ7uge04ydJESUYaTVi/RWKhkhSqyixFvsc2/3jjKfLY6henAjcS546c4hcXhmy+9gbPxcz17QMjQxZxnSkzbdGBcs/KgK+IARxg/DhzmSl/R3GlYevlkDOG9kYhnrnUTmDOSu6/V/oOvcyDUDGRjtFYU5Fmy9whJ89FZUZqTdiawh32ft7tuiuCxUh4QnRKIpl9KZ6q7EaKCcSyCE6h9tYvmaTMJGX2u1QsWQ+loikncjaipSuQZFVsWZekYqkK7HeR2omhX95kyUPvGa0s8/Z//+/4+Pp9DPaEuN+RO/BVoAoNTlaQ4PFNDc6jzpIFK/RtByvqbogRp55cegDOCaIefOlGp75cUVQT3hktfcE1ZK9kb/cCtd05y7wwggKDIxmpLIXRrEgqasyodppkQXKPFJfiD0P/co52cpfFdqG9xalTm4ySQ4mGXgnkidU5J5oVfuHpxzn+2hu8fXOHaTelaZYNzaKnrljf5MfuPUZdVeZ3kBJtpjhLmrfCLBv7QdVEf1GMSJvFCviYMzG7YjyikCCSiwzC2eunEG7pUXx71qYR86XR7OamHYVpVwLrsDenoiWo7/aTRSzS22QmAl56HF6ZEqmDx6vQFap3wgp/nxNtsg552xVGsOYSIHYj+nK6y5ntaaISU1VWvjILpOL165x5TAUy+7/3BnJ1gS9dPc9fXXiEWj0yCEgTcJUz50lxdmI4K75zBvEOJZO7jASHqwK565AqEEIwik0ujbNcUrK+IajGkk6F/VhVFYLQaTRLpL4bHzxSOSvqy4PNgp1aGVQM2qYEjaizVEoOfbwkFYGa+UaZHZEmXCW8MN3i0+vPQGeL0CXFNYIMHDrL6DiztFLxqUcf5vztG1z4+ks8/LFlqkF15wNFxHF2ZYX71jfu6EVpMfk+ZA3H2OF8MIaEYLt/Qb60pFAxRfMu61Jxd+vZ4+bOGbMFX5fN7jVp0SypZR6z2KGYz4OIM9aFmkFJmxJtEe5lhf/P4M+hwReR/xb4eeC6qj5RPrcO/FvgXuBN4BdV9bbYOfZ/A34OGAP/WFW/9UN/B7bgyNBUwXb7YqqXVJnGTBBrFJq2XcwFppw2wzoYcxhBJNAWSkNpeRiMnIuXlwjJe5BMVQVC4alLtt129rW3Wbywx8bJB/nK1ef4dnuND66ept9xenmwiEOlHPHiEG/pQm57HlIxwSud62yap9J5DqW7bvVAf/UEz1QWyyA0TF2kiaE0Rp3l8mqMBTUhTEHT8vx1IfahFri5p/SIiNUxJbg8fYcfYgV7MmG64Djih+SpoVsaBG0cREVmxSjjoKOi4vHV4+zsT3j9V36H4598lqWzx+dN276JaK+pD5LCNJ73WyD4CsUauc6ZXZG/g/2NAsHcMmVYl95IqU2K+ZhK6c5rT/f3hSxpqajmhDgtaGdN76nsSu8GdH4q/epo8K7r9Ec5Wf6/wP8D+OU7PvfPgN9T1X8uNgbvnwH/J+CvAw+WPx8F/l/8kNks9lILTQXTUwPUwRlC4WwX6XIilEy0jZa3Dp1j4M215aDwiOrgcckktSnn4vaopYNuYylyspy5dhm8IWJOofvmGyz8yU1GgxG+qfnI6Wf4gze+yJnpKkeXVgs/yRna5Ap9ReznCwZEuBCsPig1Ad4Tu0hVO5DePiEjlUMIlib1TFspj60Y7A2p2Msta2GhpHU2FcDImQHxkFM8/D5fAs4DXQFIgqdNrbk8iom+wL4OZ2lt6jpCFl7gBs+eOYt02Wj8leAGHgkCM/M/QwSZWj1AhOWFAQubFZf/4BvcWFvknp/6CPXKggU2iel027zFtFevujnrV0r90Vut2huR0l13hvCVFWL9muJDVjrvznmydmju6JWwTux9ikA3U0ub/aGrSE6WWko/8UAtlS2x9p7XDw0WVf1DEbn3+z79C8BfKX//l8DnsWD5BeCX1Qg6XxGRVRE5oapXfvjvObwp6sz/K5Siq648EiOi4IvJUe93O3AOD0zFiI2+FOpNENDARCLqIEUrbnvO2SzqnM/UOKF68S2Off0miwvrxRy8YTQY8bH7nuU/fu8b/KPmI8bvCmo1R0l4xQs4ZwHoxDD+FHHY7uaA2HVEhFCBrypbFLmgQ67YPeHQaLxpK0QTy2HEdhpzxq8bQFB7C1SMJS0oUuxlNBcmsslWbK6JApLwjYeC8kg/Wcl5JJdGn7N68U3d5WdXnkInilTWA8qacFMz48vFPNCABBN+qYKrHadWVmkHyoVf+W2Gj5zj2EcexoVA2+4wnt4sJ7/O2cl9S2DO+dKMUiTSxUtNtFgYSfk6kWLgUb4m22bqncc5j3PBTAOlkDk1410F5eeiVrU4HwpEbX0018sdELr4F2+yd+yOALgKHCt//0FTv04B7xksOSvTWcfioCnryDErg4eaIAyAEGyeihkdZAJC7c25MgRPBAKRQRXm7h0iUDc1VRdpnHJAJGVnrGInaOngu1feZvWr11hd3qRqqoK2GTBw7+pJ3jx2gi9tnecnjj40D+qcE+o8Tn1pmLmC1vQLuC/cM66pD6FasUVoLftC6VcxTbpVrKbgc8JyM+LW/m009OxaK3ypXC8IRApKmHJCXaHKTzIkey05GuLkgrN6KMMcu5XyukV4y9/m3oV16uzIbUJGvvyeUmMlbPxH7aAqabMzZgAxISNHM3Lcc+9
xdm5e4bV/9SabH3uSwT1lDEWRFRjCbU4sdp6Wk78nOfbs5DlzmHl/rafQpztqnqzQtjODx7GmY0xKVVXklIhF/2K2sLn0agpZs9wJFLoiPmvj7F3X6Z+7wFdVFZEfcoD96UvumPw13DhCUrMvqmqPiLkGuiZQFYBi0Gu5ncdjvZbKCW02XL8Wx3BQ41RxPswfBAgDCXSaGQXHtJhrezHBmL52keUvX+DE6kkG3gpx70MpIG1xffzkk/za85/j3M5tTi1tQmO7rKo5SJIFTaDO6gfNGRVr1mUVwtATx1NzaFHFqxXqlAWTUkKSzomAWjyvlusB5/M+gnHLfF1Zn6RLVE4M9iyd86QJIaM+obE077ANATWIVIL1cpzz5K7ICkRRB98aX+LnzjyNzjKyZMCFb9V4ab50zyuBILjaoHAXTI6tKUPtkDbDTFhlgeWFEVvffolb347UTy4QjizYYi8wbz8FQOh9w3TeXEvRlKx9T6oXhgUXCoXfOGEZLWqDYO9VwUugKosmA+Tyu0pdJMkspGLKZZqCBU7jw58/DXuX61qfXonICeB6+fyPNPULQO+Y/LV63znVrMww/b2jIgdXJmXZoggCA2+zWHw5YnunR58yTTCo2Ze0zNAwh4rQRnOOaXwmVZamkZXx65dpP/8aZ1dOMKxqfFVboed8GVenkJVaaz5x34f4te99kf/16OMMdWjeXSVNlODB9YzWUsP0Ml6x3otrAr1TuPbS39jhQoWrPbmd73PltFEWw4i9bt96Ea44VBYOXKIACBwSy23LMjMIhyPnhKur0uw0jUgyhVtxsbdUbYsp6iLHZRFqQRY82haGdACJiht6Ox0GntSfELFgtgNBK8HtgB5EGDh0nNhYWmQ1CFvf2mLc3CQ8tkE4MiJpJsaI4HChAg6RTDBwR8ROw7JW3kHj77l65hhaHF96R8uihhQVO3XnmhYLUl9+pvN2KvX/3ntFH0rk/vTl3vVf3vv6DeAflb//I+DX7/j8PxS7Pgbs/Ej1ChBzsSwqXKsuZmLZvb2zMQRBBDT1zHJQG3w0qgKNd5aWYdysphAna4FBMbaoC7Fx4Dxy8Sb5My/x4MYphqHCO5sp4n1ljicFZvQuIN5zcukoK5tn+P3r30WjVdHBVVZ2a5lqVfotztuD8k2Dq4ItMufpYkev8NNoHXaDSA/TISmsQ+8cQ1+ZZ1Yq3C4AVwibSckx2s8zApUtnA6kK+5GwaPRHGvwvWqyONRXxUxDhOdml/jg5klzwRlZUOAFKiCARpAkZA/ZK1IbJq9kCBkZONhX0n5CWsi7CekUxuDGiSPNKmfqkyx+a0z3B2+Trx8c0ol6OLsU96ihhb2V650yZC0gSD/ZwG6HUfJR+7vrSzInBTUzEV7vwWaES9vMgi8aFle+njtQ7h9w/SjQ8b/GivlNEbkI/F+Afw78ioj8L4G3gF8sX/4ZDDY+j0HH/4sf9vPL87M3Xm5EFmXOISr/LmrpVsDRhIAv8wa9F+pCvAwihuKgc7181ozX8kZDRaPK5PxFJp95kXuXj1K52na4qprvSta4oxhwW/0QqpofP/sB/tOLv8dbty9y/5F70GD08xwzGqxwzCTMGsuVrrZDouJdhXctOZtrjJdAkt7Q2xgHjl4K60maqSQUf7OIxNA3GFBvxnMEO2W0Kybq9MVwcYgpIIbNsFEg2yCk6tAPeCaZt2fX+atHHkcbZ0TMUhuIQB4bRSZ1GVmq0MbhKyFuzUAUt1aTdzrc2KDqGI014GpruJqEAdwssuJWWAur7P7JLlfSBV4ME7r1mtCYKrKy2RTmy2arr4BYvaa6GJWIEMucnR767u2lYko0laXR3tu68EXaAGUERzRPOJE7JrSpOe709dCfKVhU9e+/yz/99A/4WgX+qx/2M3/Ab8EmP9m0q8aLdfFLpzkXJ8JKXBlEJDRVZbQJV8ReWtipQoGMS5ML2z1DMPRn5+W3OPjtl3lg5SQi3kznxCZjSc9JK2le8WCgCgHtIk3T8LH7P8TvPPf7/KOFNRq/iHMGTSbNUOx2ehZx7qKlDKY3IlQNsQwphYh2kHu1nuYivLLURlSoQoUPA7p2RuOCpVXel13FKCVknb9OspCIeAkQFWL5+vI1mWzcK+cQsQbfa9ObPDAYUXc1sujt1OzKIu/XjQcaIzCKeNJOawvQGifogQEnfqUhdA6dJrRVsjM+nCSskZoU3VcW8gIPLixz0rfcmk2QjSFyLDBT5fwb17j3ng3q4MveUNKsnOlSNBa6E9rWGI8x57lJR8o6h7dTNtfKFDtiaRWkbDVoVsfuztgYygKzNjLrIjnl0tz+wdfd0cGnIFfeSI3DyjGsfJna2zcDmcuDUcrYawg4mwzWj8R2PZuXQ6+uUr9c/tYrTD7/PR5YPUPwA9K8uKQU3FoWelE9AmZrCp0o4jxHlzY5cvZx/vjid/jpez+GUKGNcddyxJpfGNmyR3esCZhx6pCMGXY7X2BNSie/Tx+sL5+KaZ0PNTHOaGrrXWRM4kzMpd9jC8mJGKwu9vPRni9ljUxF5ukJPX9OlRf33uJvPPIIUgnqMqkT8rgzaBgMHl/wUJWFlpJRQxLkEXArIrmwh6cJ2vJ+K4GZ2vt3guusUSoqSAeaEotVYNEvkV9Vbnz3Fr9/43t84G88g6BMZjMWhoN50FjxVFsvLmeGVTDun6Y7mpzWhVfKSe09OUd6B//54lGbroAwV432hiijdzAR3nn9WWuWv9DL3mRm0ASWBhXLdWCp8oyqQFUkqOYeadryabJpWwWvMutWVdpk5tNgJ1DtvQ1wFc+VLz7P5Hdf4fTiJtkJKubf5ZwjBNPEqC+KOe/mNrKuCJW02DGJFx49/givDisuXX0NYrKFm4ygJ+rKgFH7o0VU1vcJxIdiyOdRX97XXJPu5lws5xxROwbViBljxPm5e4v1CsruUHnCoEHqCuqSbnhB6rLYHYWFW8AH1w+EStxmgueAI8MjphFqgXHCBUd2ivrS4OwLgZGxilFFa4U2m75mpmjEuGiquCgmCaj7RWrfI1HRrjRfZ5k0SeRZ4trNWzz36mv8/CNPciwMGDUNywsLpn6UQ5tWS7APnXF64Z9lYWVOpvdF+9RrYHypBe1n2IQxmE9N1jmsQt/Hf7frrgiWfqG0XRndnE0GS1YcUDtHE/whPq42HqGfMDzNmUnMtAptQXmyQpuVWdvxxme+Cl+9yrmj91DVI5sWHPqMxB5AihFyf+xjC7sUg10/KwKHSGDQBD74wMf5vd3Xmdzegi6hMRn9pvQy0izawywsXNdUmPMj5NzR+4Dh5dChxBdWrneGkCksLSyw2+6D9kOHXJnmBZoUkpK6aOiWWkNXAqAZV4e5qV/feAOrAdQ5/mT/LT509F5cU6PjjM6yMZkDUBqWGmw5Ogn2PrWkfSOBKaRZNiZ0FohYZthmXAdpbOBF7rJNL2ttt9M9kwO4Snh57ya/deMNnn3iCY40KwzfHNumMGcA31nomxdc7/qiarC5fa3dw65roYAsWqKhDwotAE4udZArKsreWP49gDDgbgmW8j9JYZ
Zga9axNe3Y7RItRlVpnGcheIbOMfSeobehnE6ExpvCcuidIV2Y2Gs2nvD6v/1jFl+dcGL5GM7V1FVDkGC5rotkUbP2VHOjD/RabC2+yoWPlq3wN7DBsb6wwuq5D/LVK8+TZzODUftmaBYbrzc1WKqfAe9DjbiKLiZy6lBJRr13RpOxeLSASWWnXAiLjLv2kOZBWSRibADrdKsV/jnhVHDZ4dTj6xpndp5IOYmksAVaidzcu8p9x86ie5lcGaqWq2zwaihSbWwH78YteRJhlskDhbGi0f6gmBlg7HOgTI4JpwlfGbKm2VSmeKB2aIAvXbvA89vX+MUHn2JDG/I0MzwAvzOjd2kx0qXvV8n8lMg9d4xDjX2PKiomaaagXv01t2gSC45UVJFdioWp3pue/ODrrgiWPpfMGIXbiWOWle1py7iN5bQws4qFumaxblioAyPvGQQDBKoeOcOREPav3ODCf/sFTm4HNpc3qeoheI8jEFyFpypWRd0cWhQHImo1hZruQr23hewFZxiqoWPB88ixc5xfW+TqpddwWdEukcvcEe89UtwqiYV/VBYOwRvsKwYmaBCSJPr5hkGCjZFwnoV6yG7XlgdcMCIR0EwuM2gURSUbiqiFyRxKE7egPoigwZcJyJnX4xb3jBYZVSvGEAhaiInFI612tjsHOymdt3orxQgTRXczeZJxpabLRTsjUZE6IANfTmhDMDUl8tBeX0T5rSuvsKUT/vYDTzBsHdIJ7GYaWaJ+5eYdHXtjJfTdexOs2ZPukVKLz8OuvmY7LRL9/Eg7JVPuvQashsscchK1oIjvdd0VwSICTeUZlvHNddFqW8B07HWJadZCxc5lnIF9n+fQwE2z0bm3vvkSe//665zzm6yO1nCuMtTLV7aIS9/Gy4CYlJgiSRO5S7ZDlxsqJSWzG2syYjNRsrSmCp6n7v8QfzC+yOz2DZx6HIIvJ4lzZo1khXgBJHygKh5pmg3KzQW5kwKFWrfd5rovVkMOQgKnZVGUxdtUuMbm1rg7eiioNS/VlR3XH1LzicVYrgq8fPsCz9z/sKVcQSCUCQWmrjNzicxhk25mqJNDYE/J08KmpizeBNqZWEw6RWMmB1eeVyJUxi7el45/c+k7rCwO+LkT5/C70QRoSY0JEGFhZ4A/mBV2cD8Ps/SwnCv1R/9289wnGSgnRp77UGsBj8D+3p/Yh0FUysvCNXyv6+4IFoxmX3sxQwpMyJWKuOvWpOX6tOXKZMpeZ8V9qzCJia5UscF7ZDLh6n/4IvUXLnHfwikGVYN3NrLZTC/KcY2hJ94Hgq9JuTPzhGDuKWWiSSmode7W0kGhj9jDcBJYbZZYfuApvnbpBbSbmvlDgYdF3Bx+7a18zB7Wl4lbcV47zwt732PN9pAHoWZfMrhs3lxCqYV0jv7ZgWP8snlvQkvKlszk2xqXlsJstfuEvM/mygmktRokxcIFqMoJ67EivsDOUgsaTM9CAsnmYaBdKYuzeayJQp5FSIJ0xp4WZ/Dx7f0p//LVr/Lk0eN8cv0s0hoQMFdIiqCTzHBpFXnxKj1Kdcists3QzBf7FMwCJ/VBcsepUaAZuhiN3oIpK7t46Kjfjz8HmWtk3u26K6Bjgz1tR1WFSWeFXA8BT2NmlpShdyQtmpWUUHHUJIbOMX3xTbY/913O+A1Gy6uIr/F1MIEWvTTXFr0L3jhSPQKFEnNXGnpmGCdZ0LIrO5TsAjGBl8JvUjU9SBAePHo/f3TzbR66+ConH3iK5Iw+XkLN+h9dZw1T5whaWZDGDhdqJDuscCmDmLwvehBPTWSSs6U/3norGjLZBYKLBn8nN8/XQU0T7wQXjEuFs3RPsonSXty5yNNH78Fluyc5ZtxCMElMTEhlHgVVZSCDpox4E9zlmE1MFpyRPgsop84Yydoq6sW0uhUWAEl5c/82n3nzeX7u3OPc36zPuXVSOags0B0ebcC1ntGu0I07GFXzWrG06efEzMNhRLZw5kmU2gaUioFzb1USywSxAq3MU7hY6C7Ke/uG3RUni2ICry6aXqRLmajQJmXSJSazxPak5eZ4xtXxjKs3t7ndJrbbyOzabbb/hz+k+U/neaQ+wcJgwayUiqu9U1Mgqsih72/uC7/iKuICwTWkWUI12liH0iNx2qMtGa9GU3EYqpXUiIqVr/jAAx/h98ZvM9m6jM9ikHJRVIqabDfkgkg5oapq61u0rWky2pY4npC7rmg1THMRXEVqKnJujQhYefC+uFt6cgzmHKNGf0HMBihrsjrCWRMzdRH1ZhF1afcKD6zdR9qL0BfLnaLe7JUK8YpcYfLlYm+WDmzGjWQ7bVMbkdL9ttnY1tiVSixF9A5tM8/fvspnXv0Ov/T4s5xtVoldNHAgZXJj/tbiBPE6t41dWj+JPHfV5MTSnyB2mqqWQbkllUJ69x7mtUoWo/ykO1oMh3+34LM6q6+BLKV7r2C5K04WVUpxG4oepbjKQ0nJbM+YJWUSlfFkQn7zKvcdTNj48hbNYBG/uoGrG3CVudcD2rsfZkD6wHDzxhz92q1MB9E4zyxOoYskrziCpTa+UE2w3bPXrvTjuFUdy8NFlh58lq+/9Dw/NlpDFwaIKH5QW+c6WwFvC7H3KgOdzgijEaCEZmCcdHHkbKmHI+CqhqgRV0ZouEIQJGBoG5ns7QSzuC5jNIphg3KYtlzotjgzGjDwS2Zo3iWzQRLmwilNJW2cKko0DY8Eq+m64pbSlZ8di44nGRzdz45RVdIs8sWt13nj0lv842d/nAXfGGfSi93Dylswgo3lLiImAapQs7w9ZOvmmLQ5Mil0qVHmELgWxUGxa81lFksuQIDN6LHZn1qGQ/Uo27xe6VE37Qmp7x4ud8nJYg+qmHLazcceWFM5Foc1y8OKlYWG4Bzt6hq7Wy0vff5PeH7/CtEzJ1uSk/n0KrgMXq2VRUyWhmjp36j1KDRpSSccUgWqekDE4VOGnKyfEROaCiFRsIApCEqhHlM5x4Mb9/HyxgrXLp83gqAAMRb3k1ILaNHGi+DrYEU9ufRYpHSjpRhhWTDW9QKT3NoMmKqwcZ1DQmVHpxMoOynB2+kTAq4OVrALlsKJ8NLtN/ngyQfBK34kUHvyyDYlyT1+bxoVatDGAk33knHQRJBQ6qHaDs88y0g0loSrbdFHgd+88By3r73FP/jwTzAKzbwvpLOERGuWukIh1gF9lNlCbxNr68fx37iKxlwM8OxPFyMpRjMlKSeMzRRNpBzfwVKORcIwr39U5yBADwwl7bX+7051gbskWFLO7E5apl0yIwU1t49ZG80L1wmDYK6SIThmGU7daLln+SH2Ftb4dzde5OXty+QuW8qFJaMuR3KKRgd3DvGWi3vvwbt5KkbO5K6zI987qrpGxRsMWnYlcp5btqo3nUouzANK17/2jqce+BC/e3CRdHvH+h1ZCqnTlz+KJhvJ1tQ22at3Q+nXtQ1unfdBGQwaZjoz+yJfdBpSgoySe2cpmnKdQ6XWKyrdbcnsphnSHrA5Om59EV8cAGZlIcVsu/SiR2qHFDhWWyWnEkxtOY2DQ2cJ1zHv2
OdpRtvIQZzy7577IusH2/wXH/5JgjfzDSJI7WDBoY0FaL+RuCilZir2t9G6/JuLp9BXbpFjsr5ISYtjtlQ9qfVLckrvOC16a9bco4MYt2w+ur3cPauNi3rTBfQ9OpN3RbBohnEbSSLUhSfoHQwaY4/uTSJb446DWWTaJnwXOT1uWWgWePzYOX78vk/wcjzg37/+Za5t38QTrF+SDAmpXCBgdjimUSlFel0hdQ3OGQI265Ayj7KqKuM4Ff1HttykpAKli11OQqt9BBcqNkbLLJ97mq+89U3SeFpy5MLi9VYImw6loESVpXVZs9kcGVdjjo4BLDZL7E3GPYxnp1vAHCeDMQKQ8nOlP5kEVwec9/jKKB+vji/zxNoJ6KR4FJQhsEXpTLJaTaPBxDmA7mMpUjLNidTl63po2gnUMmcrbE+n/Ltv/SFPCHzyg59EnC/9mQI1l8KeUF5nwCg5mi2AyzwhV+qmpSPLrNxwxJ0pKSZmXUfblcnWOaPpsKsfi81Sl5KdPimZYQVqzjCl/9KfTjEXn7liuzTHo9/luitqFoCclMm0QxZrlhvzsJ3FTNtlkgqxywyDEfZWdiYcczWXqgrxjsYFPnj6GW5Nt/nNC9/mzNbr/PjJJxmORjQlGKw4LPBxWSQo9KO4g/dk70lF8SviCL5hMp2ZfSRm9iBis2MKE8z6HEXoZ7CT8PDR+/nSjQs8eu111gcPAQGRjASQ7K3p6b0Z2IWKrp3ZruVMSOZK0SlqhfpiPWQvd4XpUPoJKRbSpUdcJsbOtDelJ9EPnhWB8UB4a5T43mrDsyce4joLbFybUkdjHuMEqcskMszmVQdG/ccdQsKGIzl0psjIaiCyqSRdFq7oPp/91h/xyRPHefiRDyLiSF2CTvDRZAUKuCzGi4vWj3ELzjQzTc/29mbAMM1IIxw9cZLd75zn6tPLdNJrVYzi4p3MaxSgmFgUjUyxxe1Hjb+jAVn8zfp1oJKw2TTvvkbvjmARwfvKUI2YaeqKVoRZVhabYB12EZabmga4//XrVMMFXBtIYoW2OsfRhTU+/chPcf7mG/zyq1/gI0fu5+ljDxFq66bbIg3kkisb8QOiQHRiTcdiMoG3Tn5AmGG9ACl2oBYXpc9AgUAVhETwtiU+du4j/PY3P8MvbhwnbKyjUgaT9tJjMeDBS0WrM3LqiuFCLAIw6MdSj1zNm9wmi2k+ymgaUhcLq1bLCHGM3uIcNIG9Bce3hhO+2d3kufNvIM7zzThGU+LEcJGPPXQ/H4qLrF0+IIReSyPkOpmxxYGlYCJi/ZdYGre9wyMY7KvKq2zzx9/+In/9zAOcuv+xogY1rlkSoBFcU5H2OlKr5R5bCpHbwhJXKdPVBJcgV47kMn5Qcfrcvbz83GV+58Q9xWnUcuJCEjeuWQFscjb3mNILwHgdRYN/Bwdszg3rETeUnejfdZneFcGiqsxiYtEFWlX2u0gssGwIjsXKs1oFBsFMvo9d26MLQ0jeSILe7Ik8VpM8cvQBTq+c5DuXnuPFl3+fT596mpNrx41Bq7HQzzMZD05tjF4u9j6F5gIeYvGcKs2ErptRV7UxWVVLelUQtwJtOhfwdKw1S9T3Pc3X3vg2nxj9BDpwEI3aQmm8StHr1HVthSk9vHOI1mR1dAo30r5R62O0hVvkLBICTjOpzfb3piYD367H/Nvt1wnZ8fqLbzObzqBt2X7pZUIIXF5a5vnX3+KLH3yMjz94mge2Wo4cKGs+4IOQ9xVaRwhGsZfWVqNzFZmIThLSWeP4Ob3Kd9/8Ln/r4adZ3ThVoFmF1npaQtm924xJrYFi0tHz3RBBs6WAUinaKLJeoSOTDoxCzSePr/HG1g7fWjt2SFUp7PBe0zJnGEdLjecOMRj7QctpY30n5lB1H/sxv3tlclcEC9gGsHvQWld6wc0hZAHzBUumka4mUwZbU243Q3uTPeO0NJhsJIFjdbDAJ+/7GJd3rvObF7/DPTfe5MdOPsbyaMGabr7/PlM6Omxoq4FjZmbhqgqXPTqbgXOE0Fgjsd+0ipXOfIZjr5PAU/vEI8fP8cWbF7j/xlucPPtwQabMPikhmBZejZKSCs9r7gOMfW3l2HDLLLJutj9VIGmhmTcVlObt+WHmzbMNTy2ts/+9t/h/X/wTtrb3OHVmg8m1a7jBkNxO8YMRUjn80gKxa3nx+e/y/Deew08OaHA8c/o0//CJD+NONTTXpyzOomlwghBS4YqJwIGSJPOlg7e4sv82/8XTH2CxWUfHIAPmUL0RKrGTJtj7QRJaOyRavebVlWkBGTdwsOiQpQoZmQumC5Yera6u8re3r7J1sM3rC6sAJTE8lBH3JYf39rMLmjz/vFCygAKEWDtND7U/717f3x3BYjuoMp3FIt5SKi+sDAKu8tTeM3A2JdhfuMlUK8ZqlP1MfzIkehNpV+ZzIMqp1eMcXfwZXrp5nn91/g/4xPo5njr5EKGp7WSQXKTMziSxItTB04rSKkZE1NooFwjB1+RUuv1OikzY7JU092pJh/eeAZknH/wov/udz/H3N8/gl0YQjfAoGK9L1aj3xa4SChVGgid3HU4CTXDEypSSmWxpnFMInltM+dIR5SXpkKajenKZ720dkC8ecGJjiXZnh8c/8jRN5RjfvIVbaNCVVQZLQ3b2p9bRLmKyg2vbfOmN82wPHEtH1jm4dJMHhqv80ur91ONICg7djUi2OuH3tr8H8RY//9BTjLplg5A9ED2iDu2skLcSooAiteAqm+qWRXFNqbM8MHCw7q34r+SQxRzVdDa148yZk/zS6xf53OYqrCwWNa3QKTaJrDRkWw0EEUQTs3aM9xVV1dhYdjJ70wld7ieFCaOqwqG8Ud3lJ4t1Za0gm8w6umh2NdMuszqqzTEyZ5YHFXuvXGDLrSI4fAhFb2IWQ0lNXRh8MSvAkBjvPE8fe4j7V0/xjQvf5MXnfoefOvUMx9aOUNUO9YFQmQCs13EHb/T/SVJ6spSolh3T03UdvrIGocGT4IP1eUSMnOlE2Byt8Nbph/nGG8/xscc+TPJFFSmYzqVPAETo4ozQNAhurl3pJdV7VWaskUUprJfgcZXndw+u8N1Z4tFHz5F9hbpMHjpSUsZZcYMFbu8fsHF0ndMffQoqRzUacN/mBq9+903UCdcvbXEwnrK5ssREj3Njb5fL5y/SrA15+43XuP/H1vjY4grN2OI5xsRnb7/Eet3x0bPPULUDtLPuf2ooMLtDklLGHRTEqzxtZ+hZWKqsXvBiDjFLpuNhpjAtqVHlYVrGZniD1x999B5W3rrJsY+cw48aVE1inHPi6o1XmY23uefeZxEN3N69wMFBy8b6JqPBBqqJyeQms7ZlOFyhrpbw1chEe5L53VHzruv0rgiWeb6YkhloJyuoZwJ7s0iXMoPK045nrF86gPUjlsrkhO9/gID3wZjCiJH9nENcMaiLmbXBEj/5wI9zYecy/+Nr3+Sha8t8/OzTLCwuIWmKa6rC4q3ICJIyQzFtRHbWcReBRIcnELsZtdT0qAxaBomm3mpUqZzy5JlH+fyN3+LcjSusbxw3zlRV
uuRir7OqGqbxwAADZxqaTiOT2DFOkUuzCS+0V/m4Xyd5mAZlp5nyfNpnbyexu3uLLnf4oeOt515k5623GCyMaEon/+KtdW7vHTBaHnHsyArfuHidsydOMlqoOL66wivPv8KlV1/jYO+A2HWkyYzujX1ElX+rn+Xg1GM8uXyGtWrE7958kfsXHE8efxyv5h0sjSc7kFSc/0XQxqhMkoXcJvzQI85Kc9cYD0yWgnkfdBmZWpGfCy2Fg4wsCbmMLZRsFBw/VY6fXOXa73+TYz/7EXwdyBpxrmJ58Tg3Z7s4PFU1YmXpBJtr9+FcU6xeAynDja03OX70QQbNOl5qQObtgHe77opggdLR9uZCkkStllAYzyKzKMyS0ly4zENh2YRM9k1QuEnOmV+x9NBhMbYga3GNsZtQ+8B962fYXDjCd6+8xL9+6ff51LFHeeDkA9RAGFZmGiE6PzVqJ0xjz4PyQEDp8M7TdS2uqvDF1E7Ke3GocbZiosnw0JnH+a2L3+CXVv4qrrFutgZKP8VGt+EwfpcPRmAUYVA30FU8tHEvqwRy5bh4JvBHeo2r12+xtAzpxg2+/pnXeOwTz/D8577AzsUrLDgh7d5iHBPiKkazDjfb43bbccPVyHDIef8Co8UFzj37BPc+8wgn7j/NC5//Em88/yKxPbQxPX/+PP/yxi0+cu4xTh45zhOLnifOPILbr9BGCpJUWM9q6VMeKGHR21i+oMZCLv0fXweoFZYOB6uSiwo2Wx3na28/e2ZpJ0GNChMEjRCiY3NtxK0/fJ7Nv/KEETkls7SwTNM8RkwtMe5ze+cii4snGTRrttGlGddvvs3aymkWF04iYuCKc44US5PnXa67JFgKEoLOi9ukRZvuSn6rysYr11laPG4LDEvdsrMzXgoUbM70BldGNZ2Im/sQGxvYoyw3Qz586glurR7nj954nvMvXOaT9z/Jmj+Kr5jziIQyck+tydnPUxcxvpikSNfZSATvjMBnVD0jbIbgCcD9x+7l7e2L/Mnll3j2wafnrz/lOE/LfF2ToonfpMgOUowk4NTqJrK1xcubkZfPerrrjr39LSY720z3ZjCZ8uoff4PZ/hSycR+Hi2sokfHBBE3K7Vu71HXNvedWWHnwYTbuOc1bL7zKdz73RWR5mZ/4ax/jQ3/jp2mnB7z98vcKUmQp7e2dbb708nMc273Bxgee4b5lYZQENzNrpf7YdZUFhLoyunvBbJd8ELS2HocsWe+GaHSj3s3eMoKMGxX4u42l+SkFLlckGh0qo9RLFWt14uaXv8PqRx6l1SkOoQrmooNUbK4/CMCsO0DV7Kaqeo2VlZOlDh3PTcdTStZ8fpfrLgkWi2YT9RyOLsup2OkoVNt7PHwguKHBw1nKdGIxBMoVVrHrN4as1kCTXJAP5gxZX8zd6ipwdPkYP/vkJq9tXeDfvPZlPnbzLE/e8zDVqME1TemKC8NQoRTZQI5WnHuP4qmyErspUtXgglmCpnQ43aquiLPEh+/7MF947re4b+sMy2troOArj2ZDYYITcyVxDueq0s2H16+9yeXxda6erHnTX0VfjjQp8ca3X8GLErsxIQyYTj2pmzI9GANKCI7BwgDnLEWdzSKaE5fevMb+rR2qreucPPsgjz7zONcu3eK7X36Rh598iE/8nb/Fl//9r/HWy69YZ10Nat872Cdcucq1pUvciMucHByh8YUjF0olP/JIdQiBJ1V87dCm2L52Cd2PxY6Jud4nJzPDyykj0TY9jSW9K26RGpy5ysQEtcepox55Vr3j4KUtNj9ogRFzoq6BnimL2scFRn7g7FG0t60SS+cVpfY2bezdrrskWAwC9IghTCguFZsayWQHZ9+6zvrqMq3D3qh4UlLqEEgYGc4XKNfUe5nMHQo6EXwRTWlxYDFXfqGqGp44/gCnVo7xrbe+xcvf+Rw/de8znLjnDKDUzpC3KJlJ4XGZy0lv+empa0dKHUTTfVgQJ1A7gUIQBtKwfuRefuX1L/OPnvo0wQ9LXSVFgOZQFWLXUg2CgRiu4sETp3np4nUu59v4SzfYvrnH7uUbdNMD2mRo3KTbp6ob68+4TOw69vZ2yUR87VlZrqgniclEIEeyJvauXaJKLTe2T3LsyUc498R9fPtrLzLohvzU//xv84Vf/U+8/q3nS3fcdvjtg31+6/WXadZX+dmNI1SVx2WMU6b2JFEx7hiCi8m4ZouO1NqwWukKEzz1ngZFgOWxfou39IvaQav4gUd9oUlUIMlDHcqodc+gFoQJs9evsbtZ8Wtf+yJ18IirqH3g733skwYl97w5FfrhTi9cPM+4i0xiy4mVNd6rZrkruGE9DSXdQc2JvauiKlWnfODWGBktkDG2qi88r1hMGlSFTrToGDJCplIrOCnEO+cczhvMrF4sZfKumFbAarPEjz/449z7xEf59SvP85XvfJVu94A863BJGbqKOtji7pstnhIwYqzn1HVmqFAKdyj1kmZmsePBow8Q6yXevPBaWWBFgOYDqKMZjoiFmkGxZJo08KJENh+4h4MxHOyMGW9vE9sZMbZMJ2P7c7CPdzY3M2lmMjlge2uHbjwl5My9x9YZDQNdGwkqbC4PqHXC1ve+yzd+7Xe48uLbPP3hRzl7/yncwoD1Zz7AvU89OqfZ9IX3NCvP3XiTG7JLbgrtBWMPaCwZgSrSKG4zkJuCIiagLX2PlJGpOb3kg4xO1dxhJmqjyDuQCUjtyQkbmdGKDVrtFagz4EYLe5k6Q7V9k+biDpPxlBtb25zeWOPU5gZRu7lGv79yNhDp0s5t9mctp9eO8r0bl+a6/x90/dBgEZEzIvIHIvJdEXlRRP435fPrIvK7IvJq+e9a+byIyP9dRM6LyHMi8uwP+x3GDzx0O8+lLrBRII5TV29x3/Iq0dkMRxviqJh6KiOaCC7jRQutXnGa6f8v5Whs+ULPpn9YSGE5G2UlkahD4OzaCX766Z/h7eWGf/+Nz3Ll9fPkyYQqdwzKOIoeD+2n4BrJxaTKB9OJ9S/E0pfUdSRxjJqGI0urfOyxT/DVq+eZTg7IKsUEDyN0hlC62uYr1Hn4k9FtpIl89/kLXD6A/du7qCZSiiCHDOpYRnWHuqb29fzEmk0m3Li+RRsTg8aRupbdvX0uXjlgY2WJB04uc24t89JX/pCXvvk8a8sLnDh+lJH3PP3pT7KwvnrIZk6Z4M0e6sXLb5rzTSO4oSPXGb8YkKHDbXhYC9aEbJW8ZyeOnSgG8VNoNK42oqcWu1fK5ieh5985XChMZ8X8AKYJ2Y/oQUIPIu0sc+XFt3jhf/hVxt/5Hvtb2zxx8l5+7MHHCK5i7iVd/hha6dhcWkIkc3b9BJ+473GTefxZgwWL9/+Dqj4GfAz4r0TkMQ6nfz0I/F75GN45/eufYNO/3vOy5aY207xnANt2hlPlY9d3aNbXMWdNQ14SNpjUozgycdbikgWMFHSs74ZTUrBeYKqa8U7n/CIRG4ZjtbnxigaDIR+/71nOPf0pPrt1ni986/PMdvdocqRBCWKDkCRUBlk7wHtCCNRVRdYO45kbHyk4GwP
nvGNzcY3Bmft57sVvQLRmp6OYWqhQDYdWl2XltekW37j8Frev3eL2fiRub5Mme0xnE2LqTH7sPCFUiDPfrGpQ0Sw0ZaSccaIGg4Z7jq3hkzIc1OzsTLi5c8Arb25Rec89xxZ55MwiF776Nf7g3/06Ny5c4oMffRRf1UgzxFc2isMFz2hpAWmEb1+4wPb+2Nxd2mwO/OuBcKQ2Ov5uRg+S9U1ahXHZ3b2RWXt+WZnebfXKICCN0Zi0cTDwuJGfT6PmIBqxM8E+ke/evMpvfPPb/Mpn/5Dnrlzk7GNneObUMdzSIr/6tT/ixQuv4uabGkUpm8spKQwELly+QttN0DvskX7Q9aN4HV+hDCNS1T0ReQkbUPQLwF8pX/Yv+XNM/9LyP0aIywQf5o4dx27d5ul6ga4Fl4wl64tXq2CjCRQzv86YVY+ixK41X1zNeAnW0U86Jz7Gsjh747ti12B5bWEZO+c5uXKUzWd+hucufZdf+cZn+dSpJzhx9kFSFehH4RlLTObjwr2YOm88G1M3A6rKxir4nl6fM4+cfIjfv/Ab3HfhDY4+8Mg8/8wYzWfSdkglnN+/xc3tXSY5wP4O4fZ1Yjcr5GMDBNQF8yBDaKdTmtpDjoX+oUzbyPWbu7zw6hWauibGPU4e3yC1kVu3D+jayOpyxWA04qmHj3NzZ58X/+grPPPpT7F6bJWcYbC0gteOHBMxRt547Qr3bG6yJ2NOLK3BquKXgnmLTY13ZfJ/8x4jqKVR4nBeiWTbHMWZE2jjzN62MXg45Wwj+ryZWNBmcgU344Tzr1/ltWvX6dKMM0fW+NDZE2wujew51pm1/QUq3aZLaoxz6VX4zOe79DNfjq0e59TRm9wa7/Cty6+z307+7MFy51XG5X0A+Cp/zulfdw4zqtePzMU6PSyMOJwqn3zrFgsnT3Jztzv07K1MX6/OkR34TKlLIr6q8ZqsT6G5zDSB4CrQVFi9ghTjNjOtKEYWRdIrWJqWMUBqEGo+fPZpbh25ly+8/FXOfvMSH3j0o/ilJWKxNw3OHocWH+Hga4aDftx2Mvo8NmRIxbGxuMbmmYf4/Gtf52+duQc3bOx0E6PCe/F0uePrty+xe+MWadoi031SOyPFznhUZSSG98HAkJToNLK/P0G8UFc2szKJ0HYtF6/eZmFYkbqO1aWaDz5yjr1xy7Vbe8QOxuNZ8RtXtq7e5Cuf+wpP/sQHWFpc4GCr45HHH2Y6HvP6+bdZGDaEOnB9vM+5pvRH2owmOzhEfEEPTbGqiaJSzUavKcpYdUDjcNmR1KxjUbVnnIXZQcvl69u8fPESl3a3WawrHjq6yS88+QgLqVBjAD0wB85J7Dh+zymqSzuM6obHTt9f5AW2AXrvD8VhKCujJdoc2Bkf8NF7HpxT/f9cwSIii8B/AP63qrp7aK8Jf5bpX3cOMxqdPXc4Bij3dHPl5M1dPlwNEBeYFF8n7735HIsZghscWP4iYnMGtTK3xhRx3uyAUteZXVLhX6WccRiNQrwhK64MVe3tVBFLhQQTS20urPMzz3yaFy59l1977rM8vHmGhx/4EDoIuFCRNZGyo+2mDJrGds3kSDninTezB2eAgMPx9L1P8OuXXuaVl7/Dox/8BE4cXbYmXAiBtydbXN/fY3b7FjqbIqMl4nTHJg+XvtFgNER8jZMK383outY2ic7smkZNw3BoXmWTWaSNCZzjxq19NjcXeHDhOBsnNvmDP/4Gb18fM51MWV5paNUT93Z54+svM5tOqGrP0eOrXHhtQtfNePDZR7l9aY9Lezu8ef0yr79+kXvvu5fjssRSM7SzVosL5CTjh95MABNI5W1qWJetCaygNUYrQph0Ha9fvskrb11i7+CAIyuLPHbsKH/l/nuoxCMpk6dWJ8aY8INgiz9l0iSytnyEY3urXN/d4V/90W+zPBjwqceeZX15nZRiqYMEUTv17l0/zgtvvsyj99wPd6zrP1OwiEhVAuVfqeqvlk//uad/HUYOxbi67O5ArZlPv3GNhTOnGc8y06QgpjBMKF6AAh1Gyg3AUYVCOOy5VyKmVc95rhXJKZOzfQ+hKi4o7pAUWVR9PRMVBzl1dto5x+MnH+Pk0hG+9NpXuPT13+VjT3yCZnWVflT3oO/P9MRIsnX6vb0OXzaDzYVVHrzvSf741ec5e+MhRpubZi3kHD5UvD3Zo4sd0kbUBzRHSxFLeuXF40NDqEckbYkoqDVJTQRlkOhDmxv8nZ99lstbB/zhN17n2q0dFOUPvnyejz52Dw8/eY6PPnmW+uVLvHwhMpsqPndcv3rFrI9wzA5mvPbqJa5dvEI3bTn/3Td45OwZbsV9fu/y68zijFdfeoFfeuBD5NTYQCfnUK/QqdnU9t5l3rKDKJGUFKk929sHnN++ycsX3qKdzrj/5BE+fvYU666hqjzGlLS0WQcgtRCnidRGvDO/ARVB05R6oeGRM/eQrlwwh1PvmaaOHOO8wO/19yKOx0/dz7GVVcQJC9WfgxtWZtv/N8BLqvp/veOffgOb+vXP+dPTv/6piPwbbKz3D53+1VNEUEPFUHj87Rt8YGUVqWrasRVdSU2brhnzrXKmIQ/ii2bBzce/+dxrQhTV4pbvzI0yKkiaEXNC2mzjuLPRY7IUTyqKU4raaFdVCGJzGH3wHFs9zt985ud4/tqr/NqffJYPnH2K+048QNXUhN5EzwYkULmKTjtybBGp0OBNE4Py+PGH+dL+bb7y3a/yUz/x81B6TaJA3dgm4D2aOrSdYB5oAAYWOO9xweNlSFUFWhGEgzLoyZNi4uLNPW7vz3jq4dMc3Vxla3dMmkWGiyPSZMru1i5n7ruPlbVF6vAKr1/bw6fMkw9v8o3nLzEZT9Gu5ebl6+ztHSACbTtlGDzjWUuD1R63DvaZxBkrS8vkccLXFH24QCNzvbjN1oRbt/d56cpFXrn4Nvrm6zx1z1n+2sc/yOrCAl5topdE6/Fo3TM0jGSaY8LXoTSjhTTNuEVTumpwfOjBx/jA/Q9b6uv+9DK/MzPKCmujVUSE4N89JH6Uk+XHgH8APC8i3ymf+z/zFzj9q8/fbOgmrI4n/PzVPQYP3E92nv3Y9T1+9tspXioG3nozmntjBkfGZqD4oowyBXExMSiAgfawofe4GC0VyxGJkeyM9yWSzW29IG0+daWQp7ABIuIrKlfx9KnH2Fja4LMv/wHfu36Bv/rEpwgrK/b7y+mmmgmhMsFSSrgYUWd2RhtLy5w9cpbXdv6Eh994jTMPPkTOkEQ5SC2pmxkbgGzBVgDMHtnr57l4F9DsqAdY0EzG1FHJLrI7nfHL//FrHFlfYXF5xIn1RU4dXWRxEDh1/33s3trj2L2nEe149qlz7H71Za7vzbi5PWZzrWE6mbDXKtvb22SF4WgEInztxZc4vnaEh+47w/54SiRyI8+Qg21ONasGZjTevMiikINybXrA869e4NWLlxiK8NjJo/yMDhjt7HLs+Dny8jIyMY2/quIGgTSOVstk20hzF2HkobbFnZ2SfZECJAqy6ebDsHqdSy4TBe6cT/lOt/6e3feDrx8FDf
tj3r2t+Rc2/cuXwj7kxN985TInjx1Hs9B2iWlnKVNwxbDOFygwW+/Ehq56khhrNcXeYd12PMm9ztpEXeRcxk4ESw9ywAWly0Zn8Zjjex2sp5OlN2Irw5VEUC1DcpxwfGGTv/v4z/Hazbf4zW9/jk+d+yDHT54pPSHbDJzzuMqRU0dKLZIDVDXeOU5tHGPaPsznX/kaf+/kSZrFJUKhXeh0YovAC5rN7rXvqAffUFUDQtUYfCsO7x3J28jx2HXENEG6GTu7O8ymU45O11gLNcMTDQvDisFowPL6kIOtXUYrmxxVOHf/cc5/8RVu77XcvLVHbCOSzSXFB8fy8ojNzQ2q4Ni+ucP5q1e5evUaXddx8do1zq4d5Z/9zN9mWNeknLi8t8vzb7/N629fZBBnPHbfWX7pIx9geXFIGmd2Z5dpmnuJN64Q3L0kzP1GADotREpBZhQipX1OR4JOxVK+WCgZk444mZW1WGjiZTeW4qI/F3vdQZrMmgk/4AS687o76C4CqkLlhI+8dZln3QCaGhVhf9IV50dFc2TkbCKYxYIRFTWneaNQMY5RKjWJQ02n3mvUXQDReZGc1eDcLFpSapnruGMXyUIZFa6EYnqhUmakiLMF7Byry+t8eGmFpYUlPvPKl3ni5tt86PGPQDUkVAYgiEaCBlIXic4mZokLHF3Y4NroJjunTvP8i9/igx/7CZz3dLE1czjN5BhxsaXECYINKjUDjUKFL2Iyj+Cqipw7BurxwxGIMhwt8MxT9/PUoydZHi2wuOCoa1g7uU53MCN3HcPFNe47dZRzp29waXvGwd4BB7enRhVyZlO2uDigbVuapmHt2Do7e/sctDNi11F1M7QRrk72eO38Vc6/9RqDSeTc6aP84rNPsLK0YsI2zchMSZOINgF34hT58m2YtkDxKaCgaMV8L6d0OKjJCZJKNuHLqRCEXMNsMmGYZ3OpcY5mcOgKQmfeYWWep6bi0p9RUWLq3nWZ3h3BoiCi3L+9z9+4PqY+dQLnbGfcP5iSMgQxyv6wsiBSMeq9z4mIjaroj11VCxPnwBUUTXt2Mlhw2hZth6/zOBvRhVJG8blgwys0I11nNJvCdxI1aoZ4VyBcIaiNj3vk6P0sVkN+75Uv8vZXfpOffvKvsLqxYe+zvI5QN6Q4ZdZNaGSA9zWn1o9Ti+NPXvgaD2/dYnnzGAKkrkVTi+aW+UxrozwQmiE4RyKVee7MZyc6VRZrh4RATMqsSxy0kWs7U67uJ964tUUlmSfOZRY3VhhuHKfb3UaccPTYUZ55aJv40iUOjq2hs8SN6zeoRyOawYBRM0AFLl3fxyE0o4ql1VVm4ymnTq6zsVbz5T/+Ix5cP8rffeYZFheWjDRaO7RyRfot4D1ZWnIHcmQNfW4fmc2Q0ciallHRBFqZB4AUIZk22IY37ax31glUnpQyOhrwW1/4DHvfaXACbYz0jqYx2emT+55WcdrPKRfkM3Nja+tdl+ldESyVJk5Ptvi7O7usHF1mYX1AVQ3Y356ZEXhZkKgynbXUdWVQLBnnwqEfcQkMQecM3n6mydwoocDTodBcbEQEfYgUkl15Yc7ksU4UlxIp5jJhWPCqtDEjvsJJgUUVHImzK5v8zSd+km9efYlf/87v8Il7nuah+x4tfAmDv70bMW3HdGmKiLI5XOLK9jXOPvwkX37uG3z6J/8639u5hhRnfCdAqKHt7GRxgZQj+3tb4GFhMKKqa3A1znuOLnhWfOJ7b16k7SJ7kxlpPOZL39jlxfMXePjcGT746Ak2NxYtrVWol5bI430GC0s8/NBZnMscXa74T5ev40WIbaQZKDd3JxzsbuPCgMFogHZKCBVuwdOMRtx78hSfPneOSp25WmalH0DkFBuAVNtg2Pago5soceSoRovozgxZWUSymFm7NydPHZsc21UFTWu8mfp5u6dS3P4HSzWzNybcTDNraJbmo7mIqlFpbOsjxkKK9cXsT97bGPyuCJYkjvH+mFffOM+lzQ3qnR2OrC4wbqdc3N9hezJjFEbUrmGS4MzSOnW2xl1yRojzviBMmoCMOo9TrGnpbdZiP3cFOXR17E30nDf9hXfGCkDyfCxfn7J5BxSrpDZOaXPLUITswMUSqMGTRTi2uMrHzj7NK4NVXth6ize++gafeupTjJaWEFWyKIN6xKwdM5nsoc2A06tHub2/w7fDazz3+gu8fvsG5A7VZDVTcCWQHRCZ7m9TVYHBwgDVMSJmqudwbKwu8eGHV3n75g63r92kIzOoFxjVQ549d4a//hMPcnRpxLBtYRrJ9RgJAYJRd0bLqzz80H0sL4744pdeoV2u2W9tXMPk1jazyZi1zQGjxUVOnjyCCkz3D2inE2LKdopo8VXzd3gg52yEymmh3+cy47IO5JNLpFd3CfcdNXZx0a6IQqqt/MaVA3Zq6Vcqt8NVjlQLjQuMsmN/0hHKwLDcN+OEMlZCmR50dF1ksFCXzKbIQt7DOOyuCBYVIW4c58GTkNqGvFXh9h1pzzGctOztjLk9vcJePGA/TXjVAcETQkMzWGBUL7A0XGJ1sETjBoxCw8B5AwQKwuYKOKCqZo0abES08/0MdDe/YeRM5b0NK02mf9Hi/IITJCcUx7AekcvJlwGfi0kcjlx5VqoFHj/5AMNBzaSd8T9+/TN88v4Pcfbs/XOmQN0MmUw6ptMDFkaLXExjPvLUx/j3z/8hk9bETLnYLdFP1xLF+wGLy6u266aO6XRKSjBaaPjZD9zLEw+fxI/3+S9/4VP869/+Ort7ExoRji0NObc6ZDELDRB8hbaRNJ4QGn9HPZxplha555F1fvKT1/jlX/5tFpuGrS4z68zRf/fWDpNpy2w6YXllico7Yuy4deEmacNqwFw6974qnmvRUEwJ3tQLpfcSHXB0QH5jgiwGc4UhkNqEi4IvgaViLI6k5m3mxaHenquLih9VbAxHVHnHTjMUjzdPBCc4MWMUFRiOBvggheDZo5fvvk7vimABmLrA8uk1TqSaWTVgMBAuXRmSB4usnDqNy5AQ8x6eTplNJkxnB0z2dhjv7LNz7TrX4pSxtkQHoWloRgssDJdZH22wPlhisV6gCTXDUFmjzEFyVoOoKwbilGMfg3mhMASUQlcxVMWHymBLdD6r0OguGRMumcpz0TU8unkvr968wMee/DhfevU7XLx5kY8+9VH8YGikm+Ei7Wyf1LWcWj7GbpwwOnEGffMV8JUVtQnzas4eJ8ri0jK+HtC1LdPxlNhNqetMqGYcH1U8+sgJ0MjJg5YQP0SatlRdy8bygNGwpqkDlQSca3D1EJJND5Yq2WwaoJ21qHecWBlS+yFh5lhZHDJtp1ZzVI7J/pgbbWRvZ9/QtbpiZX1AisX7wIkxiZP1wCSIeahVAA4/EFxlUHkaOlxONiJ8QRAXcElNYelB22RFeiGtUovpk5LaaD4BGTiObaxRX99mmnuCbi7wsTCdduSoDAZVGZtn/bVD4753v+6SYBGSCLcbz9F9oakdvglUoZ1/xUHXcnM24fTqOs1ghcHKMivI3OfJ50K/bzva2Zh2csD+/g47t
7fYvfoqV7oJ49zRVko9HDEarbCxuMrGaJ214RpLw0Ua8Ub4owSGmumEGbYK6ix4pl1XQAZDynqKTN8QRYwI6AtMXYvw8No9vHL7Ah9/7CO8fu1NfuOLn+FTT/4YR4+exHsYLC7TTsasDYZcvHqda+NtiBOTTYfajPR6A3FnaeVscsB0vE/XTgElZYyHnRSdTQkrCywPFvn4jy0xvnWDrfPXOdiP7McpPozQkbOhT6Em01kKq7ZJSBUIvuLmpYu8+eoNfvpDD/PW7SnfevUNSNEcdQTI0M4mjIYDJvsHDNaXObWxSRhWkBy57cgDT+XN/V+qCinDgySYLVKOkJ3SBcEfH5CuHOCfWkWjLfScLWD8yJstkmNuvRqCx1X2/a5x+MXA0ZPrLFx+gwNRxAb0ICJMJzbRYDBowGWqwhbpdUfeHw56/UHXXRIsoOK46pUnBsH0Dd4aS7kU7LupZZJaM/tWy1d93+kWAQ8+ezR4mtGA0cYmqwhngYTSdR20M8bbt5lsbbGzs8XOlQu8mp5ngpIHA8LCMiura2wsrnNseYPVapkGIbiAVN702UmZTCcsDEeEYB0ZFSCbjNiFCkGt6Vg0MkbIFB5dP83re9e4/+g95M3TfPY7n+cjZx7lsYceR52nHozQOOPMyhHkxsu44HA+UC2u0LUVVZwS9/bwocY74WB3hy62hODxfshoaQ3vPO000+7uExaGuMYT2xk33rzB5Qu3uL0baRZqUjTNvxslkt6mWjAvMlGPDxVSNWVKc0dVK3vTlis3d02Q1hlx0heVpyuanTp4NpeWqYcDCB4GnmoUSCTG047RaMFsYh1WTDqHL2P5kkAOEBfBXdvGhXUkdbDgzLSiEB/90KET83fzlSd32QzSwXpabWZ9dZnN7LjmjBqUNdPNDOwZjnp9sUHSzlkQG/z+XqFytwRLYdpe66w4902YD8bpCmW/9hVnVkY2QMiZPZEWc2xVxYeerWyFoBYNSe+Q7+uKatDQLC+zefZeTuVkE4bbGeO9fSbb24x3dtm+covd9jpvM2PPJdqq5dSRezi+fprNxXUWQ80sRxbnxEjmJhY9KtePsbYzypO9Fri549zSUS7u3yII/PzH/yp/9L2vcvmbt/jU45+gWqjIzrPsKz5y9AyfjxN28x4km9+epnuAjeigzH9ZXF4zTY8bUfmaELzVBr5BFjZhuosLDr+5xm/8d58np0DdNDx071GeTB0nfWb52BJ5YgVwGA2tqYkjVBUSPM2w4siZdQa3D+huz+anZ7+ygjpi6vBOuXzlOv9h9lXaD8KzJ+9Fq4L+ZRgfjFk8smDDmDol73XUgwYqU8bOOkErIXYdQydkMbGYdECxYs8pkasyXqOkzao280UPEoygXqh4aG2TV3evk3JmVpz2B01dCn0xy19nGUGKCV88Fv7cRMq/9KsssitZ8Ml0GYhx55zzZIRRMzCaffEpVu8o+IoZv7XdfDBn5f2852KzSYyXFAuNvkdGcghUdWBxYcTi8WPGM0od2iWmBxOm4wMO0gE7N66y8/J5LuYxW01kFpRHTz3ByeVjHFlYYxhqcy8pZn8O+30+Wz2jYiTOrNbRP7mwxo3929zYv8lfe+yTPHf5VX71a7/JTz/+STbX10gID6wc4dXRAftff4Vc5rCEklWnlHC+YmXtiHHfYsC7Ci9Q4fn6y5d56PiQlKZcvXWA7E9Zv2eD5WPH+d7blxgcTDl4dcz+ZMwHZi1nZhNWji9QF8M670d4V5PVvNw8cPvmLm9fvkbS4necbHaOaYksJZ21JsA78cg6v/Xcd3hw4whrg2XEOUIt7N86YPn4klnOTpM1T0cBV5xyyEKuPXWXcd483HJO4CG1nRnu5YyLGRFPGkdkKdhcGW/9G+0yaa/j9OoGC1tXuamZGCNNXRVXzEJzwZA2V7h3OClUorscDeupB7ed6cPrYnvTZiGXoaLOlYGixcmlt0lyZMQlE3gppBTpklmoxpxNKKaZEKxzbwFkaZ/53rp5o880LRVu4Bk1NcO1ZTZEyPc+BEmZ3t7i0tULXNm6BK9d4IXqbQ4WPUuDBdYXVjm+tMGxhVWWBiN8odf06aSIlLTFaqAjS2tU011e3XqLp07ez+nVE3zupa/yzLFzPHzmfgbVEveePsnLX36BpNA4OHn8FK+9sWcLSAQvDTmBq4INHHeZpoKt3W2+9Z032frqd3l1e4+feeocZ556mP/yH/4k/+KX/5A3Lr/NpNvne1cTkzThI3KWcyNhbVCZVEHL1ABfcfHyNr/2u99ia5zY7WaIE7MZ6t9fbwABiPdk53jxtTdYWVnitctXeKpeoG5kPvFZJoJ6eybaOGQaqRcCs30bY1c5h3OZFEGGNa6bkcTGUOQuwSQRluq5C06eKm6pQqfRastkUPHmiU1OvOa5kiKV98U3zp6/D8G+t8hBfHGoid17T/66K4JFgRACB9Ezbac0zsbQdW0k51QgP2c+T2Vib/Aej9CpGUUIGadK5R2arHFoebWAllkcZd6IUzuGyQpOSSplsG5hHfccMMrkKCylG22u8eDGGg/6p6FNtLf3uHX7Jlcnu+zsHfDSlYu8kKa40QJH1o5yZv0kRxfXGFWNoTXR7F1BCWQ2Bss0dc3L19/kgbWT/M1nPsWX33yRm698k6cffpKGilOPneXW1W3yxbe5tH0V1LwDPMLA13RFFgCJPNtD85Td8QGXrggf/uA57pu2nDt3FGXC6XuW+N/97/8Gv/Efv8Uff/VbXB/vMr02Y315yD0PbNJ1NkrQp2Q0Hm+S4rd3x+xOOxyeugqEYCO2RTD/r7IYV1dWOHVymYsXtljxQ17Yvsn6wVFOs8zV67c4eXyd3Fod4Ye1+Rt7oVoa0u3Z2LsYHVXtYJyR9RqGCYrBoRdHDOYDYFPECs49y4hC7CJePTJy1N7z0MoRXty+RBIteiWrcw39Mj6HanERKuTK99B+3R3BAtYMiiEwyZHFlNDkmHaJmFJZDI4qFB5XLu6HYt5gZhdaxjj0PZVsFBD63knxuPClphC1o0Ryb8pXpq0kCtco2785jw8BNCFA7ky77b1nsLnGmePrnBahjZGt2S5Xr17i4NJV8stv8l3O87WhsLCxyonNM5xaOcbGYMl2z1yjCqu+4uFVz6u3L3Jm5SifeuBJ3ti6zhe++xUGKw/y2OPneHX3Oa5m6+QjzBWHdQhITjYFazYlx5aDwRAWVnklKc9/4TuMlhp+YXPA6Pgiy5VnbWmNf/BLn+Lcfev8q3//OcbTfV66eJUP7d3L4kZDlRqTdGvGaWZ5bZngAynPcM4xm3XFCsr6Us1wSDubEBTUVdy8NWV1Y50jawMWFis+/+qLPLt6kntPHqceNEXhCnka8bURJOvlAQdXdwiNJ83MODFvj5EjjWXarkxEzhlXmzpWvDns+8YM/qIo0jgDU5KSu8SDZ86wefsKN4qZnAWCZSoixVMOYT7064es0bsiWASrqzpxjHPEAZMuMu5Mo1p7X4YFGeXeO6Pa95Y4pltxVGWj0TIOrde8a/kFZdapzZnXhIqb+41lESRH05tk
g2k9hcmqNo5NySTJeIKNiVAzdHNi6cOx4TrH7t+ku+9hLm/fYHdrh3pvit/e4eall/ly/CbThZqN48c5c+QU96wcZzFU1GHIYxsP8NrO27Sp5d4lAxJeDYml4YAwTqwub3B9clAo/4nZbMJwo0ZnMyTPaNspqpnkA9XCiElnPZLl5WV2X7nN81+/zHh/z9L+QcX52ZS9W7fpJLMjFbe3x5ySdRs710V8SiRtOX5sg2ZQw545N/oQ5szdFDtLcauKpdUlJARGaysQO4b1iIVRg6hj8+gaq4tGYaHYfwmO3Jp3wmB1hKtqUjT1avKeOGupQkUeWI2iQJ5ZM5lxNC29F3RmyJwEqxWzYoIzhNWlZR4bLvOH3W7JXsr0gb6IL5LyXuN7mK794OuuCBagLGxlmqwxNJ10xOLqCBkfTF+iakpJVajEWKPOyVy70jNwq+DNF6znBukd3K/C0bLubjFP1DyXE6vYbmUbuJpvsRp7GQQXBMr8R1Hr7HvvrWGZEs5VPLB5mrR+ituzHW4c7HDaVTzROabXb3PryiXeevVrPOdbqvUVTp44wz3rx7hv9QwX966zP7nO6vIybmVA3N1iMtlnEDwj35BiS0dib7xP5WEo0CabAJbFEFvxDpmA7s64ePUttuKE+0+ucd/jZ3nwYydolpd58T9+m91Ll6momIowkwxBaLuOhUUzAREPo4Wa4aCysRgwd5Ppae5aJginrAwb4e03LrA4GjA6eZrj62vcDi3LNOYWWheulmBpFFY3+CzUC0PSrCPTkRRS1xlqFQJ5UCGCubQ4gaWiQi0FvRNX3HScmVtMIm6xwonwgXvv5xsvf4dpYwOvRCg+CRZsIg7RTAiOqjQ23+26O4Kl3/2dIzlHl+HG1oyZZIKvSNnET8aqdWiZSBy1s9qkUEycWrNKnLObIVLMCoxa4so8FUHMJb/8ehubZ3a93uW5hVJWJSpUBVSw6b7eNDRaejxaRGVZ5yrL3lXMezg6XOHIaIVxnHJjususWeTEuQ/xsHji7T1uX7nC1dev8K0XXuKg8awc3wTvWNUjXHmtY2l5gaObx7hw/mWG9f+Puv8KtixL7/vA31pru+PP9em9KdtdXdVdVd2NtgAIEARAAiRASgyNaEaaeZqJUExIoaeZh5mY0MOEhiEpYmYeKBdD0WlEkaBg2Wi0r+7q8i6r0pt783pz7N57mXn41jlZgNAFkEIoEjsqI7PSXHPOMt/3//6mwaQc4ZTQ9w+Pdlhu9tBlwGcBawOZSWAwZrqxgcoSmkstzl84y6/+1S/S7CW8+fYt3vr+27z67i1OtLo8+8wTvHXjNvujUnwL6hg0CwRfoxNFmmZRhuvF2TFOvhWONElRJsFWlsHhCKMM3lpG0ymTqmK0PSBfzmWomyqBjAGVG1RNDKUNFAs5R/cn4AP11FMNK0HNnBPhRRUk6dh5OciUUPKNMrjgxd3SgnagChMl4YHVM6s8cavFG0wjL1DLAFN4ubjgyVJDmuo5UvaTnsdjs8Q+g5jUNNgdczAsUV5UiWk0pxBNZPQujk1ucFZeTMycch+CmCV4H6soArmOzobxYxhl4k0ReWIw90kWQ2xRYRrjUEHjvTinZIlCEsEVM/2Miqelj9HT3svcBx3m3l0NnXKuvYwNjv3xgFvTQ4p2yupzT3LSPIufVAy2t9jdeMitjXVu33+APdFhpxX7J2MYj0dYJVHUSmvubW6weLZFI8uoXYryJdQlk819eieXWDy5RrPfQrkJ/+o7b/CtV97n/vou3TznU9kS/5svfY7VX32CV19bYefBDkmR4/xUBo2JmEBMjo44it7JxqSY1FCXNc1Wg3a7IDEpSSNnsH+ISQ0nTixxuDvEoHnj/Ts83VmRQOJUofMEZyuRhE9dRAs1ITc01loc3duT98dDtT/GjUt0M5fm29RgvYRBOaHrh8oLvT83UQouGTG+ErhZ2UCiEl48dY53774vbkCRmR6XHc1GTpKoiPL5Px8NfvDi31VVFUkayLIAZWBqS6w2FCTMzmwdCW+yRmXCr7wXxxZl5t5bAjnH0io2xzOqv0doDkbJ7QFK7FeVpFeFoMQWKGrenYLMpOhZ7hpimI0C54SCkURDi6C8uIjYWP4Jrity2GBZyBss5m0qHLujI8Z2ykLWZPHUCr3jSxybXGJ0NGB3sMcrgzvc3bpHOZ5S1iOstXLye8+0rri585AnT5+laRzDumRwtMOgHtNpLzMcH3Djzk22tveo6hqF5kyzw988dp6nz55j8ReeRzU9L75wnneUJWs2cUqsZ0XupnjzzevsHY4AOQx86aKTSk2Wtbly9TQ7+4d0mmscHAwZHg05e3aVcjDm8GhM1l1ltH1E7hoEZUmnAdPJxWdNAzagnRwGWbOg3J+ICGvq8IcVOs+iF7KWG4TIEA7gLeLJMKlko4QEP7JoiyQt54FgHKfPnuT0+m1u+SmzUzFoTaORkaTyXlqnsOHPAzdMCbqknSPLDEU7Y6kF+wMnlIYAxuQi2tHiRplqOdFrJye3hBYZCDEbRTFPvoUo3w7So0hhBuKY7h85IzJjskotG0PlAVFkeh+HX5HKrSKdX3yymMPVSaIIJFKKIeiPEJY1KhjpKazA3+3OMsEH9soRdw730SrQSzJUmrK6sMwTiWOvHAupUcZHj/qFENg+2GO53+NYv0/pHZu727jU8OG1W9TOEoKilxZ8/vQJTk0NT+xrTk5zWi+dJjvZxVb7GA3PPHcek2ZoQpxnOY4OB/zuH7wpZnmRj0bs36rKsr97yMO76xRZymBQUk+nTHXBw51DmklOq13w7vYm1x/u4WtHUnvalQIdhDHeLEh6DZpLbfIkIRxNCaUlQZGPFeb1B/Qv9UkichkSg5pKr+iH0Q7KgPIK08nQRuMzxAVnKj2o8h7TzPn82YvcuvmOGJckhjQ3gq76SJ8MkMxyf37C83hsFojUXk9bp+g0o91WFGnKuJahklAsNEZpdPCieotuIRGunxmIIROzSJKT90YYWkHcI1WkOwTnxI9YRTPxoEhTYQwIm0QUfR4p53TwouAziUReREIfPojReJDfcz766RrpZZhh/AFQicwEbEldOcpqyqSaMpyMOZoM2Z0esV7ustZeZK8asuOGBOWZ1mO8ltr8401oIHD9/j067Ta9/iKV97hkSM+0Ge8egqs5F1qw7tjyJUkjofeFZRbPJFSjPYwJpM0GKaJrl4lTINiKt1+/xsHBhDQ1VLWdf06lFM45RuMJH15fR0XWhFKKuioppzWu32Z1qccvP/Uci2kbHzxpEIN2r4VlXFvHdFxSplDZmvFCzvZ4xHQ8paws1+8PybY0eiROmB5FsBbqgKk9ygWMFttfowJFntNcWMIstmi2C/KppjhMyEYFp8k4phvsJp4sVyJHDw7rFJWtyZL0E5t7eEw2ywwSzn2g6Q2J0RSFopMZhpXHu0Cio6mFh6A1RkczbqVwXkI35baYCzJmHcXc0QWto/GERFSgxXg6UzM5sTiIKASOTKKJho66F+fzyD+LoiYlV7dPxJJHh6jMDIpQ15R1yWQ04Gh
4yHAy4mAyYDAZMvKWUjvSNCPJM7rtDp1mk4Vek9XlPs9xjuAsD+sjbo13uJfskxYtDiYHcmgE5iZ7WmnKuuatDz/khSefZHVpicoFKBq02z3KhwP2bMmWLml3GgyX+zRcibq9wYmzlkYjQzcKdJZE0ZvCW8t4NMbVDq0NWWYYDMMfs5gUIWiCd1ReKPmpEgh4PKxgMVBOxtRFCzuuSdKMIkujqjSgkpSQFtJfJALQ7JcNJqMxSmmWr66QLzUwqSaMKnwi0/mytNgjS02g9JaqqplMapyHybRkOCnZGY+xlcVVDhcc9dYu+lxOttKEEL3cnPSnYgYePrFfgcdks6BkQaZVRRpE9psWKb1Gwua4FAWgEqTKOwhe4N+ZIEvpEN0pJVFKR42KNkkEBWaNf4i3hUwoEyNS0+CEFjOLiRBHVh17ooiceIVyNmaop+Km4muq6ZRyMmQ8GnA42GdnuMdROWZCjU5Tkjyn3enQ7/RYOnaOi60m7bRJI0lIFELmDDZazQaCrTGpke9rmLAxPmAyGpE3GqRHKbUXQwcg5m8KvD4pp7x98wbPXrzM2vIS+1NL2mrQ6rWp64qqHBNszShUPNgf0384oN9vEpyl02jMTQhdNWUyGKHTnMtXz5J/6z2KIp1rdj5uJSTzLanzjYJmnkHlCKnY5C532nhXs3e0TVlCq78qcdsajJPbSSVGQosQQCYtEqYTg/eOMLb41KLXMpQPmIElJJrcGYpCUDDvEoLJCSqalvQVYSLm4SEaVpAEPuw0+HBhQJopvHJIMLWK9KNZxa0/cZk+HpsF2Sw95yjSdEYmpShyjJqKYjpI7odBunsV5Id4exmZ6kf9gkHcEK0T9q/RaoYainWql59dPJlnIToQsCGQqciPco4QAvW0ppxM2R1sc/tgg9wbDoZH7JUDqsRTNBp0mj16nR6Lxy9xvtmhmxc0dCqcJhVZyAF8sDjnObKllGKuFlQuEb9iH6CaWnYP9tmZHrDrjlhZWMMHT6PRxo0PcUG4WbOUqlkPMxgO+ejBPa6cOUu/mTDyDm80WhkIKVPn0DpB6ZQ8KSgnliQxVOORaNs12PGUuqro9RYxObQbBa1GQWoGVCHMB5JA1KwLOTHNCzq9HguNJp12i3c+ukky1ZzsnsC0EgEGrEQNGh0zcmovHoZpDDBKAlmREdQEZWA6qsVvbZqgEoXarsW0op8TyiDvm5ME45AYIVRWTj5eECOLkAdcHrgWjvB5ImWgE2ssrWT24iL51ds/Dw1+kMHgYl2R5gWhttHMW8WsQkiNnpvwEfuL4JhnmejgJYEqDszwRL1LfCHMrAk3c4h3RucHxHLIe+oylk6He6xPNhkcDhlUE6okoFsFdZLy1PELnM5Oc1ZrMpPON6fSQYzAgcPphJEWOrsKAeVd5Gt6DAqTGIxOKLIWiTckOQQHdzbu8s76Da4sH+dnj19iPJ1wOK347Wqb5YU1srTgYLCLdzZ6pT0i/4UQ2NrZJk9Szp06RcMYyuhmj05I8xbtbotxrbj14Ag/rTh+vI3WjqQQSbGzjvbKEkqnNBoFSwttHu4dkmWJ+CQTT2Q1M/hTGAP9TotOv81S0QVt8M4zrIb4FNI8EYCkFPvUMPUSTZ4ncqsYMZHQXoNRJJnwq+3U4VseN3KYtoHKSn9YaGikMtjUSpzHDajUEKZaDAkz8VFQWnN7fZN7S5rRqGJ7c58kMTLEdpAXaXQzDTgH9SeQKR+PzRLpKKuTUiSuka9VVyWpFqq+mOPNSqjZgDDI9DzeNjCboshGkEWs8EqGWFpBsE4adedwtmZ4eMTg6IC9wz22yyMOqiFkCc1Oi36zxbnjn2Kh3aOTFyRKMXQVnazAaJEBOIR9EIjkTedJQLzKghezCeQ000BaFKA0iU6R/HZNsI7rD2/y2r2PWOn2+GsvfJkkwGR0xFGwfHD0kCd7p2glKd/LbmCD5+hwhywrSCkYjg/nzorBee5vbJBkGSfX1lBJwtgHlBeG9uBozATH2CjKaZMksaS5QxsRSa2dWyFJM5K8SZIVLK/0aDzYodHIGYym840ib1ug2ShYOr5GU2vW725w5YUTWO/o9boUaU7lAkXEW1Q7lcMjkzmLGIpoofjH298oyY8ZVzVOgzOgKy92R8dahLtHqHGFLxKpMLIAaQLOSc/hPW4swIs2isp43pzuMeynODztTpMsT0U8GEOgQqRQWefFV/knPH8ar+MC+BaIvwHwT0MI/2el1HngHwJLwI+BfyeEUCmlcuC/AV4AdoG/HkK4/YmfJIAhcLp00BBbUx8FVEZBPXNDTB/ZsgpFPAjCNDu9dfTBjaleGkGOVAA3mTI43Odof5f9/V22JgcMlcMXCa1On+Vjy1zsXmCh0aWVNkiVQruakEhEhIm3hrKx+Y8EPIz0Pyp4XC2bo/ZSvomCT6F1RpqaONVPohRaqBoP7t3hlbvv0e73+fLl5zjRX6CyJde3NhkZx92Hd1kwBae6fd483GDiJqwsrIJz5EWTqi5Rk8M4/CRO3wN37t3FACePHyfJEybAtKpwpXiQVQns70856ID2NdNqysJKk6UTXXk9VQIqYWVpgYV2wV4rZ2cvEg6jQ47Whsl4gvY1vaUFxqMu3VaLw9GQ5V6fJy6cZ+zGdJOGsIhTORjE10lccVQCTiPivbGFVNFcauIOxtS1pXQ1SZpKgdHL0F2D3xzLTb7WAqeoS0tSRJMLleDGDp0nkMK9Bw+50/XY2pKZhLRrRCAXPcOUkuGxUmouM/433ixIct/XQwjD6Kb/HaXUbwL/AfCfhhD+oVLq/wX8XSTl6+8C+yGES0qpvwH8J8Bf/6RPoBQkwXPSRQWg9XNqBV4UcihPDJkQZr2SWLgsyO0h9BT559o66vGQg719dg+32B0ecOBGTFJN3mqzvLrM2f5Fltt9ulkDk0jDrrSaC4lCjNYTVoAHlaK0Jk/FycUrLZ/MeXHxt06sYAlyi8VEMIBEG8TxL9rAljXr9+/yys23od/mS8+/yLFGj93xgNce3CLLM071l7m9cZdO0eD54+e4frhDd6FNo26RmpTDvIVWMBjuxdP+EZTjg+SkXL97F0zGycUeK4kmFJrKKFKTsNjMWWwZcBV37x8xKSvGVY/F1R6N9jJpI+B9zcJin24rp9NsUeQZVS1iKo3QYIbVgAf3NvGl5eqVcxxb7HE0GPPF0+e50FsRcmquUJkEqaixfG26I1JtPKjaoVJkg9ayETsn25RHFeWwpNyvyFcLmHroNVE3BnBviFntiGWvVbG0FqM81TTgAnVlefdgl6OlSE2Kor+6cvNIRkJ0rYzSh/9FE/zoXTyM/5vGHwH4OvBvx9//r4H/S9wsfzn+GuCfAv+5UkqFT0qJAfp1zbJOcC7gywqVpdEpRXa7ZKcIC1gFuWFMCBgfKKdjRoMD9g632dzfYlAOGCtP0mrR7iywdOw8z7eXWGi2yZJMUA8VjSiCkAAJwhgLZtbvK2kag4oQcwDvSLTGWofTFuUsMfGeEAKJSSUqT8vMQciHQusQ83HF3sMNfvTBj9lP4aWnP8X55WPsTC
a8vn2XPHguLizTThrcWX/AzcEmP//Ec9za2+LU4irh4CGtZhPnA847xqOBZLq4MAPI5LWKrxkhcDAeY7KUlgos54aVtsZ6x+7wkAdHnuAcqZHDpkZxeeSoyilpOSBRXZrtFlmSkKcpjSLD2QkuSoqn0yk6ppxtPNyG2nO0NWD/8JDV85cJ5ZSFlUXpC3V0EW2n4AJ+UktKWAgCVDg9l0AYLbOt5nKTvJVztHXI6KMhxXKDxkJBZSuKpYJZiptwjeIg2kMwGqU9+0dD7qY11kucYpCuXjiGQfSnc3aA87El+snL9E+bz2KQUusS8F8AN4CDIEU3PEr3go8lf4UQrFLqECnVdv7Ix5wnf+VLq5ytpmR1pKfM8x8jadhLqeODx1nLcDpgf3LA9nCHg+kRpQo0sgYLrR5LFy5zudGhkzRJ0zTeSpo0MSglTpVBRXluBAFCRNe8Eq6ZibEXs3Qt56w0Td4Lpdt7vI29kjaYRIiG2hhmvGftA1YHEiQLcbS7y+vvvs5td8jzVz/FV5eOsT0e8s7uQxaygqvdBRppjtIZ6xsP+MH6NX7pMy9xd7DHQqvDiVaP77//JovLHcblGGdrev0ldnbW0UYyGK2t50iVwLwpWd7AoRjWNYPRCFdPsOUUG2qy1JCnCY0sIU8z9Khid3fEyukJRTUhSRv0em2SPCUvDO1mg8FgAnF+5WOiWbtoUtYlu4dH7B8e0skKru9s8qmdbZa6PZgoIbhmYsGLkhgIj0MlCCtjIonQKlM4DSpVeBw6ES+0EYF6UGEHFfrkAtnJDtoGlJv5YIPKhGvmERnF/bv77GQR4CCWjh6UCtRWKpfaelScsYSPIX3/xpslhOCA55RSfeB/AJ740/y7P+FjzpO/uheuhMuTIbZyMK7QzRzjPKm2BHfA3e1dNid7HNQjUIZW0WaxtcjpY5d4pmhRJBkmMSTzmIE46ETNN59DIGUbe5yZpEz5iFGZmROyvLGEmUDMzmc0Wmo1cfKPWS/iI2ZmfJboPQY+iCpvurfH2++8zoejLS5ffpJfXP0su9MBtwZ7nGj1OZ31sHUV6egNjnZ3+eaNN/j5F77A1uiQdpJzor3AYDTGKbjoOtw2UmtPx0M8nnarTbPZ4eHm/fjqSq/WbHcj/UcRTIINJVolEqrqQSHolFGJACo2cOP2DkvLbfJGg6y5SG+hj7PQ6zTptZusbx0gvLjoH+wtdTnmWL/PE+fPsVQ0+dkrT7O00CNVmRBKQ0A5HQmzAZWn2GklgAuihTcNadhDJa99MEBtCUqRdnP6CzK8lCExkGjwchMJFUeBCQQr1KTaWu6NjnDtWC6H2AdrhcPjrPTBzsuczdlI+f+zcqQMIRwopX4f+DzQV0ol8Xb5eLrXLPnrvlIqAXpIo/8THwVcSjVj7ZimA47UPoNRyX4d2AxDbJ5wrHWOUzpluWjRSFLxGNazTBeZys+4Y8FL6UR0QRG6torolEYnEswKjuCiTaufvQturn1x6lF1E0IgqEQ2SyI3z8xR3ykZZBKHdsF76vGI6++9zRsbH7F89ixfeOKL1FiO6gnnuqvkWlPbCVVVkuVNEgzleMxvvf09Pv+p5xjXJUYrTrV7KK24vn6Xpf4CF0+cwe48oLZlTBDzdNoLdFodtrbWY+0tup9Go0mSJVTBkZsEkzXwlZilH1to8sxTa3gM77/3gCq63dzfOeLczpjV0zXO1aR5A5PmTKsxvU6TxeUl9nf3hCoU+zlnLXuH+3xwx3F8eZnfeKfkbKfHF5/8FN2OmIKrQqJYfSU6epUpvFPo6hEbIcR+EefFj82IZ5hAjkBDCa1fq5jsFgeJNkTABzDSu45cye1yxCSCHtZa/CyKJJI8ghfQxvmZiIN59Pu/0WZRSq0AddwoDeBnkab994G/hiBi/y5/OPnr3wW+H//8G39SvxLKKe+8+w52ktBWx2nmTRabq5zpNWgsJmw9HLN9WFJ5R2Fiz6GNwJ0wN8mGQB0USWQWRxJD/LWmCopURa6Wj5SLEAih/tiAT7QaIjOdpYARE6FU/E9mNUprfLxpZjT6alqyceNDXr35Fm5lgavPPU8rLWg3G6wWbQwZ3pdUbopCkWctjEmwkym/++PvcPHKRZTWTMoJFxdWkGg+w53DLU6fOM1SZwG/8wDvLU9efIYPb1/DqIQ0SUmShKquSNKcJCtIi6YI3LQYfZjEYMg5f7zDX/rqZc4/cYass8gbr37IN37/dQbjkrTZYjKtGR2N6a5U5A3Nl7/8aT58+wMGhyN2RiXkTUxdc+nYMTY2H7K1vUW3aPDpCxfZ2D9kEjzPX346uufruV9xqL0oFesAw4DWAdIIeVspk6gtOjXSzCtIcoNPAhjRq5g0Wq1mRij7cdP40hFSBdOAShTbW/vs51LWK61IkzweeLP5kBBCRRUrcLiwM34yIvanuVmOA/917Fs08I9DCL+hlHoP+IdKqf8r8DoSpUf8+b9VSl0H9oC/8Sd+hrzBzktf5K+XFalNSRfbAlx4EWkZrxlOLLqOpY/WcxMCtBArZ40aXkoEibmL6EaQhjGL10TwTti1Sk4XAgQjQUYBsVYyKJIkiRvHxTnNTFSm53MBHTej94G9e3f54XuvsJE5Lly+wPmF4xxrL9ErmnF8byX70U5lTpM10Chc6fneGz+gsbbAamuRSTngXGdRvjdlGBwOGQdPJ80pjMHkOVmWstRtsbZ0nOWFBe5vblLkHYomJGmBVxqdtyBUcsHGEjIvcs6cXqTRFPtSnWpe+KlnOXdugXffuMPNuztMpp7D/TGLkzFJy3L16UucOnmMh7evU3QN//zVfTqdFl0NP/XTLzOclLx3/SP2D3c5n3dZy1vkJkWXQSLH83gQGTBWSROfGkgE5nZTJ8NJrYW3ZT2qKeiZF71FHC8o/FRe72DF8FCnCp+J/Sp4Qd6ahp3xFJfnKD/zlkNY6TBfF0pJqZ4YLcxq9Um5X386NOwtJM77j/7+TeDFP+b3p8Cv/Ukf948+P8zbXDZHfPHAQWlRaTI/BVrNhIU8Y8tGgzc8Oqh4+sswygSZjsv+EXd8Nd8w4tAyE3WJeIw5OuKdE2/eJJH6V6dChUcM7dSsl1FK6CIotLKRm6SoDnZ594M3+XC6xfKp0/zs8fOc6qzQyAqC9fGaD1hb4b0lSXISk0nNbWveeOd1dnJ46eQZhtMJZ1qL8nUaqctv3r9Lr93h2MIyIUiGPBp67RZLvS6Hg0NKX9PuL4rxh6sxaSFDz/g12nhqBmU4mljKIBZKkKBNRndpgZXlbR5upxwMJjx4cMTqyQHtFTl9m4tLnG0a0obhjWuv8vJXrmKnmnd/+BorrYSvvniFpGiQ6MBi2qZOB/izTUySgVWEkYdxQE2lbHZ4tJN+0hgNqXDOQqSc+IlEsOsikYNTam6UcoQgEgcPQnydit+xzhTeRPeJRp9cD6mGR4JkKogC8nkVgVIYZC4nnDc9H27/cc9jM8G3SvHPTIuzjUNO7I9Q7baA1DqQppqlhZydYUkVPBrpEZwWdIMgcdguwogzzy5PRIWQU8Mjw0+lYlgr4
h6DD6RpDklOgotYj5RyKuYTKi1T3plZeKIVo8GQH916k63DXc6cOMMvH3+epaJDoomfKYh5QnDUVYl3FVmSSz0e4eSb16/xwXSLLzzzHOPpiNNFH+tqskQUgtYqXj+4w9WTZ+k321jvmDrHdDrhe2+8QekcHk2Sp6AMJpFhoc8KJoMxjUaKDzVpls5vwINBzXAgHgfeCaKVtbocP3uM999/yM7+lHLiOXPmgOWzU1TRRgNJ0aPX7/HSpS69TNNYXuP0yl+iHK3T6RSsrfVp9zKU8FAEStcJSqWCGLpAqCx6q8TvVDCGUMnBRRAPAJUkBOtIcpn2C7M63kIREFAuiO+bCrEPQUJqgxPZ99Ry/vQ5lneHDEZiVmE+rlWZVRjxdhHvhT8Mv/9xzyePLP9XfLQxHCUp/zgrqIzDjSaiXfAeyppmO6VdJOBlgTuYh68SB5Jyo3i8dXKixvLMEPsUkAFjCNS2JqDJsiZ50ZLTxoeImclG0dqg5V/LK+uExq9cYOP6dX7nld/lQTnghQvP8oUzz7CcdSIhL956lcXWJbaaErwnTRooLTcKtWPj9h2+v/ERzz35LLW3nGwuYKNJ4M7gkHt7W7y1cZ09UzFNAr916zX+H6/8Bt9fv4FzgbF1JFmDxCQEG30BvGz0qk6onGG5W/Dc5SVOLRo5NDwcjmpuPxgyPBoT4jBTpy3ay2ucOrMMPjAaVzy4d4CryvmiVSqh0erx3MvPc//d61QNyBqwFla4+uRpev0GwUm8oPBbEhne6kSQd6OhkaFOt0g+1UWfaaCbCUkmxD2TiTOlCtGSNXoRx0kKfmohLolghHyLRqLax5ZQCXu5OpjQPbnGybVjcbAs/8Y5P2/qZ6MJCeiNxosRKPpJz2Nxs8jiVGg01xsdvjme8jPjCmyKjwTKTGv6nYyDSY1VQpCcHwSzenU+9XeSDTJP8Qrzl8B6OWWSNEPJPRMbeSealRBN9rSJNkmCjgWksRzsbvPjD37Mbup4+dMvcbK7TOlKiapLYoZ6cBBqHA5bi89WlhaiedGaYC37W5t886Mf88SnnyVTmuPNNluTHQb1gKwLa+c7HD/RYeuNfZLDlO/c/4CqdjI9zxqAwpjkEWtaXkiJ+1YKVpf5hc+/wNdOeooErl17wP7QcePBiMPhhAfbQ9Y3DugvL5LlbdA5WaPD6UtnyF+5g68sB7tjyvGEvDdTSgZc0KycOsVi6xr2/fd48OQXWQv3KIdLpA0bXVcinUVpFEksbTRKSXmrlNBc1LlAaI1x1wdgNb52qFRLZg4e48HZWGNPxLPaJULF91ZiPQjiKsksrdgGpgcVvZUepzkm72EcB0jvEhshHalQscT2zPQsj3kZFniUceIJ/F6vx1PlDqdEGyccIhXodFKyPUPpBC+P+NScPaxUzFM0wttSeNkwKBIj2vxgVNxI0uPM6JcSMeEIypBqE+cIMcRVGexoxPu33uLDvbucP/0EX42Z9yZJUJVAkrN5TFAe6yoZumlNlhVRiy+isNH+Pv/qze+zfOUMvUbBUjNnr3mfsy8U9FeOkzUL0kYTpRNOXzzGT//Ms1z/cJPX377Dq2/fYTipSFJDkmekafYoGyZqdC6e6rH28qf4t758BTO8TkBjkoyDgwPOnGixd2g5OJwy3BtzuLNH0eqRZ01Im6ycPMnx430e3N2jnlrKyXSOGqE0WjmyRoPPfP15br79IbffvUX/9Ao/vvGALzy3TChFyuxCQDkHaSoUeB15WAGUEeqQAsIx8a+2H+xDDaGYwfIaXwdMpnAOcA5fWUwrFdh/YuXvaB/zRRU6FYf/2gaSToMTrKIxYqSOi3MWjXNO5mRezbU5f0IFBjwmmwWIxaPcMOMs51+0GvxvD0ekmUGkuJ4iVzQKxWDkY9ScisPGOO2P/KC57jF+TOci4hVhwTrCw+nMgX1mHk4gDWIOPhN/WWvZvHuTV++9R2tlgb/w6a+z2OzOsXoftRw4ETwJFG3xzqKTjDwpkC0d0EFRTUu+++orcKLHhaXjtIqMB+ltLp5vkWQKnRh0koNKZSakE/oLLV743Bmee+EMn3r9NP/l//f7jI4a5M0mRZ4RKidlHp7nLyzy1c+f4Z5uoydDgpcMlNUza2R5Tqs7Znl5LJtcJWRpSjkekjS6mDSl6CzxqRee5OG97xFsYHI0loNESSy6s2IVtbjUYeFrL/JslXO4t8fe9vtUdgU4hdIJJknFj7q2okXBiwPkzAQ83taoBLXUhs4AdizK+ijoU+gIE4eJmB5qYyRbMngUJsq1gZCACfg6oL2oN02esmwWKRo549EE5wJVVdFoNVHK4GdrgpmllagwP4kc9lhsFrHQtHMVng/wQb/Pmwf3+GzlxBVba3QKvXbKwchGxoWwjjUyeQ1ONCNWGYxJgWjEF+typUJ0gJHgIa9nvUkasyhVnO2LSGm4u81rH77JXhF4+bkXOdNakn+LzCysQZLIainVXLDSdOJI0hyjEuZpSyFgy5LXXv0h97uOL52/zGqnx+88eI0Ll5uUDppeUUc3SOXEa1io/k5M9KxFuynH1zqsb3ja3QbNLKep4eRimxefP8OplYR3fniT/X6fzXsjwsEGzX6L1lKHbr9F3soZHqYkKbR7fXSSRmOHKSoUKN3g3JOX6H7jDdxowv76LqeeFR9iRYBQEuwQBegkp1U02bh+m+zMz7O1+21W+nsEuyyLUcfEaA0Q4m3thCSrIu8vIEPDBcPDf/Bdjr1wlfTUogwqy0BIJCIvVIowcdKrFGJMojIIQcv8xgf581zjkwkkgWbSYKHbYzgUlnJWFHGkQOzDvPQ/8aBU2vz5QMP0LPuRqEEJit9Z7vPU/ohG3kMlMhjstDPMXknlHN55MZWLA0SdCKwb73yBfkOI/sYBokpvZgg963u0lzcvRI8vN6m49tE7vH20ztMXnuSrq2dIk2QONUOQk87rmFPP3IoJZYXmITRaQXB8QDnPtTde5/XyIV96+kWurp7nGw/eYKce0xsmrI5r8kxTtDJcXeGtRZkKHVNE66pkOpnwze/fQhlNt93g5z5/lroMXD69xJVnLpIkCjc9olJ3GI9HDEcJlJbBg306gxHtfoskS+l2mxwcjjjcG7J8fDU6bIKtDzG6otFu8uSzl3nz22+wc3+PMC3xlLipIEvytRVoZNh57OJZ/tWr73Hh8kvc3LvF5eVFVlSNd45gDM5ZggqkkWAKeq7lmSGOxaklrp9p88o/+wOeX1vh3MvPYo710HUgJBrVEHTR6BCdRh1ojZ86qB06MQQjQUnDbEpZjWnkXY6fOMmDwyHKGHSWQyL8PZWlBG0wKsXkGSrJ8cag/6ff+4nL9PHYLEGIcEHF3iU2ZdvdPj/aO+SrKprraWgWmmaimNRWmtug8NECSSyMZqiG1MhSFiGnmAoSYqoUabQ0ElujePWHwNaDO/zg3ru0Vlf45ctfo1s0pGqI8KIM61X8ml0k33mcryEoEpPhIoSpiDandc29mzf48f59vviZl/nU6csc1Ie8P9ykDorN3Qmr3ZRWrhkNp+hEk2QZ2ntsrXCV
ZVpN+eFbG9zeGLC23KbVanLpVJt+r0mrWWCrQ4n0RdNutSBpcPHiAqNdzXQ0jopSCYGypaW/2Gd/64j33r/DxctnKHIvuSs6oJTnuS8/y9bdhxzt7THZ26e1phkPHtLuHwOdYbIWqBwCNHotPnfxDHW9z7Buc2OSsLDYIqmHzH3doozAe4v4NQOIM0vwoBPDl/7iC9zSLd6+u8Xr//K7fKa3zNkXL5GcXpq/V8FoQmnlxh6FOG/T4IKQL52n1m0Gk0NaRZfzz3yWd/rnUNqQphlaablJY4WgdTJfA4LIFT9xmT4emwXm1pkhyJBKkBfFd5cXeXH/gMZST67LLLDcyRhMHFYbaq8wTqbxDkjipH1mTgES/pMYcXbRRLcWxYyOKnDj0YAfXX+TDV3zhade4ExvhURJT6Jm/ZSbMaEFnlbxBrOuQilFEo01QFLBjILgHPs7W7xy8y2e/sxzPHfmCkrB7cMtJq4iEBhMavbHNculpVFZ0kmFLq3MhhRUleXh1oDvvfkQVRjyImNpZYl3bh5yYnHC2kKLbqdB0WoKN62Zs5M1CFqTNYTMaKOptzEGjNTs/bUlqu0B7775EU8+c4VWMwekVyqaGT/96y9z/ftvih/Y8ICiuYTO2iidorTMUCSRAI6fO8mNDwZw6NnfPGDy2VMsZk2cmwLIfEobOeC8jXwwcVCQ6awmPdZmsd3hp84XjE6d5N2NLV77nR/xbL/L+ecvUpxdxVdBynUlpbfyCuU0KhciJXWgffwkB8M9jvXPcLLfI92bypA3JiXIyhCL3hAip1Dum4/hpv/z5/HYLOqRx5ZQpeO0KHj2ul3ePjzgpTIQCkOWaNLU08wSDkobWb5GjO1CgDC7MWY5g1J6ETl6CplFOGfJ0gRfW27eucmPd25z6eRZ/tqxM2RJio7hR0ppvHNz1xlnrRhMIxT12pckKsGoFEm+DfPNGqxl52Cfb733KheffZoXzl+NzFjHvcG2vGU+MK0t49IyqSxlZXGHI9JEVJ+1dQwnNd989QFH44pOvyHoXqZ4eFQymtZUteeYdfScp2gUjK2ludyTXi0Rs78i9oNBiaHhrF9YWe6xcf0u1996n9MXTtNdXiVtdEFnNHs9rnz+KoO9TTBLZM0+6JSgErltXS0LUGmc95y59CS/+Y9+yHt31jl59TRLa03xXfMOHUttIUaWJLoApeIhKRsnaWY0z/eYvrdHR2u+eO40w9PH+GB7h7e/8xZPfL/g4pNnyc6soDup2CelCq+9vKch4CvP4tnj3Js8RGnFaiMnVYFKOExi2Djj3io+Nm/5pAmLPI/HZgGBC5XQ2mcsaW0MjsB3+z2e25uQpwvY4OkvF+R5yv3NEXsTG+eFXhSJyO2kg+gcrLUIX09Hx8ogE/+g2N/Z4ZW771M1Cv7CMy+x1Gih0xjPRiRNOglJ8jaa8yUSV4Gy1HVNmuQkZsYhC7NuFuUsVVnx+x+8QtlPeebUBWZZ9rWrOXCT+aDT1Y7J2FJbCSpNEubWTpV1vHXrgFsPhyRZSpYZQeJ0Qu1hf+poDhythiXNK/Hz1YrVRoPaDoRLlyVoE6n6yLRbmxSFBgXPf+l5nPfUk4rB3jZa7aN0QFGjVcrC2jkpT5QmOI8y0aRdC/09BIHXdWL46qdOcOPD6yzpI7Q+Jo4xtoJo7K4U6DSfJxeEMIP9Ba7PznSx94+gFEOJdsj43PGT2LMneH9rj9/48DoX3r3F05+5THFplaANOojPgvZg60DR6zM+uEttS5rK0E00u7XHe0+aJBHUkRttFjGRKEPl/hwkf4HcJnPtg2L+RqLgQavDrd0hV2uLKVK085ilnDNaw/qA/UmN0dE6PLKBnfeRLlJTmCTOYFw0raj54MFNrg12eO7UBc4vrpIZIWgqM0uCcuBAu0cDLKFvB3xt8a4mSYW6okAc9GOaWKhrcIEPPngHn6f8lee/QFIYgpFSpLQVg2pKcFIiKgWH45LDYUpRaIqQMLU1QWvWdye8eWMPnWqanYwQPZanoxJrxd526jzjaU0xTUCVYsYQET+ltMgZ0pmeZAaCCC1mJoBLlSLPi4iOpYTIf7PVdO6ppo1GWG4qHg7xDQohTtMDZ04v8ZmTKVuHlrMrSug9WkRVosR2aO2x9UQGnd6IhVUEQ7JjbcpmhkrBVRblFFQSj/7s2hKXlrrc2j/iW9fucOH2Dmc+d4HiRBvlwY8qVJ5R9Ntkwwaj6Yheq8dqnrBn69g9RXg4zGZ7YS4r1p9Mjn98NoueXcd4HlWWiLGAMvxoocPVwwmqEM8g4xytVsLJ5SZ+a8iojtBkABMCznm0D7SzDB0DgJQL7Ozv8YPN6yx2+vylq5+lkaakxmCNFmAhBJlNRK6Qi+VDqC3eeryv8cGRpjkmz0VY5r2YW4cIQfrA+tY6b4zX+eXP/yyLnV7UiHuJY7ATahweH2P5xJy89FDWHo3DB8X+sOKH729T28CZk11ccDgVvZ9ViLJYEXGOK0+jdKAk/co6T1VWGFVHqDWRBDNm7iwaTCI8uZjxSITfPVFx6EGb4hENPhYtWps5kKKU0IF8kHzOvJGRWcvR5gGTM6dIjNwa4hHgQZl4CCoINd5PQUmIkEJ0+clyg+rWIcHHwaVWqKhpKUzKMyvLVAsLbAwO+cZb1+jfb/P0pRO0mwUmyzBpQr/d52B0QKfZ5nSr4IOxiHp9HB0opQSIUYI2Ou/nngk/6XksNotiRmaTG2XG51HEeDQVuN5us7/7kCU/IzfKsK3dS1ktG9zfHlMHkZkqPEmQpCcTDTBsafnx+nW2/ZSXz1zmWBRV6STBRpwdwNVOjLxVpHjE8sopT6AmBEeaFdLLRB8tPROX+UAIjuHBIb9z+xU+/+QLrLS64ASuVji8r9mZHFLVlpm/WSBgnWNS1kynYgVbWcebH+4ymjhOrTbpNRXTSlM5z8ONDXa3N1g6tUjwEl09LD1mUKJNQlk5ClsxnZSkiSPJUgwOb4NkrSglG0fp6PkrX4e4nDD3D5jpOySPcUYj1HNlqELhnCVJUkKkFqWZ4fz506we7+OqfSj6cZQifjtSjsmGC64m2CnapDhbYhKD8w6z0oC7AxHZWR/b1wClvKfW1hirOJG2WDnV4mE55huvfcjJboPL58/QMtAqOmzsbHBm9SzHikwOS63EmF0RpcgBVPx6YB5B8pOex4ZICUDk78iAKNbE0XJnaBTXco2vKrwWm1ZvaxLv6HcN3dwQXEAFJwGsBIyHUAfu72zyG3ffoGg1+MVLn2Ktu4iKTbz3AeVkDhIbp3iSRhmqD/iyxk1LQnDkaYMkSSNQ8MgWNv6K6XDMd979IU9d/BRPHD+PN0ryL2NYjFaKremR3JixrFMRyZuUjknlGZWWd24f8XB/zLHlBmeONUlUIFGK/d0xNz5a5+H6HjsbeyilqerAwbhmUAaORhXDYUVd1gwHU6pKbhhXC6tAUPQZzUPo6YC8zknCfFYYnEDtopsGFEonKGVi+RSjCHWCs7XcHMGjNbz0F7/M2YtnGO6OcD6JqlQvtyuzBABp7kM9wldDlJ+ilQPv0P1
CZmKZmdtb4aP2JfjYhymUSUiAtbzg+V6Xav+Q7964zvW7t2nlbfaGRxilWclziliaai2McRVC7DVnxZmfVf0/8Xksbpa44hBUJKoT44ufJDGkKEn4sFPwhUGNzxKBL2xA20DmPd3ccnBU4YOekwmHwxGvbtwktHJ++tzTLDYaOG1QJoHIPpYSQgRCotmPIiAF1LLxXF2jlQSx+rmPsvQAc6aq97hatCnqxAovnrmKSTMZ4MW3wFuHcyVbk0O5vSIRcIbxW6UZWcftOwNubw7otTOunurgnWSi1NayvrEvKc7BUU+nuNoytpbaJJTVhNJ6BoOadDjETgcsWQl61UrRaDaEKqLSef0uRjDicqMwhHR2+iMb3Ft0msfSWPpI70USoYIiBBu5eJL7qKNzjtGaxaUlDg8n9LvipqKjpFeBhBhZmI5GJKlBp4VIpdMGqqPQzYRQh0jNVxLB7RSu9jJkDqCTiIANHQfDKatf/yIvf+WzjMoxoCiritpZullC2yQcOmFP15EbFt9A4azFQ+uTupbHY7NEJCyoRzs70ULH9yHE5kux0Wwy3tmlUWUiErJx4OgszUJTJIHaS39y7eFDro12eW7tFOe7C+JbZZK5p62UzuLvq0hlKWgDQegY1OIJVnsrZhhJSphlq6kZ/VK+bu8k2fijDz/gXlHzK09/EZOlc6QnhOiWGBylrdivJzLzUYo6yKa1taPynvubI9a3hxSZ4eq5PkorqtJTVRZrPXXt4qITDYv1QQbhtcPViofTMcORJVscsbs/BNMizxLywpBYcbKfFbxGS58Wd5N8T3GAGyL9aDYw9rM+QwnV3ZhYLs+PYh9Ls2gcbiythTaD+7vU7TbGD0R5SpSCqxR0RlV5fF2SFBVJIulEKlGo3OCHYoKujI9EvECSi5LSVw6scAkf+kPUX7pCcXmFuwe3aKQNTvXPkaU5k2pKMytYzBP2htNoKhIPh1g+q9n3lPx5yGdRcvK5OXV61qvIm2fiMOkgMRwoRz4pMUUKlUN5YZo2Q8rJdot3723xysEGvbzgZ09cptVuCcM13gISNSGOw0QPMI0nUU42pRYUKVgr6V1pgkoTfJJIME6UKBtPJG4qQlWzu7PNqwf3+IWXv0SW5aB1nEMEtErwvsb5momtmNhabJ2i9Y6O/fbe/pSDwxGpMVw61aOVasrKUteOsrKMp1UsUzVJkpJkqZy0qZYNo8B7zdSLXv1wWLI3yOg0LVlakhjZIEprTFZgrUNJ9JWUvWo2gNCxVxEj8Tm1PURUCyFhhuDm86bZ6RyCjQeI3IbNVoN33z/g2SsBR4wlNAlaJShtMGnOcHefRq+GYAm+llClpsEfxl5SK3zlHgm4UlnwlavYbRnO/vW/QrPfwuhUykSk5Oq1uuwe7dFaOclaprkOsqdjeekj3SnEUt9/grMLPC49SxA0RSbronmXK1Ixo96jFNYY9tsFKliCFYM4X9YwsbA3waZD1tN9rnYWeGn1LK12E0wibGMtkCdReWeiQYHRwhXyM8jHeaicIE15Kiex9IGR0Uqc3IuJdrCWyXDIN2+8xeeeeY6V7qKc2HUkewZZTM5LQz90FZWLiyY20yEEqtJycDhGK82JlRZFpqSHKS2DaUVC4OUnz/CVz15kcUGUi3mS0G1lzFz0k1QL5aPyZGlKVcNg7DgYVkzGNeW0pqpK6mpCPTqaG5nLfHJGZZ99XQna5KAMzlrReyDsBblN3PymIc5vtEx9mUm/Q/D0ei2yQjGtFb4uIc7PZyTGyioGh2NmkRezBREaWrInMw2FkWiKSJHx3mO15XYDDj97Bp9pHu7vsnO0w97RFre2buKCZam7wMOdbZTSHG80mCUlPwKQmMcmCvXmf6EG/3+NR9CXqJcn6qGDNF1GJ49M95RhJ09gWIK3grWOPd6V7F7WGNXjL3e6jMea3SPLtPYYLXn3PmhxntRGHAudNKMuuvQrhCBIkCbSpIbEiP6bOMCaUTtmDb0KCl9XfP+DNzl56SJPrJ0Ss+ta5HxBzYRQHmdrkiRhHGwsAQX2xckCnEwrjFb0+wVFppmUlkrDpKoZjWo+s7bIX/jSkySdBX7tr36Rb33nbbav3+bXXzzBOzuOb7//AJ9IKK2ylsHdQ6oQGE5qBhPDKFc0i4Q002RpxmBnh6W8SZI35zMH572Y38VNo7WUxtpolE7irORjiWcgSFmkJwU/I14IDBsCBDynTzX5YDjgmbwk06Bmpw8QgiLNUumlnBwKLniSZsLYCkOA2goDI5EN7ZTn7nTKsV/5Gjc2b3Nt/TrHF4+zub+FNpp+u8O0HrHQXeDd2zdBBY43G2Ra4+KGmJmby+xJxSrmz8mcZUaUm+U5KnzkK7q4kaRE2GukGF3jBjV2NOVoVTFqaJbrJp20SehB0XHkLc/GdsXISsRDpFbOZxMqcoEc8aR0Du1lw+gkkSBXGTTIv7Qyh5ApttinUjveufYuo8UGP3PuKjrmX4ZK8gzxsnCcr9FRnVlGKYBz0f0wzBpjaDQzcqMpS4Gkg3dUtWM89bz57h5PLL/Pxa8+z2K/za/84sscbD9LsvOAF59e4/qDQzaGQ6ySQdvBnX2qXs0kUxweaXpFQmtqyXJNntV0ji0RvMW5Gh1LMWOSjzW8Uc6rtMxJYq0o/8ZFqYKK8xEV9UNBND3iRhh3TmCh06Y3PmLoUnq2FgKjSQk4UhNotxsQe0bva4GmE03aSYW+5zRMPd4GQhJYH1me/rf/Gnm34NTqcWEPaB03sAjVtDYY7TgajbHW0U00Da2ZSDU5+9Kij9is4vjkQuvxKMOQul1FdxYfZCEnCGyo1aNJ8UGWEnIY+zH3jtW4qefUMKet8nhaeZI0JWukpIkIe2YfEzXLnvR4JREDzOrWIGiVSVIh2IHMWAjinxtzO7yroa6htjx4cIv33CE/+/RzZEUWM2N8BMeirVIQUwiCDCurshRAIDw6IGYZlwb5QmrrKWtH7QNlVRN8xlY55fe+8xE3v/Ma1XCPgGfh+DLNSxfQyvGlz12hrqWk0kZTVTW1c9R1YFw5DiYVg6llPHVU1svNHRwqxKAlb4FZLqaC2VQ9xBofI+TJ+D4I41qg6ICPwAhx4yeEMNtksnHONjMSk+Nn1BdfiTVUKfMVuW7lWPPeoVJFXVmYRvmwk/5lY/OA/teeZmyO2B9ts3V4l/3RJntHGwynuxyNd9gbbnAw3iIxhsQYpnVFQ2n6uZk7TkrlNft1iDfNn4ubRfI5IHpDgbBkZ9oTxAzPGMPW/oi74wckHk4Ol2h224RM0sJsCEwnnqNBzf7hhMoFfBALnVmZN1uczskcIThRVzrv0JnBKyXOMf4RO1mFAEk0ZnVyMw339/n+1i2+/PyLdBtdaRZ9nHEnCd45fIy/I0QTch9kamxEvSlzB/DekQRD7Ty1h0SJ4blGYR2EYCirkpsu4ZXrI1auDkgafYIbYoomwY44e6bDcrPJbjnFNBJ8qrC1p64c47HisEjoNBydpmcyqUhSTZJmWFeDEg8zYoCsWM/K0gjoiJQZVBwmWl9JIy+hIBHNFEKl1o
qAASVuklppfFDkzUX8cIC3u/Kaa4VSnulwSKubE3ygno5RLaH9ewLKgFc+zn00R8MJ2YtPsXbxDEZJlGAjbZClBZUtSXQa2QXRZ0wZ+s02B4MjWssrLKeG9amdrwFm5X9chTOd3k96HpubZfZTcLJIjDGiqxbKEYkxmP0dtnYPeGvhBGuXL1CkKeW04nB/zMONI65f2+XmzQPWt48Y14HKIadgeMQqddbL5xCyFz7IsExF4ViQaSjGKFTkhwlVzBNqS7COejrh27fe5uqlJzjdX5HN5HyUqmqcq+X70QqnHCbPZACKOMTLsI85XKsTSUx23jOtLTYErPVUVY3zGussSbuNXjnG5v6Uw/UB2JEg2DoDU2Co+PynL+Gcx6eK9HRPjAg1lLVlMqmZlDXD4YQ6bqK6rGU+5GqsreICkqEjcZGjFVrFqA+EJR1CwNYl5XiIq0txA42LrjzaJ7haylXixzNGTCU4lMGjkhu6mk4Y7B+JRRKQZnkk02qSViYLP2pPbLBsZ4rl589zMDhkWE7ZHe5QuZL94QEHo212h9vsDXfQmDhHgeOrK2wf7BLQHG8UmNirqFjNBITXN3MJ+qTnsbhZZg1+QG7j+fWIfP3aRCi5v0hjcZnvBs/BwRHPfPQQExSlynHRNiRJU7w2kASCMqBnzh5yZ6mYoeKdJ3grNXeknOvooesUaASOjR45wpuyHqzjjQ/fwayu8umT52TiraI7iAuoVCQCM1BCozHezP3HEpWKMVwMi7V1TWKEcuJ9wCUKrxNq78CDCwobAs3FBVRlefJijxNPHkMnBcpkaJORdteojh7w9JU+9zdP8t33bpOcy2FfELs6KGofIrpmmE5rms1MlIzeiREeM6RollHyqHfxnlnnPr+dUZqb737I2cunafQXCTi8Tsi6SwI7BzAmRYUgSkkCSdrE+m0pZQkMdgfUEyucVeuir5uev45MBVQJmWJr+5DFX3qe/fEBrbwBQKdokZkC66bkaYeAppEVAkAgI4C1hSXeuf0RRimOFWk0t9DzMszMcHv1SUoWef7UmyXat74KPAgh/OKfdfLXvHcg6k+Y1ZKzEVoQtaJSmJBwbXGB+199guO//SZnbCDJcpI0pQqSiR6cdHFe63m2ZJwSErxo2lFgklhfw9wONi6VR19eEHsknOf+1jo3Tc2vXLwSB3Mx8Zgg7iLOih4fR7BywupE6OchxvzNYvxUIqWCsx4dhGBoK0eVGMnQNApXBZG9BrBecf/2IUd3t+ifj0G1mbj866xFGOzyC1+7CgquHRwQnKOsFEmRUpaWcW0Z10Z0M6Ulz4WTJnmc0dDOGDFJl4+KIkqBldiAuJjYTPCcvnyKpMhRXlxxCA4VvQ+EgOEJuLjJNEnW5+hwk04jxdYj7t9apxxbxpOStFkQ6hprSpklFQm0FEwV49GUxnOf4uLlZyJXbzZQFkpSkbTnTISP9x2BQL/dZX94iAuOxTQlVwiHMJIp1exfBZGEqE+4Xf51yrD/I/D+x/7/P0GSvy4B+0jiF3ws+Qv4T+Pf++RHyRdi5h3vH/njWGOKvxhRIgvT1UXu//oXeG8tYVgO5CZCWBrWh7ka0rsaCeNUMlsI0g8lM01HBACCjhTuiMKpmfYFBbVlMDjiu1u3+JknnqORNJlFrAU1G6DG00kH8DOOmsH5Wm4VrSmS9NHB4OPUO85aAnKDit2sDO8IosPx3mOrikY7Z7AzpJ6WmKSQJtvbODPxqGrCz3/1Gc4tdOZmcpWtqerAZFwznVpGgynjSUVZ18IZi/JomXPITTv/oR/NN6Q5Fv6dszXGKHGhd9LDGKOR+GELyqGIPmIU1PTZT9YYrXyR9TsNqsmUrXt7TCuHLWf5m9GzWgV0bjB5Qgie/Upx8gtP4eTLoLYlEWqUnkMrxuUEsfJ1jMshtS0JIVDEwa11nlaW0kuj/W6crkHkIUaO2Cc9f9owo1PAXwL+b8B/oGT7fZ0/w+QvYhk2y6j/w+iE/Kwjn2eWH6kUuEbO4Bc+y0c/usbp1x+w2l9Gp0LwC8GRGBNziBwKj9MBVIjqxpn8OH6KuE91kHJrFkMQvMfWNd+8/TbPXXmatU5fLkMR7uOUzIOEjKci7czJ7a6NANQRUGgluQQPOYcKGh+czCkigq0iNI0O2ErAAqU106MhL5zr8df+rRfJ8wXpG8ZD4cE1m4BGmYBKNCY1/OzLz/L+2w/YPByhlOSVlLViOK4pUkU6GJPlhkRDmqcYZYBUoFsFkMp7EAKzdC2llZSQGvBWFqd1Ej5kUmxdQoTcTZJhVYYyPUqdkWoJSe2mhrdZRK1/yGB/QLayiK0cNiJ0II22Dw5faIY7Y+xTpzksB9y5e5/jC2t8583XWVte4PTqcW48uENRpAzLilaRcWr1FPuDPbSyNLKCp09/mmbR5HA8YKndY8lojrQnRFZHxFlRGvzcR+6Pf/60Zdj/E/gPgU78/yX+DJO/mqtrCHwsqJWffRPxzUpnE/MZaTHOSTyKJNLopy8/ye2lNpNvfMjZhTVQYpANjzZgjSUoQxaFYrX3UZMvpgUqCEUjRGjX+4gMOMePbr1L79gaTy4dExJg7GW8B50ZGULGhRWsOJcITYdIwRCTt0JnJEpjI/+KWUmhgrAJggxKU22EFJomVJMpWZJyuDNl94M7nPjMssyCGg3QDfz4IZ6ATjLRpriaXqfg3/ubP8v/+7/9HfYnY5zzjCYW4z1ppkkSRSMbk+qAMYpmVxN0ggmKJNOCaAWN1vJjZmColGiFAo66roS9qzTKVDK49A6nctZtizNNicxoxllMZh0NNN1eh8M7clDV1mGdipJtcezRmQEsOk94OJ2w208wO7vsHOzTyJqcOn4M7yu0URxfXMMkmjPNJllq6Dd6nFhco64l2cwHz2J3kY3tbZY6PVaLlFsTCY8KYZZ+7eUwiHOaf+PNopT6RWArhPBjpdRX/6S//6d9Pp78tXzlydBMpCn0MC891Wy6ijSVOjyaLM8ippV3UqY4j79ymu1+m/pfvMa5xqJQXXA442RxayP2RfjoYxybc+/iFFv4UKmXnkXmD45b63e4p2t+5ewVTJ7io3zAV2LHE2yY0/VBjPl0ojAmjVoQEU2ZJKGpCwz60ZwlqPlk3Mc3KngFVmOVJ5QVtqohSbm7fci7r63T6Re0T56G9iqJKfDeM7xzg8ZiD91ZkGa7rjh2rMPf+htf4x/8zjuoNKXbzun2miyfXKORaB4Gx77xLATNKZvSTwSlcs7Gqb2gg9HhWRAvpSBY6smY6bhEEUitFUd+5SnTBX5rqPnRR2O+eHaHJ5Y6nC8KGlpRjCy7/91rPP2rT3Hz6gs0br8tacUuxOStiKVE+cC0qTj5i1/hwqkFFlo9nj5/Ka4dD2jev3uLRpGztrhMajKG0wHbg20SlXJ84QRJ5JKdWlnmo3t3ePr8JU42G4S9ITpEqkvsThWIS/8nrNk/zc3yReCXlVK/ABRAF/h7/Bkmf+VacaZImDrP0HomHizEvA4X+xQT3Th8/CYRb+M4M0kiMc6vLjD4tZf56J//kCt6gXoWa
WA0KcI9s37W6AeMBhN8DCMgWuv4mCYVOBoO+M7eXX7h2c9S5AUecZDxwvAEIyo+HyzKJARrpYcxGS5YKW/CDAKADEMjSRlWE8Bg0sDMG9jFDxmUp6wmJGnBdDIlzTKcqzG5wRrFrffX6e8c0ezcZPHcSY5u3WXz+ianP21IGm0p3RKZj5w9d4yfetHzP75yl/bSImqhwwelZWoVV5cWMEXObpoySBJeai/w7uSQZ3WBDVOqqqaTGqEIxfcgOABHNR5zuDPAB2g0C8o7nvGrO1z6313hyW7NXeP4zfeG/Lba59xKyv/hieOMXrnF1nc/ZPj+Ov2vXqB/5UX89vv4ELC1lGGi/Re42JzocO3WQwav3sT5miQ31HVNq9nh/IlTvPbh+7TbTbL1Gzx1+gnWD3aYTA/otRr02h06podSiqVunx8O3kZrxVKWSq5ocPHzyHjCeYcO+g8BO//amyWE8B8D/zFAvFn+TyGEv6mU+if8GSV/aSVBQ8pEt0evGDuPDTFaIKJlejbtVo8YoxFCi9VmQIdA3esQfvXzfPDPvs8l26XQBUlkGc90FwCJ1nhn8TqJdXiE2gMoo6gmJd+49y4vXX6S1U4fQpyJzHqZZEZzj+xjL2iRVoYEUeQJgqLFFUZJ7mWhU0yiI3w9+yYUaRxmosDkGltKGalTDdYzHU3ZPBzz45v3GY9rjqWOl547TkOnJFZJ/1Bb0jQQdAIqRZmMl55a4/c/3GKrdGx/eMjmzpT+023OnzjOvnU82e2y0JCIc5u22TMNFnSP10aHvEzO9IMHZH1DfjwH5fDWMT0ac7A5xj00lDe9mHsnhhvZ9zj5t15i62DK/u4h1bRm77blXkcz+v3XcV1PubmNu91g/4vPsDK6hY7olKQfSMmmtaHTatLOKlbXznIwOCQxOd1mQbfdpdvo8BdfXqTdaILyZCbn3LGTc/BkNpgMIdBqNCmtxXlPN89oZ4ZR7SJwFPBBzeH7P4ue5Y97/iP+jJK/omKUqbc4ZdBE+DcOWmbYvo83QQizXkN6mlmzrwI4ZJLuWw3qX/0iN/7HH3Cl1Og8QSmhlCdKzx0kZ0gU1omqJfZLPsBr6zdZWFnmiaWTEBtCfBABmH6kpAzeRZaBmFTPciUh4JSeN80qyPCxmzVZL/fndBfgEWQZbzwfPLV1QuZ0FhVqKMfc2tNc/MzTnFxb4L3vvc7DwwkXEg9pxnj3CFM00I2O6My1RqmENAv88qe7/P3vvsW71wsY5Xz6dIu7oyn9ImfqLHcmY042mjybN3i3qjhWNPlqq48yio039ln/l2+RHitoXGnRutTAbeecal3Au5rhdJtxe0SeZozv7/P+2w9499ZDnPMUnRZZt8u/Osz49a88z+Cfvk+9ZAkPjvhUOyFc/SzT9TdEGFdLxqfzFkxCkhqef+oSmD7jejIfNlZuitYeV084GI1p5B3WD9bpFz1KO0YE3IFEG/Iko99cIU1SDkcDFto9+olmXMdaIt7mgjx/Mjj8rxvA+k3gm/HXN/kzSv4KiK+T9Zo6yI0SP1gcGoUYI23mNGvhJsmkWYzBZeMYZQg4rIfQzFF/5SWu/9MfcHUKqpFHqncgszKQlKGjoFheRSawMazvbLLhJ/zSmecxqcH6GJUXTadC3Gh+xo8KQeDUNGrUXUSSnGjIhfYh9JbFtCXljFfz7182WJjD2JVzpGlKXVYQNKoc4csRu2qJZ579NEvHVvlqt02/2iCdwODGbVSAajgmX7FxluQwyqKD4/LJNk+dPcY2ms03R3z43S16icPljp87fYxO3mAjzymyBnmi+eCVW/CjDRaeO4EfTAmpYbpXcvTNIw5+NGU5aXDmdJvt/+gLmN0pvX/0DgufOUXv+VME7XmqTnk4lgOp14CWfkD2pTGr559h8kHJ5jfeJ98c0HvqPLfvv09WSYNv64osswRnCTphMNzgo8P79JsdNvZ3OLdyEhumeG/IEx1tdT2dtClJayGQpRnNTPzVUAqlDcf6C+wcHLDY7nEsz9mIWS4z5FWImBFA+gnPYzHBD3F+Mnu8D9EMTkobhZIT1sdsjjjkUvE0F9wvzBt9Zsle3uMbBdWvvMSH//33uOpWMWiMmbmRxCtfKSpb472jleUcDYb8YPsOX7/8NFmaYWdDLK1xLtr+zBC5qHiUYCXQKpppRNmwyKMFQZIbDRa1CNIkOs/FwWhsbGPp4AlYLOBQPlBPS2rrOLG8TNFZoN3sceKpS7QRh8j704rxvT2KpQ7eVphMSKVhlmGJ5+go59xCzkhvcfKk4sJSm+WVPpdOHGecJKzlBYXWnAjw6r/8LvbmDtUHm5h2RnGpA6VHbSiWioLTv/ocjbTBcDhE9XNaFzrs/9b77P7uexQXFvmLv/4kP1w/5OaHE55/puAXFzSJvYs5eUjnzAnaF15msDfm2/sVx1ev4Ic3wQWJLHQ1Oi3wKtBtNXiqtUyaNDm5cJzUJFhXo03KaDoRBoJKSIs2w8kIdI2znp1yRJoYFltraBTHlpd5sPWQK6fEwEIdjFBGz0GAmeTgk4aSj8dmCYEqQO1n5m2xgVbRsEIJYVxo13LKP2ILh/nHeORxbKLJdwwh6rTwv/Q57vzTVzjfXROVojFoHyTjxSiSoMS4zjr+4N4HPHfiLAvNluSMzGyaIp8sRIoLsfxy0TlfJRqPJtUpwcr/E2qCjZNxJ5ugo3KM1jgfSX3xpFBa3C/nF6sKJIXBlxUBj0ka9Lt9unlByxSkuoNWY1SwnP35L7D147fxgxKqEvJSPoYG3ISjvRG7m4ds4mg/t8io1eStsuTsoGTwYJsqJIzQPNHp89Jih0tfeJr9++/itqZkz3VprTWZbE5wd4Z4D4vPnKEIhhN/7zsc/7svk//cVXYvdXjwcIMPd7b43n9/nfsfbKKOFL/1/gK7VwuurB5wvhOoeJ/F9lfZI+Xh4SLLqo2zjqr25M6TOEkMMFmK1oEHD25wc29Ep1HEEEPPan8RrVLKesJip8e0ntJMC3pFh1E1pZW35kPqEDyrvUXevH6doGA5z4QdoaLB+6wvloX0E9fp47FZkIk7RHr4jA4SiCewwHzWB6yz0VVkhv07DHFfzWTB6tHHCN6D0dRLfUZfucreN2+y3F8kiTLYaHMvFBtjePXuR7S7HS4urko/4RyodJ5HSaTOKIX0Lg68r0FHDb8OeGqUCYQgsLIXOy+pi4Pj8PAwfm8KFUM/QywFQ1QYokUam6RGvh/nUL4kKVIu9nqstVqocgK6gXMVClh65jLTjQciIbAl3llMZjBace3BkPWBZeP+EaoVyC4ams2C3/mDmwzKQCiamEbOajfHnMxoPNzHfdYw+eER2fdKOmf6dNodWo0+VWk5vL7JzuubHAwPWUhHvLa+zjfu32NjY8zGfs3TT5zlK195kf5Bi9+8dsDPfr5kp3eSC9kNmsGjkuN096e8urdP4+oq9WGBsx5b1SS5JWtERSVwbLHJ8so5pnVNM2/SzHK0NiQmnfsVB0QBqxR0GhCCkgMq0oy67TZlXRKcp2MMLSPmIImRG17NGOaP+80CkEV1
pAvSNyRzIU4g0UosVFU0OyA25Uri7GQRygYJhGg7pOaT59mt4584y4Nbm/Q2xiRFC0XApKmUVkZxd2eTDTvi5889OzcE1xE8CEEyW1Sk44TgI9Uc3EzsFctJkXeJnaj0ObIhZ9/bvZ1dzEKCqmSmK4fZo9tUzSgFUb2YFvk8KatG084aZImhGk4JupRBZHyt0qU+uujgS4eiwk5LlA78q+895P4HR5hTHdaOJ6y24Gh0xKefafOjV44oncWkLc6cWOCpL57m/n/+Hfx7+7QuL1Pd2+Pg+gY7+UPM2MuJfu0exz93guWvHWe7nfKRarJ88Rlu6IrmcYNZzmmvFhxQoxYWeOUh/FLf84E7y1nVpat6qHM5K124tltxRWfU1ZS6tqTWYutKSuYko50rdNFFqwLRLP3PF7SKszgQJMz7SMKM731qUhSGwWREr93hiWbKfg0HVkwdGzow8vF1/wnPY0HRD8DUOSrvcPGbUzMoOFK9xYUx9ilqpuN2EPk9PjghTMYFrtWjhFoJeYpS4K99mtv1QLy0vKd24pU8HAx5dfc2X7vwFEWaRZd3gaRrF/2ugo9CLuLQVLyUfYgalhghpZlNV6PV0NxAUFGOp+w5x8m0G5vLGdQZdRWR0PiI9ymAQpKnuFbBIE24tXebo9Em5XSf6mgL6omQNk2KyVso3UAXTQEavMX5gF0w9K8UrLgBJ97d5YQv+OLZ05zoNam8lQRg63lvt+Q/e22HlX/vS6y+eJns+pSTT12i31yh316k312gMBnu5g6vnyn4veM5v+Vr8m6fnz95jMS00Cbn3pFm/SNLt2841XW8+aDkt14rcRue//tvD/jtD9/kqNqjAv6HP3iPddcgmATrAt5KiloIdj4wxk0I+LnkeYZizofTcafMNseMdQAzeyY4sbLIztEBCsVinkrceZhZIM2JLz9xnT42N4sIvmShiToxPJqwhrg+4xBSRhNybYaP68VDmL+IegYa6NmUVpARXxSUX3uSvd98n+X+EoSA9ZbvrN/khTOX6KRZHFDKV+KcFaa+in0LSM+Em9PstdGo2srnCvJDIzQK52pQcVrsHNtHByz2eyy0U25Mt6JxhlBcZltqNvXHAqlM+b0Hk+WYPOPN6x9i1xZYq8Z0Jtt0T3qSVk8YvzqTWy/2exBwrmZtWKPTNqM7Q8rRlO3fex977yGHCwuEyVgYC2NF9XDKGwdb/I7+kL/yt0F/7TRJr4l9O3B454Ci22b5hGHpxB5v3Rnw/NXz3M/lFvSlo17fpBxNyUPJOwdDPlI1nzrXpJFWrDzRY7l9mX/nqbM82N+naGmaWcrzX3yW4+FNlDFR1epxtsKkKUoLgBPcAJIes0rBfwz2n2VFzg7ZMDtkwyN4OITASrfP7fV1Lhw/RaJTtqKVlgKGHycH/oTnsdgsMr7wc3O7j5dZLnghPMKMMIbyfj6MFPpF/P34nUpjL4TL4MUPaqZWCABXzrD95l06OyNMXvDeYIf+Yo8z7R5iNSeeYUaL04w3CuflbUh1MkfSpM8QzQoKETH5QDCzNykabQRFsOKHfONgj0vnTpG2IGyGqM6b2YeKGE0WuECbRiu8kfQyW0OoKu7c3mWyv8slE1je3+OCtzRXK5JeP9LMRYeC99hyilcGN7VMbz3EjiYE5zna2aZWQ9LPKJJ6QF1Pcbbm3IVF/v2f3uHpsyWNZpveZzxlWKJ/5SXSac3WP36LpZe3cGqLrz/o8cZm4HubD/G3R3zrjuWkCjwxHtPtNbC15npngVtVj//wa4E/0F0We56xanFtfcBXOo7ivS2mNw/x5zSYgKslGjyzLvLtpJdTYSxsDmb0p0c3ycf7jBkZU4R2Zq6IVRERe+ujGxBgMUux3mFMNof+9exU/gnPY7FZgD+80EOcO8QN42fNb3xhTIzUE4QK4XrN7ECj630IQPSgDt5FMwJBUqwC9dPPsv1ffpvy6IgdP+Bra1elH3IVtVXCI4scMufc/LTyXpKIjTbo4DCJIlXRfywSD4NzhBC9jUNkVgWP9Z4tar7Q7VNrS25SplEI5YkOKy6+2d4TjAAYSYhMZB+oBkeU4wOK0GTLwEfvbKA9nCo9XQJJuy20mzhncc5DkrLnNEeTCmXGqNSxeukUJz5zkvcCPHP5DPevaaZ5l5d++STPntqkqe6Bqgl+ws1ywqvD+zxhU9J3bpJ/5gKts4ETjTbYJpPuCb6VOVafbfK3tw9IXrlPs7VM43Oneem1e1zfsbz50UmOPd3jTm2ptePL55u8Oxzy2UsrLBWWRjWU0hlRpQqp0qGMR5sEfAVuhE66kcPn5+jnzId5dnDOUDBr7aP5iYJ+u8u4muCCpZtmLOYpVSx7BTAKM/HqH/s8NpsFJTF3PjbBykWT6tlMQytUmLF445yEj8PF0kPEbp9ZtEA8h0AJ7dvMXtDlHrvPneT+v/weL527QKhqKj+RFN80QesgfVIEC1T8eC74uLQ9nW5GURSUR3VE5aSHEa16mPsIx0uQg+mIZrNBYRIyY+ikDSZVKfoXIoITXUpMopntbxfz4VEwHQ6ZTkZMCnC9JttTz7feuMdzgwmXphWLZ5YxnSYmTdEEsnaXoFLWVgr29TJpdobljw7pTFKa1y0/8/IZfrNZsXKh4v57G7z99g7/4O0Rp5LXGVaQqSFVXUF2hqr7KTbXpmz+k7dZ6xfkxzL6p3f4tYtn2N2fcubDdfZ+eIO6tqi9I7IPdyjHI/be2sRt93jzRznbexP2dIunP3eBlaWME8mE3lLKdHNISERG7J2PpVgteTmk8v65IUG15ofmxzfJrJchMiVmr/oM5AFp8hNlGE2m9Jodjuc5e2WF0Xo+v/ukJv6x2CyKGdLwyG9W2g0prWaO5zO4ddanBHgU3Bqv0NlJotUsvEcGgzq6vAcgNVr6h1XFiX//F9i9scPRvT16JfS6HdJ2iyTStkkkP8R7R0K0aQpyQ7WaGa2mweaa/AiyZsZwZ8RwXOI8BOeEfxZqggvcP9zndLsnsd86YSnvsDXalzt1PtgM8fuVGGytRXWZaKGbKzvFlRPKKqXX7fPCCydZv7XPGx9tMh1NeCY4+qeXCO2CYBJMmlHXgcHuHt2pmNdtJ0fcGpe4Ow8Y3n2Hg7LCtwxpq8Hed3v8d/uan//CFX75p1Z4r3GK02bM+XafbX+cYy9mnAuB4f09dj7YYGdzlzfe+za9D7aoPzxka7lBcSpncm9CdesBo9IyCQ6zr/jsV89iV89zzy3yleNdbr55j8PxhIPBkI6pIddYa4Wt7UTSEJwjaMm1CW5Ikh2TEvtjtkViKRXf+zgoho+thZk1LbDU67O5t0Ov1aGjFUcaGgoyLUbnn7QhHovNAkhpFenysu6Fhk0IJJFi8uiGFKVbMnfc1zEUSBys/LyfiUnFkc0qlPzYGHrHsSeukjUaqGfO46ua/Z0jtq+tk97YojN2LLbaFO2m9AGIfZVSTnB5FzAmwRhxpDGdnERD1s9IdGA4KglGmnrrxHPs/mjET60dE71OgNWsxfs8yr/UxmDrShAxL+WI9WAS8VMmeAqjmKB
w1nF3c0JR1Zy7uIw51WH3zh737+yTNwxJ3SRb7OPKkt39gEsa5M1Ao9dg8MQKt+5vwcMJduxR6SInXjhD8eklluwih3cSbjdyHk5SrmZD/uEbgYHb4MXld3l+9Th3JjXDXcviqUVWyxWe/vJn2Pj+Og+u/ZCQ5CSHmlbZIDveQt0fkwTF6otPYT6/wrDe5vCjhG/f17SLHmMzRqmUSe0w05pWM5mL7VI3U3GKPaymxvsKZQpJP2B2YEr94NzMI+ARz24GKYuPgKaRN9k9HMCpwFKecX88oTIK6xQuBD4p++ux2CyzKWvwcsMoo0VvoKXxTVQgjUpKh5wYBhkSJjq67BtxUvQCPT3qd7w00YmendzS+BLLLeIE1+QZ4cQS2ell+Ipjujfg9ru3Mdc2WXQZC/2uWCV5T21Fbeiqkoog9q5WofIE4yHPNA2Tkfoght0OdoYjVpebnGhlZM0ElRgu95b51sY10fCHgDKzwaT0LNaKHRAWtA8ok5FkhuDkz6al4/b6kAebRyznmtXFJg8PRjQ2G3Qc9IocEsutuyOG0wE6b1FPS24Gx+XPXqG91+b1LU3SbWNbBU/1+uzmIxbuHbJ/8x5va1haS/lcs2Rse/z2tzX9r+d8+XyHuuPRNNgeKO5PC/R7ByxdOUl5/xClNNmVFar9McXn+uxd2+TB2w9Y+foSw7TLieYe1fAGR3Wf1x+OabcnLOuKVEOqNYmWWEJnxaTdBU8CMipwQ5QupBoxM+b2TFMvKPMMYk5m2ZfRcb62Ne/cusXPf/7zBKXoZRmNREdirjCPP6FleTw2SyA6AxHII00/6Ec0kLl+BSgUpFoawRRoGulVMpPMMyWNllAc+bhxk/lAGQJT55BsrRAFcmpuieNC/D2tydd6tI49B1/xHN7eYOdHN2itD1hoNcW9PkmYTksSDEbLZ6oGHuMdmRf9fhocQQeKkHLHT3nh1DL9OmDGFmU8Z9IWRZIyslOUAjsL05GBC8Gpufw1EGh2unQ6LbYf7oB1VJMSW9YcVZZy7NnYcbSTwPL5FSY7I/LlLplJubVzxKCfs9TtMd3bwk2gdzplMjgk3D1Am8Ak0bz6BjhdEUi4+nSXhWNtOH6Fl9ZGfNee4G9fzXBHr7FeNrk33GJv3GF9o6AR9sncDru3bmDLmtxojg16pMuG1488N3NL5Sq+/uYrhNNnOQwt/sLiSW48LPnqc2e48d4rDH1NhidLNHkhBouuttRVhU5SQkilsbdHkC7NWd9hPjCuqKylrEqqumYyLSmrkmldMpmWDMZD3rt1D2cVF08cl4oFRTdaLc3WiXncG3yFIjOaQkFLiwhq5B2WOINQgVQHCqVJlYjFkuBpJYZciTQ00xojoxQhUM7GLLEPsgqyAG1j8EGABBc/u/jngtWKKvLTDIpEQUg1vcsn0FdOMt08YP1775Ff32at32OcpfgqIU1y8hQKE4dalWQ0KutRXuOCZ9yBU97gbT2XMbfJWG20uDeusc7GnuoRMOCDw4lCGR8SJsMjThcdOu2MdjNHuxDZuZ6JjTmXJqHZb7C9PaA/mrLQ7PLBh5uU5YCDgx1CeUQy6vHGP3kFO6y4eHyBpalnrdVheWmRbtVl2l3kjeVzPJwY/M4h//zDAW/e+wBz5NAtUKcWSRZX6C71SVyF3ky48nMvcPfyExy/t8VBoyB75iRff6LLz2WKA5vyP62XvH7g+OurKZujAWdOa365aWkauG4dtnYc+YpGISGz2jhM4gjOMT8qVaAc7/H7P7jO+tYBg/GI4WQqxhsuhu0aQ56lNIqcZrOg027SajZpFAWffeppPnPlKnmaoRSMnMWFgAnCodNz9sQf/zwem0UFsjgXKcPHNPgRTtVBGvTZMwvNDHHBK6Ww81QrPa9llYrafe8jAe/R84eiVqRym2tkgtYkWm6smZevUZAeW6D/V3+KsHvEzjffYff6OiudHsdX++RJhvWBzGuSRoIuS2ZV9bCuyduG5q7BGzHBDgRUpjnb6HF3si96GUJ023c4Jy4pgRxXOZyqCB52D8c0spyzqz0erI9QGpy1WG/x3jK1MK4c+0clp6wYg5dWkSUJZdZhrZvQH8MDs8TzXz7Pi9tDzPaAdtFiaWWJ3R/cohit89z777N5vMV3dZvp4gqme57O2Q5/+6UTlAtHOFqsBsv6aA+rj+gWGe+WmtdPn6fW0G7kpC1NPRpghiOenQz5vVd3+Y33D7h84ST/xavwUyawN3oPYwQIqWvHeFLRbqcUPuCczEe8dWhd4oIizzStwnDpwhk67TbNPKfVbNJqNGhkOVmaofWMTazmAJDS0tPMQKQ6eG4PB1Re4fGkAfKEx3/OYpSKpgYaG/wfyiTXHxtOWiVmccorCgM1nkIpCo8E4EREaebG74MXV8U49bdByrHZoDZaX1OFmXdU5JDhqL3Mc6ybDUAFLEi1pljqcPzXvsB0/YCH33iT8a17XDl/nMWikM1loxrTBUKi2HYTFkkl/VjeQ1AaHQKX8z7fDqI7spXU3yE2tcF6XDKz/AGaLSwJXhu0ShiPK3TwOCRjRhuN15qjScnxkz2yRsb2oWVgcwZ7JQsnLfdKTbPVZenhHumrb3In0Zz51FlCUvDwuzfJlCbxBScGGVdCh8//3NOkP3eGXktR6yPubqc8vK55ajXlcPqQ549d5870JK3mFs93pnz7+pTpzj6HBy2+fVPRSDMa3T6HaZv82BqDzmlWryyyeMqznI4obpVs3fsAowNOMc+jCdE+17no8UY2d8b80vNXSPJTUq6HRwfkIzca5mjZTBmrvMysbAiUbsrN0YiBdZGIqRgHQdE8P/l5LDaLBhqJLFYTFNpDHSBBpuQOISTKHFJ+r3aKBMgTFU30xDxt4j1uRnVBtNVBa4lTQ1J8VYQUEyV+uzbIAk2UfCwfGQHWCV14Zhhn0FTOUhmNdoqw0qb961+gvLnF/g9ucrxdgP2YtVLT4CaWnaTi6iCXuLdo3aoU+NpztujTTnNGrkL8tnyETG08VSuhbRjNZFixeWuTrFHwxmAXXdc4JzqcABACk7Lm+6/d40svnKHTa/PR3pRat+leOcH29IiFrGZ5uU2vtLRv79NPG5x+8gx5FXjw3gEjZ6HhUN5x4Cp6G9cZ7sPrt3Y4bTf4/RsNqqnitbIk73W5+NVNfvfdKV/jAZ9ZUZjuszw8vMpuY4XmlUU+f65FlmZ4ZTizW/KfvbvHubUe51JFYqd8sLGLDkQF7Gwoq6JQLnK8kKFvkhqpCOoD6nSNSW2xKNlkzlF6T2kdpRdTdeJhJwnFkbAaA2VrFLV0/3gEOKqtHNQ/6XksNosnUFkf5SuaTAdypcBDpeSbyrQmRXBwozRGazINmdFkiI2NV1p0McHjYtlW+0duISF4AgrvpWfR81UNqdIYHTlgMxQtiIGeVtFu1cgmtP7RXMcpTTi/wsMzq2Q3Nrg0qPB7E0wmuvlxaVF5oDlIUEbhrJcAV8Ey6aiUC+1F3jrcRGtFWYpnsvcBcbKNwaBJRj06IOQL6EwBDjePERQ5Nc5jFCy0GxxfadNsNpiWQx
YXPcoPGO8PyBLNONSMrSfYmv29TT74//wGBQntRorxCWohZX3rkL3gGb+2zenJNf7yz1xgd/kzdE5lfMYY/n//4gY9ptzbPcUXn+nw3JVAmpe8nE+5Z1tM7S4PyoxvPhiyMFHcyKZkTnPV1+xujGh3cuoa3nzjdY41BNgZ+xpnYTSuaDUzZupXHzyGGICEh2B5d/cBd6qcGHsl3pmRm/eIYOnFMSdAomKcCfJ6ifJVz7llRFPzP1quf/x5LDYLESaW+AMxz25rhUkUUx+wQTZPM5WcSR0XM3HiXwOV8/KzD3ilxBNMR8/B2JvM69cgXmQCBgQIXm6ZyDXzhKiPkVtmttkIPjoZysBT5M8BlGGsA+9eOcHGeMJzB0MWDybow5qD7X2W0wJVeYK2mNREGofMjvDwTGOFd452sPFG9F7FbHoZwGoFRaOFdRO8rXHRQSY4h42IkHNu7nbTaaY0mzkoxfbWPdToIS4/ifNN8pBjdcb65gaZgobOaTZSQhKoOymuYWivduhfOcuHwx7p8iJPPZ9SPrnKK6+lXFgb8lNPF7z0dz7H4eQW2/uOo7uGV36rwZnP1ejpTXa3f8jULeJbz/OPN3JGqsvf+NpJfm5pkc3zNQ3goC7pW89Cp4ez2yRJQophWnmcC9RWMjRzmPtNK0E68DhOZRMe2AbOC4Kl5r2J/H09I9wioEkdoCbmiapZ7N8jqosw22fT/j/+eTw2C5AqRWEMqZbyKiW6viSidsR7dOxnbJBrM0TLpNo76hDwKuaOoFCR8DjrN2Rde4yWWyrKUsQKCdHSzNjMM3uled+kFEacWkm1kZlNXPAOiboIOhC8Yrso+NZSwrPNgosrJdMHtzjXWkLllfRTWhp4gokEQbjaWqBZFFTlNE6jhbFgXY1WmqTdJmm3yb1mOjokbSRC/6hriEG1SsWyxRhazRQI1JVlWFuaiye59oHDj7bYHx2SDdqcXNRAn+1pwjDNWDq+wt/56jFWFjp0F7rorMHvPlD8s1sVbw8VT+0o+mbMX1gbc+1DxaR8nzPHxpBfZeX0kDtulc2dJZp317hw8TTLV8+hu01239jgn72/z+5BxTvtIVfyJgTomkSslp79NDuv/Ba9JCWJhxWINMJZJ2COViiTSP5l0Chr6SYVp9MpN6oGRM6evH0CBcteEPRGq8DMPk88GuT0VCpg/cwZKIq/PmGNPjabxSupI20sJXItDTBx56MM1kn/UoeAJeC8oGEeyRQhCAc4IBNy9zGiXYhUFbnKH032Z0hYQE6iMH/R5cpWCDSdaA3eYYJHB1FV+kgJV/HzEbNYqjThjbTJ1pEipBNWlpvofh+Va6baY7KUalJBloKTr2Nh1Gdnd4+Ao54OMCaj2e2SpBkmEdNz02gw3rhJs1cIEOAkKgNkKBqCsJSzRBpe62CwtcXBwZDl5glq6zF5j/bxFQAGe1O2OudYXFvia8+tcf6JLnY8YP9wxODoIUtHY3rXDvhoZ8p7w4qmPs433lnk2keHlImi10/5aO82v/bkLis/HNE8bLB9d4t7/kO6L5/lyb/zeerRIefOBvK+51454am8hVGBuqrJH26RNxTZwjIqGZEpw7SuKWsX3xOPtZZMvsO4UJDDwQTO5VPWbcE0zG6LR9ogWQORFqPi0DeyOlBqLodI4pBNAplm2sw//nksNksAKhdQRt5wDVgxKAbFfH5ilORCViHEG4U5PDtbtj7IxF55H/MPZy9LkOZeCa0hnuuShaIeUbtnPUyIJ5pRCuvjFHmmr0GsTEtC7BuYgwLz0CQM662U+xfOs/XkRWHOEqT8AikrlEiHnbPoh8cIN65TTUuyok3RbKGVIQSNTg3VdIJOM0wIjPYOaTQlWkGTRM8CAUFSLZFxPihqr9BZmyQbo/WA9NgxapWytT8iVYo8Af/gQw42NTfrDne/V8rt7QJ5I+f4iUVeurzAXmuN5z53if/mh0dcurDAT7/Q59rNCdfXJ0zzAZfP9hls7rL3gw2WnzhJCJq9l89yx1r6l5v871sdzhZNUp1gFNTK0FUZ69d3edg55MLlJ9i/8SMaeYJCMxpZBkcVRSMnsw5b1aS5mFloxG4KX9FOEi5mEz4smzFjhXjAxUm+Ela6VpqER+rZmbBQMRsh+D/sRfcTnsdis8z4PMLtEWcX632c0kKFoFgmTvVtADBY/+iWACJdX055YwwBL/HbEUacKSlnL+QsA9572WzS04RoTO3FDD+WcCZuLMEE3Mc2V/wejIkEpBD/C9jKcuriRUKWyNfmgtBWZsCCB7SgNSpPcSFQNNqkaY5w3gwmE8qGtzWoQN5bojraEUZxs4gyAilBpffTkg2pFaOxABHNTgufdNmfdNh+aNnfnbK0kHJ8TVM0a0aqwbo+xl/94grdPKHfb9LuNknTlBENvvutQ/7zNyxJq8HaQpPPdluUFyqGhxa/M+a/+h3Hs72C/qU+d585SZU3uXqqR1G0eL6j6agEbTIe1iWnU01DOlPcVDNeaHD83Bke3ngXXU3xAUbjmp29Ca1OTtGUuVRdVxgM2sxsdWvquuR0kXCvzhn7NLK0Z9WEirJiFflhf7iSIFYSBB/7nUi4/ISp5GOxWQiB2oNFUKcklj7eCfynlJja2YhayBqXW0dUu0Ko9PFjCWFSTpCZ9gEEMREYMea0yyeXun+2+GPTL44fPm7cjzXkMDdJmN9aIeYgzr8W5JZpNkiM8M+EKatijIUUFUki1q5GKepJiVIJWZE+KhsSIxvX1SgV8OWY1uoSdrCHrSYELc6N2swo64ZmoyDPDISU7Y0po2GJN5pxpbH2EONK2rnlcHeb6lChVns8/+I5GtMO75YdfuliG+MdVYDRdMrwcJ+rWcUPru2zsqSori1yq7vEM70Oh2s1+5uGnz3RoZWV/H6nwGcVo8Two1e30WGPhabm8rGMxW7gGVfwg1HNpq95oYTyZI6xE8rgOHb+CTY+eo3aeSrn2dqbkjWGNNsF7a4WgimQmBxrxbRQJxmpcpxNRnxQ90RNakSwJ2vGRDFddOSZHYb4CPrILK2euQXNaOk/4Xk8NgtEMqHAey7qETzC1pVmXEEsw2b6EvlbcXaBnP5q5gYZfMQUVbx5wrwHEVZAdLg0j7yI44EjtxtxUxDiNNjP0TNJw43ctXhLzGTMM4UmEGMM4hsRPrbJZqdXRPaSRPqzrNUQr2Vby0ZQEJzDYSFIZIaejOicPs/u9WtkKKYAhSE1iixL6XYa9BealKrHW6/9mJ2qQztLuLt7QDMrWF1tMK40JA0G00AnswyufYuj4Lh5s8t3f6cLk8DKpS5Pnl2h32pxebnLzz1zBd9IOdbpMLAlOlg+GO6w+3DE378NXQNfb4+p39riYcNS/fg65XhK84mT6E9f4Ae7nr+/qfCdLp2VBZ7+zBr1wxFjP2TvaMDJM+e5/s5rJEqjleFwVPFwa0ynMyTLDc1ug4DDu1LYFUYjGv2SEw24XVVMSGKGjIp+bbPWXd6jWU8psg/5PZmfyYxvHgDzE54/bT7LbWAAM/AnfFYptQj8I+AccBv49RDCfsxu+
XvALwBj4G+FEF77pI8fkClqCLM47+jiEksm5qWLLDj9hyj5sxcCZiEnJiolZ4iXihQHWaxyCwUQv2Hv4g00o/LPFJWxjwk+lm9InWce/blSQCwX1cfRlPh1PTJVeEQGFUq/zIdSJSigQk44kyb4siZo2STWBlABbcT/GOuoqhptLN0z5xluPsQeHaJCi0a/Rbvd4NyJFRq907z3w2tsHB3h0i6h0SZpa8aVoSan6BraaYpqVpSjQ053M6wDWxvW7w2wpaFTrHDi/HFOLPTxiYHUsNYoOPSWew6+tLjKZy9m/P76Hp8dTDn5/h3U2/dIJ5ozxxco91fEm+CW4czPXeTbkxJ1zEJiqJKM3556fu2ZHrz7kK3BEc+ePo9XObYei8DOBzZ3x6QNQ7eb0mhmkBhcPcUUTYzJo7d0TWo0J9IJt6r2fOD8R9f8/7+9Pw2yLD3v+8Dfu5xz7n5z36qy1q6l9wWNbmwEAYKgKUoiaFKSpVFobJmOsOSxR475MJI8EY6Y+eSZiXBY3sZyaEZjhSVTC80gBEgkAUIACYDoRu9LdVXXvmZWbjfverZ38Yf33KyiBIAto4kuRdTTkdE3b97K++Y973PeZ/v//1Osi6+ufiTCsK0QoQ1QkYN+aDnL573392us/A3gd733/4UQ4m9U3/914E8Ap6qvFwkCRy/+6F/tq875dOv7A8wH8IecImxCdxCfhkrUfVWv+yoiB5netBzp78sXqp/JKrmXYvoh34++DO8rpyXNKuYV05AKcUBcfkBaPj3Oq1NselwJEU6+WEAsBDUVBjWFCOwx9UYQH8XZoCvjzb01lYEgQwkfGm0mQ4mCmdVlytLSiDSPnznL6vIcUVly4duvcvnWHdL2CnUcqhxRjnqUu4ZhalA1yZ7WOBNC19Ozx1k5vswcgq2dHQb7Od2kxdv7A9ZnO8QobqYp+96yVG9SFBnWFDyqNGefX2L29h63vjamV2hE5vHXxjRETJ2EVqPFbGeeU6cF6V6OyXJaTDhSu8jVvmI0GrItHTrWLCwfZuPSe9XokSNNS+5ujdhZatCoRbQXJLWkjjUGp8MwZLgslhU15pZsBwm86hK7g1DZT7dZxR4a+JSr7CfkuVNYx4+wHycM+xLwuerx/0TgQP7r1fN/r2LO/54QYkYIseq93/hRv8zhDjZktQ0PTgaqJF5V8ngCjxZhGnka2oRyYODmmtY7wiaW4YQIFcRqvwfYr4eDbu+Uhzh8rr6S7QP8VN47LEbI0BAN2jFVXd/fo2eKVFhn6MnIaiZt2oUO/aNEhX/jpn7sYW5hLggBVTJzBzNktmrIWV8VG9zB30AxJvKGcpjz2nfu0tGCMytt0rjJ23mNxU5MoxEzLhzG1XAmpRYrOp0m48JT5IZDaxpESbezwHK3xUtnupyaFxw63GQSlYwQHKnVWavViaOY2wjm4jY13WDxwg6yLkmlZRKlREcXiNcXqY9K9l+6hpm32M/N8drtt9i/tUt6Ycihpxb5Dz+hefLwkJv9YyAkk7LEOsOhI0e4ce4dYhU0NaUS9AY5d7bHLM02aOQGG7mAdK2UPKWXOJvTkIYGBX3iUEae7gI/JcEKN0QEeFlRAFe7yVWyJB4+lATfA78jQnLwtyshouX7HGATWK4eHyh/VTZVBftDznK/8ld3eSWQ6nl3APgScMD+WD2s2M7vHbFKClwVwoX40x9g7IWUOKZsKRzcaZiGWl5Moe+hcmZDbT+SlXCP98EZpt+LaX4EXoaejQOE80Q6MPOr+z7QqSy5wx8g8CyhISqnJ959sNe5xXlUHDEZFWBK0AoVhRGMUK2ruMsAlEcKW/FAe0SthmpKjNJcpolzEXk5Zt/mRCWMMosaG5YPtZD4MNI+SBEe6q15Lg0m7G9sMN+r8dysIkoijsR1FmZnadcb6KTO7bLgqIqYzXM2bt7l19+6yv63ztNd63LyxVPUjs6y14bvdya0bU6jNqY5zEj/xfs8+ld+mssnl+n4Hoceb3JOpaxNGlwbhMHYUVYyyVNWD63ihEY4h/KCSCmK0rK1NaG/ktKdq+HKCKESrA2KCvlkgo4jpHB0mDASSSD/IETN4ZSp1KQrgKGS8uDUdm7K4hPot/gRp8sHdZbPeO9vCyGWgK8JIc7f/0PvvRfTLPsD2v3KX4cffdTHwlZVrCr88dVdv7pDSAjjHBVwy4gweu+pZPBkNVHsqcgtwmiKPsgfph37KVHflLQPhLdIFRwzUiF3Cror/uC1eHHQZwlTzCEGkDLMHblKcg/BgYKZr/KZaeNUABaBs74a/nQHDrs4O8vs0hzj3nZAelZVvFiokFcdDEtaJPpeqdOJKo9TSB1hEUwmBflenzwvmJvv0Ls6YDJM6d8JVK7Wh82Cirl+I6UYZwzHJfncLHOLHVxfkl3dosSCdYxLw7goOLLYZnm2SfHGNs2tjFlqzDcajG/exSWO9L0eP1U/zuFf+jiXn16nNxmTNxTfTLfZGRuUKLn8Bxlnf26Jc2aOi5tXKiEjS38y4fDcMq2ZLuXuLhKHsAHPsrc/ZntvwsJCnXoz6GdqqUDCZJTTmgk3lqaYIMVc+JymvHNV5OCdC+Ty3lUOMj1X7r0Gea8c8IPsAzmL9/529f8tIcRvEKQm7k7DKyHEKrBVvXyq/DW1+1XBfqAJoIYIpVRCGXZKYyqmYZO6x+winTjQMpyOzuPD8Tol5pNVUh1VJ9VUAG1KdTPNaw6yIe+4h5cLoZ6SgSzPMy04TB1A4QnO6sT01WEIz/tqcqAqMvgp9oaAzMNVVTV/L+SUPlTlDh8/xsbFS9V7KZSEsiiwRRiR0VqHvoEIjsaU0YSwOGNDODvaGTA310I1WuzdLcgKjUg6CCRWqOpEC2yM2SCnuVBn0SqksFy8s8N4ZBhdH+GMR9SayEaT2YUZPn1qhYWNPqs/9TR3/ul53KBAvFHQ8AKjHO2lOZZ/5nG2jOd7y21u3PQMNjPKfs7MM10+89NzfPflfcb7DT42W+NSmQfJDg9b+/scWVxj6dBhru/sEElJrEOD1TtHZhx5bjBFAbVa+JO9pzvfARFC06YogmxidX2kmF7NgJ4NA5O+UmeoQueqSR2uYnUz/CH2QTQlm4D03g+rxz8H/D+4p/D1X/CvKn/9x0KIXyMk9v0/Kl8BKPGhj4KvqIp9JfNcOYS1FWNL9cdXH4aochmcOyAVV2I6FFlNmorQ0UdWHfaKLXIaDgmoyMbvNS2nVbQQ84YPu7T2oNpyfwHBhP/h8diKOsmaapCzEmDCBfwN01H0g43uD/6WuaPrqFoNm6aBsLpi6Q9Vu4DRkVIGlkYXHFFHCiU11oCIBGVpyLKCRk2TlAVzM5rhcExrpUOSRFjniZxHlh6SBmluMFmK7U843Jjh7LNnubBbcKXshypYs0nUbPDZ5w7x+TOzXPxvv8n22++zeGyRrD5mvDtij31UXTGam2FnPOBKaojqMfKOYzKsoyLF/isZNxdu8NPLESePKnaGgn46AAJf153ePnjLkRNHufT6m2gfiEu0qoooAmzpKIt7UuT39748Au0ypJ82IgmY
IDEtzgDCHeTDrlLKDDfbEC6LH+Eo8MFOlmXgN6oBNw38A+/9bwkhvg/8IyHErwLXgT9Xvf6fEcrGlwil47/8R72BB3IXKj1aVvWJaX4R+I8CU6SfNpYc3roDggFjbJXAiQMS8YOxEwGqSvQD3asPmw1/IO08LcaHD9gdcOP6ylEcYXMbqC5OGLSEe03I6ccsRAgThaj044WvVH6pavoVS6KcNlepjjtJc2aGzsw8vfIO4aZRMeozvQv6+07DqpJjAwwbL3Be41UDWc8hrmO0YtjPMaVlmKWMZYlzjkYUMdOJaDYSJnccpYsxdcewP0D1Btzd1tSOrCIaDXwUEbuCdy7f5FS0xfBojwEb6OQOc598iqMnH2e+1sGPDFe5xUt5gcNw1kmuXd/kma5gRUjKPryTtjj76RX6rskaJXhR9bQk24MR1hmW15aQcYwwOdoFtk8hQzitIlV1CKaNxerC+dBz0cKivMVUdeBp03g6uX0wvXHfzpvKsYdRqh9T2rtS+Hr6Bzy/C3zhBzzvgf/TH/V77zcJNJWsjkOBVqLi6KrCniqMEfJeGCN0xRhZ9ShEFXopUeUWfvq7p1xc00TZB2xKVUYIyEigCocEgKuml4WsknNfDWuGfGg6ZiMBWTlNwG9Pm2GBailAku+VrqchxTQ8mCb30/mDehKzcuwYvZ27WFMGmQl/X6h1EDIGmLVH4l0ADSAEj7/4STpzS/z2P/l1+r0JhRWMBzZ8wn1D3EzIlGRkLBM0c97TWu5Sz0rS3T7bTct7596l1xsjRj7AeFcanFlVHFsYUgwSHvlEk/k/Ocf1xudxcol26rj4tXdpNPbZXhpze7zCxvkRL/cUv/jIEh/fHNBIYrSq8bGNITtv3eHKsA3NOExnOIe1jmGWUhhDq9mmu7RE//YNQlARJDusDShK58M8njIu4OZlCNGtDRXSuvTkLtBUBZTkNDqQ03siodrp4WAXgHP2j2TJfyA6+EpAS1WAp4rmKOQMslLFmg7HeYTmgEY1bD4OWCvhvp5MVTe30zuJD+HPlD7HiWk/JQhwHpCxVbBkcxBW3Wtk4aGqjSEJJ5ep5PssVCM04YjSSlDYkNarijLEORt6NlQVGcnUZUFIajri2NlTXHztVUqXQ9UPkC5cZqkUqgoiQt4iAvJPSE5/7AU+8W99gevv3yRyEotgNLFM4QA2g/xOiegqombEfL2GU4pskKLMPs2GRBSaztEV4iOSYjNi/vAMh56f5U8s9JkT7zEcZ4yHE169M+QPBt/keGeW7Hob/c6A43/tM2SjPZ46tMTGYMJTjyR8/M2LZJf22E4KRifr2NGAp07M88m1mDubJVeThLIs8V4wKR3jLKXeqbNy+BC7d24DDq0Vk7JgkpYVIC9EElKWKBTGOKI4rpqLjtPtJu+OSsZVH2Wq2RNSWRdwUHJKATwtK3OvSvoj7IFwFoBYViMlAFSTw0xDKqpSq0BUZdR7khBQ3aarUZnpB6Aw1UYPkOIqma4UDCQVi8oB5Dhg+6dUoAdNR+6xtE8bV9O8xbmKtLzKl6byeYjAGSDE9DQJ+VYgOA8VPFNpTfqqiBGGNmF1fY2ltUPcung+qIcJgcdUuRaV+pgK+vQSZJTwzGc+ywtf/BxKSYbjAY2VJbIrl5ntKqRu0NsvyTOLc4LDc7PMdhJoRAyNw0YeL2YZ5VB4w+DWHUQjIR9rzKUee9uXuNZRtJuep1csx5aOc+yI57F4g073NObUCW6Ul7n1uxeZDPc48kSfpfc2kZubfN0WrH96Dc5nNG5a1gYNumsn2d7ZQM01aQ1bjEYjvHeUBvbGIxa686yur/L6y55IaZS0lNYxmhhs6TCFqWb9guNIpXE+EB9a56gJx9lOl3eHffJKT2eqquBFiFrCdgnXWCkZRmSm1/ZHxGEPhLM4D0XV5zg4Ihx46Q6GI5UQyPuw1CZEO0EgqPpnxoe0zeIDZaoImMbpCP20aOCqE0WKcMrcS9bdgbyFQhyQT4sqh5l25vEWraJAvSPCRZjOfE2bq/d6PuF0OdCirG4H09wrwApcuANWTbNnfupTbFy7RlkUB4UID3gbQg8Io/uq3uJzX/qTPPLM40EwyXnajTpYiObn2dveZ2UphqhkayunvtBAk9EfF9y8PcQh8bFmeWkRsNx+7y5SK8QRTzLXxu7B4P2SRz7V5hMvrrGpWtS7jpYckahD3EwPsVtMOPEz67QiSXa9TmuuxS8fLdi/scf8oyu0VIN+qWjf9ggtuXz3Lul2RrcdoZQMim9VmXOrP+D0qmdldQkVx/g8w0uB0jGjScFgmLMw36imicO/mZ7WXlXCU94wW4s45Ru8tdcLtFpFhpaKTr1xXxofbj6uIiSZVlrvFV7+VXsgnAVCNcM4W4U3QHVqBFiwD/2Jg6OkIsSbHqX3kUIDFeRYVuHKPTK9af9k6o9TAglXyVII7uv4VwUDKkdQhI6997LisnIVDr9iCCH0XKQLeRZVn2RaZAiOck8NAH9vti3SuppGDifY2ol1Pv+lP83vf+Wr5INe4B7DVLotIQuLmi1+6d//SywfWWM6vOm958btDTZu3KWzXLJytEG93SJpW7oznsJrJrlDKkEt9qS5RYqYIptQpFBbXCJZPsSZM/MkJ5rcvuno7wncYszduyVHF1NeOVfw+ju7zKuLHF+9ydn14+wWgiypIWKY0S38/h7NnTa19xQMcw69cISbept3XzzJJ47P0D63w2V/jXPDHTrdFUoT8rveZIzzhkYjoT07y2BrkyjSRHnBJHX0BhlpWhIXJUktCbujukFJJFJprBmTqGWW6w0WxR1+47Xvs3V7g/Zsh0++8NMstGZDuCwCIckoG2OcCRBxLxilkx+6Rx8IZwknQ0C7TdGH0zF1fGCnLKcSA1MIqZiOu0/HVkLvQcjpnX56VAe03HSkfnrU+ip0EyKoF4vpHJH3qKpYYH2VAKr70ZDTUe7gcbLCykxDMH9QmJhKVEwHKe9/zyrEFFSb3B00VoUPZfHTzz7B2rF1Xv72N7n1znsMd3s4ZykKgxGS5z73GMwGBecwViN4+aULvPbaVWqLHQR7TDJHOi6oz9WpNzRuKMnGmvZCzCNHLLev5vSGnoFNWV9dw5dD+jfP8db5LJCTK4mQmu1jMXdERr27yhPrXX7pxdO8vqU5n2l+/siA+dZhJmnCd966zY1//gr+Zp+oW2Bsn+R0i5X1lKVfeYGv3YHfuzLi3/l4l2hmla/9/b9PoxlQm9Y57u4Psd6gRMTa2iF6W3cR3qHjiCwr2NrNWFnK0HVFLdEoFeDjQoSmsFICVw6CMJQQPLF8lPear3Bl8wrlZI6rt6+RLxuWm7Nc3b7DxYvn6O/exZscpSUyblKk2Q/dpw+Es3gC9VEIY8ImtM4HztuQdITkXNyrLk2rSWGHhvGTsIdDdjINfw6qH5XEnqs+yPspYQ+mlP29wUrvOWh6HuBgqvkhVzVEVUXANy0zh19YJfPc+1ZKGSo4B/FU5UDcm3a23lJaw95wB49job3EjfFdduUmS588THdvmc33ryOzjKUjK4yaKW9
c+D4/9czPEKkGzlpub21Tjndo13pAHR01GPY8XtbYHOzTjDpMdnoIURLXYkTTsNipk5oxgzsbdO6MOX72MO1GHbVfMJqd53xR49M/1SZbm+W5pTnm5Xvc5AiNPGVreJs3L90g1/tcdfO0s5IjR5Z59caI0coC+fIc/+dffpIThxKubg3JJpbasRaHHl1m47bl6NoSRVGikxiPpz/JKYyhHsccPnqIN994PZDCm7APBsOC7b2cek3Tatap1T3eW3x1TREaZ0dIUV0/FfFvf/pPsTfo83svvcNLv/XPWFhf5sipxzn32mX2r19BSU+tFeOEpTXbQYoHPAzzgKlOinCHCIrDYQAxVMt81Tii6plMo/+pUq0g9F5E1VgUlRiSrxzAVyeXrhRstZqGQ/7AIWTVzJz2aKZd4IOpgMrprAwOYKxlUkyI1L2P0XlLImN0VTgQVdikxL3pAsQUvRcQmTd7W1y4fJ5s3Gfcu0V7Zob5o09w68o1RhtbmOU1XL2Jm1PEImHiRxS3bzD79LPEMsCV797d48bdTeor84hCov0Q7zZYWFuhl2uiZIZap0vnRMzOuauYkaTW8oiZDnONDihH85Emvas3GBhBZCX19i5zUY23fruJPlHnVlmwMxywl13iSLPD4nHNt8oVku4cLzbr+OFd+rMtXj11GpfUmJltsLG/j8pz2mvHOTOfcaauSXcdZ+YTLh87zYW7YZDdexgXJYN0TCNpsra2gkpq2GyC8gItFeM0Z2t/QqcVMTub0WjV0XFQGFBaVcQdIqg3i6BknMQN/tIX/wz9/ZRX3rzAYOsu7+0P2b+ZYSY5LlJE9YDpT4fjeyH9D7AHxFmq5l+VuN2bMgvl48CIrrDehM0+bR7d10+h6sNM0ZEBHXdfR14EIgzBNLmu7uzqD4/1h9KyPZgrss4yLIZs9W6jpWZl7jDGWdJ8yDAdcf7SBaK4HtZlDUIJHjn+OHPNLnVdC2wi9zubVpSmBCQ7o33ubN/h7Ve/R+/mDcosRyeefrLHcOTJ+gNM6SgmI9Aaa2oYVxLHhlrsaNaTSkXA8d23LjHaGVOUObPtBF/sMs4lNSXZvzpgebGDSQdQOEStw7iQjHeAnRQdQzvRdLSnUUZIIVHSIYcZh6WjMXGc/cwJ5tZus9V8Atk5zRJLDMc9Ti/VeXnzMnPpFfJOm69eHqFlgbl1g6NRl+NzZzhy7DROa36lYemZAj/u89bQMzj6OLr3B4EE3kHpPP1JxlLb0W63mJ2dY3tjTKQkpXQUCLb3JnQbCYvzNZqdAhXpKo+jinMNzhUoHR1MWLTqXX71S3+Wfvp32djdwtsxUB6E19YE/Zt8XEyj6x9oD4SzAExRbErJg9AFG5pKqhppnzJDTje291SMhZ7QsQgJPd5C1bOZCiCFClLQGZx6mIAD/Mk92C+k+RAhFYV1XL3zPtl4l8HONrYsuDa3TDnZxeUjvEi4e2MTkxegIuqNBqUp2Ll1naTR4uTZpzm5fJxmlNDPx0RKEbuYCxvXGYyH3Lx6hdH2Dr1bN/BlIOaLY4nJh2ydP0dU72JSg3c9rNsjbswzGRqEyMmyjK07l3k7qjMpNVd2tmgutWnkNcp8gowSZhuAG7L4yBHczgRd1yjg2FNrTHYHUBiSRky9qRG7E9LtHkpYGp02NSuJjCeWini2hqwV7A4a+N4mw/ENfu9yQaE7fPqxdUQjYrfhyYv3+cuPGc5FJyjiT9KM5pDLMwy9Z7/wWK1oyIhI1Bj3Yt69W/DUzCybgz5CgnSO7f6As2sKhOT4iWPcvn2TBNCVvs54bNgZZuzuZzQ7GSoSRDoCZIV6tdhygo7uKYR5D4vdFf6jX/kL/Ff/6O+wtbdDa14x3vYU4yD0igRvHM484CeLIGCn3RSf4izec9A4cjbMAekDfHs1GFkpgiFDWTk8X4VoKrzQV5BhXzG0DKpNW9cxWZkfhFo7w220jImV5sqVV3BIBhNP/9Z14thSZBnGZGxcvUZSj0mSUEDIxxmutCALbJ6ik4h8sM+4t02RTRgc22amPc+Vq+/RaDY4vH6a9y+8x86tu3gHo609RhsDlBbUZjS5AB0J8lFG2kuDTkuZIBWk+4HhxWYGnOD2lQtsb+yQ1p9kkpbM1xJKa4lki+FklVJAM57QqSny9hKFhbqzRJMJ8zNBxgEscV1TVGyOuYjQiaY3zunt9qHToKYnnP/ODRbWjhI36+z7CWvPrXF8do0/eLlHu9vhrpjdagAAN6xJREFU504foTV3h5UVxal2yjv5LV67sc//+2t1Pv/JNb6w2MLLGO5MuF6zLM3CZ0vNijnM3VE/kN9JuLm7F9hupOLkySP8/u+F66i1RilDnhv6o5z9YcnMIKXRTFBUkYAKYbct0yrEDif6dEDy6NJx/oNf/HP8rV/7/zNOx6ga+HHIj2uJojxoD/xgeyCcBUJfIozhV1xPIS67NwxZVcmmhSgpQmOvtIZEawyBGzkzObo6UbSU5Lbk4sZV+vs9TNpnNOih4oR6vYkxJWVRoKUgG+whkhZFWlL0N4ljxbBXUqYlUpdY41CxAuvJRinNjiaqCVxpyUYGKFBaYE0UsDHKsX/rCns3r1PmAu9yZpda9DauMRoY+jd7lKVHSk9UV9jcYtLQdMvHBS7zxJ0IZx2TnRSVKLyTJC3QicMUCZO+JaovoF1Et90lG40ZjQs63SY11Qn6iWWHtAw5nSs9w40hM3MxQtbBG1B1JpsDhpnj0edOsDy3QOwMBs97PcfVIYwiQWsOTh/pMru0xhtpDxuD793mWCel0Wlxt79HPzmCvz4gaSWcml/GryzS6+8xa3IiMYtyjttv3uClY3UOL9dZm4G6nw8EerYED71RetD0WFhYoNVqY0YDvIMkCsOs6Shnfz9lPJuQjjOUrIUGow5DmdYMqRpjB5CPkJp6Hlt/ir/087/If/9r/wvJjMOk4MqqB3cApPrB9kA4S5hymioDmypmDhNT04ahACZlzub+DkkcM1vvcPHW+wz6eyT1Jq1Wh6bWbO3eDb9DJ8x25+kN9rl56TJmPMKmm0SRRGvPwIVju8hzJv0cISMaiysIGSN1gzybUOQlxjjMOMc7Ta0FSU0wGUyIa3WElOg4RseeYmIxmUUoTUmBjiXWeNJxgcsKonrEZJBS5CWDHcNwo4fH0pyrkXQ0tnToSKMijxCW3IXWqNY1jClwBbgyIxcFQiWYbISqt8nMHPlozO72LlppVLeB0wnWSZw36Jpmtzdgtt1Eake8UGPr/U0aCXQXZkjaGjlbpzPTYDudsHHxfbw1gSZVSpLUsb2dkiXQvymYmAYnTtTZ6beIj0fEUZv3b1qOlEN+en0dcb1JerRNNtOi8Cl/5VPHEThe397l2M0x3BxyaD7hm709bL3Bf/zMXDgVZGCgHOaGrMho1JokScThw4e4dK6Pv08RoTSOwbhkOCoZ9TOiSKGVwhpDFGlMMcCYAq3jgyLPQUgGPHXyWZYPvcLdrfMIKcn3S2otTdSiIrz4wfZAOAsQBEZd0Imc9i7wgswUTLIJw2zMzdtXuXvlIt3VQ9
RqTYa9HfauXseT05lfQLiSdLQfxDtthFN1cJ50sE8UQVyzOONwUgCaLM0CeCtOUHGbMiupNyVCedA1ZCRpNgRlHjQsTWnwCNozTZKGRkYCHSfoWLKf5XgFSoPSgTNMJxLvzLTBQpFaRns5490UVxqSrsYZQ5kZdCQDvRGgYkHSrfo2xuIpcVlISFWkwBmcL5mdfYL9XDC82iPfGiPLAtXV+KMzNOc7wWGkxmYZhY5IajFeCWpH5tm/02N4e0ijXaBrCuciXjx9mLa3RJFCuoqIO4r57tUxu05jVpqcfGyeL50+zNYoZ6+f8frbA1CemVrOW+9fxx5apHlsFltLyJzln711icv9gq2sxl9/fBU3Os/jMx1e7isaUYtvb/XZ3DDML4ATktxYJnlBPWkggMcfP8vFc+eZYpq0DOjJ0aRknFmy3JKnJfVY43W4wY4nfd545yVefOJ5El2jyvSZ8lUP0hHzh0/Q2+uzt3sXkxvSvj0Yuvyhe/SP2wk+iGVlwSTPSKIYay1ZOWZ7b5fNrdsM9reY7O8TJzGTvbuYskQUBRvDEbrWpEgLvJswYgMlDVlaENcExciTj7cCmz2h0+eMIC/BlhqlPUIJisyQp55uW9C7s0mmNLWmJqpFaO0IeAtNsxNomSYjW8lfaNJBTlSbThH4IJTqDB6NTkBqQ2dRUo6h29XMzCg2blnKocAVPgxKlgGTYkpL3i+JmjHOlCgtSLoh9BIyohg7dN0TtzwmM4ioTSmWqQlHfalN1ozwCgbX95nspuh6EhCgSYRXMXuXB0TzEUkjotltUEpHQ2r0eEJeZMhmwZVty/JsE5EavAxlSVFCd16zdaVPanswO+CdyRb1WkxEg+eWBHtFwrWNGp/8zDFemsS88dIWJzb36Z6e4fZdya2iSd3ljMRrnP2rHa5OGnyi3aVvFN++MabXOUa7vIQACu/YGvRZ7M7gvOfIkTVqjTrp0KKFJ1aSTEgmhWGYlkwyS6swlIUhTjQeQUMrvvXKeQor+OmnP06sk4MJDiEEu5MR1sPcodPk+xG+zEhHOdnAMCVk/EH2QDhLOh7y8lvfBZ1g0xH5YIPh1hZpf0iZlSit0VqC9MhIMRlaMIZiMKrmewpMnuMUQSVLKWTi8WODECG3uK/GTD7xeF+SNBRxLCnTCf3NlMlOQa2doKMEW2aUhUOiSUc55UQyt9xAR57xyKGUIh3mOOvIJxnOOZIkxqQlzhqUDpXtejeiNhfCK2stKnbU2xJbSLJ+hk5ihHDUOjGqI9Cxokg9Iha43KMiidCSWqeGrBlk5JDGY3yXrO84fKSBNRPKpE7hoejkjMcZmXGYvCDaTLFbPVRSp9zIYVFhjMWWirF0zDfqLMwIluc3uZNGvLa1QHFXsHyqy/qZLme6XUZCcvxTNVaaLZ5bmGXHepajhOt5ytev7VLbkSydLrjgc44sx5xcP8QLNiJdm+drvQHX39ngV55NeXb1OjfKBd6f1NAz8+z1Rkx8wuzyArXd62S2wCPYGgx5jPD5NZoNjhxd58p7F7BliZQQiUDlmheWLDeUJhB8W+tRThAnEfOx5yt/cI5Oq8XHTz8Z0JM+4Fc2+yNKC41OgzOfeoxh6rizlWERiLff/aH79IFwFm8KNt/9A4yx+NIQ1RTWVPjv3TFxPSJuVmPYZYEQOVKHMq/zHh05pFLBUUpHZl1I9jxkowJdCqKaxBlBoyODCM4EvLOYJBBSCgG+DJDmfFKESWGlyPIUZyxFphGiRZR4VFaSTXpkowkm0+RpRq0dQ+xpNGKsKbHWEddinBX0eyVSS+7mlnpTU2sqyjzGWofJDcJ4aosJK8dq1OoqlEd3LXkOZVaEosKkwA9LVCRRsUK0F0gHOW6omIyLwOmrJQunFrDX9ohVgt1MGWwGNKLPUqRwZJf2EUmCnumyenKJs3NDci352GNf5Iiuk79VsKGG1BYSHj+8wFxTg81ZjSP6zrJvR9wsSmZ1nVc3d/A9w5vXHJ/e3eTp3LL09DHGL22w7cE/v8a1VpfaTJMnjmYkseaRpMO8GPDtXsYTDc0rXvFLiyWvbBYIHShwN/b2w6RFhYQ8deoRLr93IZC5E5CxxsBoXFAUFdzY2jDlIQPJx9GFJt+7tsPvvPouZw4foducAyTOFmwNh6G/4j2F84yNooyaGEsFKPvB9kA4i7OO0e4AnSiEcJS5xGaefBwoUaN6hEktJjXhNbGh1oqQWlD0LFJ7EDYcHtKS7Vts5vHWUJtJUFrhbBgxcYCuebCEo7uukNrjnUTGFqk0MnJk+wU6iYjrEuoSk3myNAvtHDy1lsTmEcU4xbsCVRckDQEktJoa5wviGHCC4d0yqIg5STbIEUiKocFkBmccejai3dUIpZBSMDOnaXcFdzc9G9cLioHFZjnWlCTtGkl7mfm1M5hhSmkLknpEcatPsTPGtSMWVtuM91LSrQnCCXAl1okgr+eBNMNkKZM5zw1RgFT81ivXELEm3ZWgGywuzNCo1zk5M8OZOGbPlTwWNxBCsD0a0hQxL84I3rEjHtsbMXt1QjyK2fnmJexMwuqXnuT8U6usbo9ZMZJrrTly2+WwmOXGaJ8VPeHCZJnPzEz4/PEmdy93uJgN8d7TS1OsK5AqwXs48cjx0MMaDXFOhlPCWvLckeYlZZFQpBZTtwHWkMScOblC+VbJxY2cf/ytb/MXv/AFWrU2mS3oVZHAdMS8HVuyCMZxjPgRcsUPhLN4D7YMTB5xIxALFLmhyApcYZnsjLBjj4gUeIiiUCLMxwabGxAaHVOpakmksIz3JmHgrhMjFEz2S4QQ5JGg1tJQM6gkQHvLTKBiQX0uwqSGbGQoRgXgiZsJ+ICTyTKDlDDp5ZS1BBUr7ACihqpKk4ZaDXQMcRJkxpMEvJTs3YH9GwVSg4wB5zETU3VGFekEclvQqEl0DLUYmk1JLYFRf4zLLaqpQUisPoFJLV3tMTomz0p8HEPLIdeaSCmoRSWd5ZjhLqi5ZeYVjHd2GPkSvbBEMj/PaDJi//YOM4/WWVtdIDaeEyswyAvyjdt8++4NXpGeyHushG69RrfeYCQFwkUUF3Y5u7ZIfm2HnaZj74kWazRZHEb4iz30ap3NTcGgSOhv9Pn5p7r8w2s5Ymj5xUc0T0W7nFmqIVHs9sbQBCzsj1MmxZhulCCloNmscezEcS6//Q5CVNghAXlWkhWWvLrpeB+UErQQrM3UWJltcHNo+c5723Sb3+Hf/sznGBc5wzQPVFmlPcA2LXQE9VoTqdUP3acPhrMYR95LsbkNm1OGpmJckwinA4tJ4lCxRMZhwncyLMBC3NLU2gpvJWUWmlNJB0YbIel21mIygU0duqEoU0neL3DGUpvRlFlJ0orIBoZ8YCuibdB1j26WWG8pBjFYjTEWjGeyPSHpEgi5I4jbGiSUJpBBxFJRllCkHtcEqYOKmTcOVwrMOMOV7gAUlvUL+ncz5o/UySYGUoFtCmp1T6sr2anoTJ0DO4F4YR6Vpkyu3IW1OrIeIecamHocOICVRLViRCpgewLJPicfXeP5oy8w6Rt+/41bXN/bo
73YZWl5ne1sxI2Lt0kizZGVeWbm6rSiiKxwrHQatBqamaaiozzeGO68vcHdlzZpxw3KTYOWAvNTx4kfP8bg/btEmWXpE2e4hGXlUEH9yi7zosWx2hK/dGTI5Rs5DHK6q6vU64pBv8dqu852MQIpSQvLYJLSbVRyhhKeePpxLr79LkpKIqUQ0mEQZIVjXFhGaUk9K0LFz3miSPK5J+f4J1dKStvm6+e3WZx9g/XVZfLSHkiK+Io+SwKNbIB+0Pss3rnAjSUFtjDIukXXBVLXqCc1vDfgHMXE4b0J+JIoEDVkw4K0B+U4YEGsydE1BVoQdxKiWoTQlnzssYUnaXlGuzkmdRSTAhkJklYUgCjaU5uR6EiQpRH1dhihcblEqybpoB8UhIucbOAD4EoY8hFELUnSkuTSEcU1nHFMBqAihVQJiBKViOCo3hLpGJVIbFnixyXpIGfQk9jMUBpLd6HF4loNU+b3EJPjktbJkywcP4T+zkUmhWf7ek4RpSyu1pg/tMD63Crv9nOGG/uMNkcQNymt5bvXNnj56h2WGhEnji8zmO2idJ3ReMhpFbM+08Y/vU7hBUXu+d65HbyKMbcmKC35uUeX+LjLibZLxLjDYNgDLDaaIE52SF6+zvOPPsL+55/iUpbz5bdvcePiNqeeXKLbWmR7MqEwtxhHmhceP4Lv7bKgDXme02rWUT78fbIVUxrHZm/AofkAiLPOs7a+SntultHOLpEUJEoxMYY8t0zSKtHPS0xpcBUvwvOHanxtEyYuxiQRv/G98xxfu4UpAmGIddUEiK+a4sLSjB7wMAwBuqHxpSMfZjQ6AhVrMCWj3RyVVP0F7aGEcmIQsUTHGtVSlOOqrNsfQVFiZxKiepBumPQL4mZEXE9wORQjiJsJUcOSDTNsGYR/dKywNRtIuFWAVhYpRLGmORujkPTvZhT9AlcWeGOwEiBwcOEjlIqQyiFnPDJ2xA2PjCxlahnujpAxuNKE0TUsQoEow1yaqil0IlEiwk8E3mrKXJG0ajSXakx2LN4IVJJg94e43TETB0NbsnZygagu2b69x+a1bcaFQtTn6Z4+TjkYk+/3cJ0ca1M2ipLexgaPz8/REIrd7Zz9a9foTcbE5y/SfmaV27dTendyEAqhA8b/TrTPpVnBzreuBxh2R+AmhjldZ/YOrP3JM8h2xHdfeYvvvHqNXq/B7MnTXN2IWE13eOT5Gd4YtuhtZ1DfJ71wC3luyOE//yTpcos8LWlImFRI053hKGwMEWYCIx1x6uwp3vjuLsqJip/dM0wLukVCmjvGw4JmoySpl0R1xUzkOdyUnB96okYNl6zw/rUb1Fo14kYCThzAw6daOQ98n0UogTcF5agIITw1hHMYM8HmHm9iSh/4f8u0BCtpLNdpzoRysmtDc77OaNsy2gwy2EIFggdvBOW4JN/PqTWbmJFFaImMPY2lMALiyoKoFthZJj1DazEiaQlcGRqJSjjimqfRrVMMClxuUA1N+3Abm1smOyV4hSsdJvWkaUGkFbWGIIoCorLR1aQDSzKXIJygHOcopYNokwq4Fq0FWWaRcYRHUWYOIWNmjs5QZAPsWNCYn6U5Lknn6lglaexWozBZQZJoXCxR2pCObjK4NIJWl/r6UeR8h7oeM1ED7HDA27s9ZncHPPvkURbUUVqdmDt1x7mi4ObVPj4vQx6mJGjJa6+Nab/YZPkzC0zOFSwemaO8M0Lfyhn4gvd9ydffuUBTzjHsPsmZQzGf3t+i/ls3aTx3hqURvLRzjqOnV9i4UefuxoTOuVvs/Jf77PzlT/HZ584yvvA+t01KVhpu7fUDVsWrAyDg2SfP8sZLr+CNJdYRKrdkWUmWO0bjknaiKEuLNSXaRkSR5ok5uJyFEf5C1fCzC5jRXhijipIDcJ4XATMVqQc8Z5FaABYvAxOJ9xqpDXEiiRsC5yUmFeS9kua8QDc8Ogk0nLVEYYTDKxumdlshbjdFoDn10lBMSlzuySigVDSSGmU+RukwaDbp5UR1TVSLQZTkY4OOJUUa5suy1BFHEbVmkygZUw4IXMSJRMYCPfaoWIbJWe0p0pLMFrQ6TVSkyfMcXVfo0mNLBRn4ehTUwgZlGMT0niJ15CNH3JB4VzIZ+eBwsSTuKuKVeY48foZ8NGTmzJimr7Erm0S9nIVjHbwRDIZjrIB2vUWrlbB7Z5/+q99DNRt89lc+xjPHjiGlZBfJW6/vcvfmBomEp3/6aT5+9hCNbMSznyz5nTd7WGc51IKfO3qLR1e36NQy6vU68pc/h8Oze91z6xvvc217G3tojhOdLu9/6zYff/0yzy/UOb20xkgkbH/jVRqrT/Nnv3gEUVygf2yGXjJP79vXSGSd97ZzDt3ps7V9FznfwXnPuLA4bxFeMRUdWlxaZOXIIe5cuor0Di0dRQ6DcU4tUbRTy3hc0mgaoppDKs/ZecU/33KUNiTzoj0TULejHnFjinuaduHcg+8s4GktCdKewIwF1hTkKeFkcAKdaGQMuhNiWJU4hLAUI4d0gTPL2JS4Ycj2Da5UeOOxGOJOhM8dMpHEdY2VFamEgmzfYUuBalRyfJMSFU8ZKRWuNFCXRHVHkWeMd8a4wiKUCkpkmUUlEfWZCFMUeONxVpAPPdaAko64LhnvZ4x3LPnYYsYWl1viZo240SD1Y5SQGGPJxyXFpMBjUQl47TDKEdVh/kSDVnKUpJgwZsQuLbp6TO3RFcqXb7H7yg5nnj7MFz5zCh012ekPubLd4x2dMNov0Ery9luXuHWh5PSjh6E9w7CdsfbkCt1hxm//znfIX29xObZEooba8GitmZtTuMll3r48oisKIrHN+d33GbhZnj31SVSR4euK0c0dLn//Cj+/P+HYjqD74jpX37oGk5x6oqG2y356kkge5WbUpBGlpF3BYl3w5090ca/2SDfAtYJ41CjNSfOMZk0H4sKq7/Lok49z99p1bGHRQpA7Q5aVpLmhP8qYmcR0i4TEGFQUsVCDrhRsm4pnQUrkwgIeSz7sE9XDjJ8XnloS3VNu+wH2YDiL8xjj8SiKvEQ6gCh8ILUYKQVlanAGysJjC0lUB5c7JsYgIwPCoyOozyWYMpA/xElo4EkN5QSiWiCcy7MRzpa4IpSbvTdhotlLyrEjakLScCAc+bgk6SqSWsL+zRHZICNuRaAErnToRKCSygERlKPA6h/XJPnEMBqMUVphcotLHa5w4CXl2GDGozAoqlUoXyuJKyRGREykRscaMY7QUQN8hDEzzCIYZprbuzkcOgrUEE+u4G/t8c73b3Llzj6LawVHVo/wqSdPcTU5wuz8GRbrE37qZBMxGHBxP+P65m2SVofvbV1lvt2mNdNi7DV2rJk93GJ1SbOUN/jW7TEd/TiPn20z8JLHxQ1W1jdwCBrtBunPPE5+7jKHZha4K3Pq/R1uND21775LfXmOnVsjFn/lUc6v5nz3pS1+5eSAM8sztA7vsX1mhvnHZjj25A7m8U/y515f4X999/vkwpGXjn6a06o1Kz6FANI7fvIY3603cGZYccxJ8sIwmOTESjAcF6RpQa1ZEsUx
jXrCWsOw7yVOVKQjQmJmFiHN8ZMsjAYpwWwzCVwIP8Q+qPLXDPB3gCcIJ9a/D1zgw1L+clCOw0Z1ob1BnhvQEqUlpgCXTydHAzWqKxRxPWI8nIRwSghcFFd4BIdueoQMTlTrWryQlNZgHZiiIG5LrPbYscFmBmcVcSPCOoMWgfDPlYEgQ6BQIqEYloiKUlXLKKypYZESsCAihTMebEyWj+m06nRaDWo6YXBnh1J5ZBxjM4AY1ahT79ZBSZJGgwKYXWmh6jUmucNHESKpkTpAKNaXmoy2xiRL6xxKUmzq4Nom/vQSHFE4axiPMlpS8HLpuHzuJs3emDVR8ORjx3n26HGWWoqVNKN3/grZrV3mVxaZHWneePc6ZjxBlAV7TUU8K4kmltFOyasLDa69GtFZbfNPLu3iappPPwEffyKnLj1RR5JdvYPub+Bw1KTGZpLlk4fYGjb4Hwez6NcGLEd7HD80z1cvGoRM2FtY4uzjipuTiPj8LQwZUQy5FRR4doZD1mbmAk9BIC+g2apx+NgxLr75JkHhILDwp5lhEJXsDwtmBhnNdoOk6YiAo23BuYlFCoXWYbLYawFLS9g7t3FpStSosdhtfih4lr8F/Jb3/s8IIWKgAfxnfGjKXyBkSTEMZM3eOqxxxDrBTHJknOBxgevWh7zAGygyh8sq3HsnoRwbUJZiWOIt6I5CJQUBva9wzqBqilqsKcceMwk0RDKRGEDVNJH0RAk0O5JyInHOECcxkVCoWgSRQEYVRRIC4RUYz8LyIiBJ6yU6TnCuTXumxfEjy7STNhu7G4iOhLiJ1wkiihFSQRwF3IWS1IWEWoTXAmUCz5aSgZxPA3Hikb2SyfUBrNWp13OEzTEXd7GHZ/H1Fjv7hs13S2bWrtOLanzq5CJ1ucTo2pD//Dd/h7luxJFjddpNQ73R4MxMl++9uo2LWiSrSyS1GCsU7bbisedmuHHJsNDRvHimy/J8i6vbEy7cGnG8e4XdrR0WmpJ6PUFuTJAywmQFrX3J7NF5lp87xObRBPXWDvFik//0Fxv8znsdVFTw+rkxh2Y7fN3U+HzWYXTlJu/O1pG+wPlAfLc1GCFVEGG3zlQ0vYJTT5zl/JtvVNStgqywiMwgBAxqmnFaC+ToZYnRmiNtjb1R4KTHe4UUgbi9dJAsLeE2NrDjnMVm40fu0Q/Cot8FPgv8ewDe+wIohBBfAj5Xvex/4sdQ/vLeg3TELRfCpYYk23OY3KJbGluWRInGSY8SEbYwqEggY4e2geGjPucY7zmiRgweTFqQy4AS00KitMcWDps5osYcVo2Im4H+VUYSMREVIYxG6QhnLbVWgi0d9XpEJCxHHlsJmPhSUBpwTqDiOg5PLlp4oSkbklLHqLjGXhTz8u0aXkfYxTNEKzroRkqJ1DLwFQtCLC0EiKA/g4SI0KkOqmQCs59h0gjVrCNrEruVU17dxYyGpAw5ebTDI587wY1Lu9y+nrO4NmZ+eYkkfZ/tuxdJiwbzK6tsT3KW05iT9Qnfu2D4ivOUueVnVzvYC1fJ0xyvIsYm59qbDfaiGPNoi388iXj6aI0bewVPrR1jXCwx2xlwoX2IxrjAp3B41EZ164ybBdunPOn2VZ564iT/wdHj/H9e2+bmjmClY5hZ7PCZI0t8+bWbPJHGPDJziNs7N7muBaYMHAlOWG5v71KUGZFODphCpRIcWV+j051jf3cHTQAH5lmBkCLgYTLDZJIxUzZxkWO1Dg1X0i8lWomKI8wH5pi4junOYXfuEv8IZpcP5CzAcWAb+LtCiKeBV4G/xoeo/KUTzWTbENU0UUOg4tDBl5EiamhMmuNSi4+g1klw1mCtJ6lphBaUQ89oqwhE0tZhjMEWHpV4xjueuKEQMjCsFKWDscGZsBFtFfbpeoxWTWxWUuome+MSZIMo0aR5k9RrciRGRVil8CoOJd44xnmB1Sqoq2qFjCPQEqkIuu1xjJYqjPFH4c42re0LGapogTA88IxJJRFCHXCiaaGw+yNsLUF4CWlKFEVkJ2cY7cb0r2yx/+0L7A4c64ueU08/w83eLC+sLaEbc3QmG8yYGi+9EbGzv8uh2YRaa5aGuIHf6HFqqUv39j6yhKjVZvXZw4ze3MIbjzq+xvaZw4glCXnGGTK23trmSn/EoUfnKfKcIwPo/tQpfvPdDfT+Dl/42CJPP/8IM/OzIARH8j2iW7f4/bPH+StPHOP94Q6/9s5FLlzL6KYJj9fv8vvzDZ45tci1i3VupwZhJb1JGpTbKn1DUUGfG606jzz1GK9+81soAbqCDhtjGI9zeuOCmUlMXpToWky3HrOWSPYnYFylz+KptHc89fkOTju+/vIFClP+WM6igeeA/8R7/5IQ4m8RQq4D+3GVv5qzda8TgS0trpDkI4+znjLLUeNAcWRyg4wlRaWt4gqDUSHHccaR9jzJbIQdGrLtMnx8WuFLTzrxJK0WcT2m3pRIWSfXVQ2/HuO1QkYxTjeJVltYBFIrnI4wKmKsdWAw0jpwAVQlZxVFeAI5n/PBMYQMiXqgqBFESVRxk6mDeFggKtk8Gy5c4HcK5UtRcaHdB4c11tJoeXJhyUeVrJtz9DfH9K5sE7VjVL3OblZHpYL65BouV7zzfo89PLoZ87Mry5w95nhva8Q3LqYcmhvx2MkV1m9doXX+JhObM24KpMu49U9fQdQkZA5/6wby1Rr6M0uMGjm7xJTM0hvCqUab8V5BnmX8+qU9JtRZOvYIz3zhUVoy4+7uLrfTgqtFm+c//ghLEdzu9zjRmWfu6Jgnr95h7bWL7KzHXGvCo5PLOJ8hhMLjGeeWrDTE+p7WjZABMfn0c0/x9vdeoizHgY/NWJyD/UHBfj9nMtegyA1JYYiU4ZE5xfnU4vICFUuiA8FVgTEO0e6yOyjY7Q9/LGe5Bdzy3r9Uff9PKmf50JS/nA13BqUUTgZmQesdogCTCmwRwquoqXG5C5ht6SknqoppI6JEEsdNiglEnRmEjlFxAkmEihNE3EC1GiitieKEPPconaBjhYgUQimUgUeOLnFjr0/uQngUOMV8EGiVGgglTMmUPxmoYmCtVdCBqbDgrmK09BUrjfcQxRHWObSaapEF8g0HmNJiTEmSRDgLUlXcu1aSpZpiPGCnKJkVMc1GE92pM/vMYURSMluzmNTSMBusz8ChEw2a3S7fOtehm3nOXT7HZLXD5M51JmNLcvoYzzx5mL2VAcNbGeutDqKmKfsl+uOzDM7dQc5KfOpoNJs8+ac+we6iwZqCGZb48jtDHIqnOynRypBPPrXGWC3x1hsD/ttvX+RPnKixMa4xWmkwKwzHDkvOrq7yxv4u9v2rfFHGtD9xgrd/9zKjrRFnjy/yifkbvPbGANmZR3jHKM3ZGw7o1BtV8zDgkryH2bkuh0+c4Op771FUp44Tgklp2Rtk7PdzxuOCRjPBmJL1JkQyqCI4GwRtlXO0NMwbi4slnYUaV3+cqWPv/aYQ4qYQ4oz3/gJBk+Vc9fXv8iEof01zBVzQ6ygmFowAGeMKiUShojpSxVDTeCJ0u4GXGq80sW4gogRXq6G
ERusYEUUIraqvcNx+/NFlTviU5SjitzYmXM88xgdeYqkVvrBsTSaQKGpSMZXwdtaihKqokkLTyuGRFV5bSonQIf+QkUIqEXDsCLybcmLeU1tWMnCTxbEOo/s+hI9RpKppZR8oeWxwQOsccaOOlprOeB9Xa5INHfu3eriu41jL0LR9mvMdZmLFOM9561KPQR5x8+Ymv/Cps8z1GhRrx3j9ZMyXzizR6w24/I3XscOMYcuw64a4PUdtJsFne4xbGeMERjanLPt8/x98k8aSJ27H5GiiekRWLPPuqMef++xdTpTv8PZem2dOnqG1ssI/vT5g99Im/4fPn+ap9SO8Ndjl4s1NdBLTOJ/ihrsM5urImZg7madXb6MbLyB3LyPiHKE0XsDuaMKxJXAufJ5TgSIpFU9+7FmuXXifSDtsYREu5CH7/Zyt3ZTFhYxup4YUnuOzdZ6ZUSTGcESmtKShpWAm8dAvKCcFoqH48o+YpPyg1bD/BPj7VSXsCkHNS/IhKX+BRLgZHBofC+LFGkQxcaODiBK80hAneK3RcQ2VRHzu2cN8/8I2mQGvJToKH66odCW9kgck2lJ6vIWrW33WOxOu9zUyaZAIEGUY/RdCBAy4lAdCsN6CrTQ9hKjCLecIorYhFItjXQkYhZK2dWCdIdIKYX2gJ9UaqQJQTbgQqhlrg6NwD8Mpqj7AAQw6D8OlQnpkJGgWBiXH7N60qIbBTCxRt4FghFMa7YY0YsnCQsTM3GleubLEezdusnO7x923bzC/WuPfWnGsuX1EM+bS/CEm9FjPC+z+AI+gUJ6FZ9ZplQtc7xe8cnGMQTFLzF99IaJoPspX84ynlhQv7Kbs7ghqjZSuFhxdmWF3NMP3L+7w9pUmjz9ygk1ZkNy4xYnFOVYfWaO31+Ob3/4+zuU8+8vPsPyJVb52vs/O6gjZHfPZ59b4nfd3YHYG62Fzf3gAB3YugPOECDeb9eOHmF1YYnPzdmDAN4GwcVwU7PRSdnYnLM4lSJnQjhV/8Tjk+yPytCBqxJjC4kqPiV1oEtfiMIP44ziL9/4N4Pkf8KMPR/kracLRF9BxjUgrZBQhtA4VIxGk0agwDGVe0mxqljoNji02ubCbEddjRBR4bAQCZ0J/RU6p7/EIJelbz28Pugg8pTGBMFxUqlFKB0ZuD95LnDUVA36YMRPeY6vpXyElzjiiqFKX8iHxdFikdxUzf/h3yVS3RQXSQOdCY1RKgRIKU72P95VYhrGUeREIy6VCCE8xzognlqxXkC2v05iR9Lc3aNRShjdy+o0VZn/qMdZqBYfzK1zeG/NWLnliVTL3s6d4vl1nd8/x+GefpMgGZD3L1TdvcPpwi2+XLW7HR5FuB7Tks59foaMMRR6xMhOzZxu8fGGfWAneug27Ozc4MSu58SY89UKLC3sT7vTaHLuds/bYKU5++mk6KyUXx+9xsj7kmZljHJvpsL2xyStvvc/Nr7xN82JJ93CX7L0BSivW+/t8cWGLtlZ8/Jl1Xnp/i54P17I/yQ7UA0LK5w/+q9ViHvvY02x99U6g75UCaUPI3B+mbPcz1icltVhRRAX1WoKNY6wrGewMEELRaMQ0ZupEsUZE8b8BVEhxTLK6jFIKIe8vpQqopO2mCa9KQmj1/c0xO0ai6zHIcCzDPeEhZDiyvQh38sDOrzA4hBc4BzqSoAItn7UmcBVUJ4aqZoQqDr/w/k5ijUUlkihSOO+wZQUZUIoo1mhd0bhKcSDzJkQgHy9N1ez0U9I/A9ajIh3WaAxSBB3FwCEeOABELrDDAislrckGIpL0xwXz6xHpWyk7b15F1nKuzyn+nUcXeOFYRtwc0dJ7xFEXZ06w9eJJ3t8Y89W3e/zMasITp1bYynfZ8jEX67OIJw/zqUM1Xvz0AsKWeCzOeh79pOCr5wb85uUhTx3b4HxjBSU8l/cH/M/fcjx6bpezrk7er/HuxlVeLlNqRxb4hU+f4JFmg7fevs7f/f53+Vi3y9OFYvG5pzl/43W2d/bZ+9aYpVMtfvrfW6A9Z5EokkTwxHqXb+8VCKnY3NujtDmxiMNeqVQUvA+kek888xivf/dlip1tpDOU3qAQ5IVhd2/Cfj9jYbYB1TUui5IiK2m0EpJmQtEvsLnBG4+I/w2gQhJCENcDEF6pIFAaaY3Dhc3OtMQayqzCS+4O8xB2ReFPKI3FOhfCn0gdSKqFaRKFLUqECHLWztqgBizuH9EO2Aapgu68qhgnZBUnO+vw2Eo9t5K7sIFRJCT64bl8kqIjjZBB495VZOVlUWBsRfVU8TJb6w70Z5CCJEmCgJEMMbqSIJzBlSVmmNOcTchSRbHnMBs5/YYGJxCdBCsUet/z937rOutdy1x8lYW2odWcpb48z2u3Dbe/c4nN/Ql7i22eflTzs8+M+QvRKr/1xjVuXM64fhv+51uCGUGgQfWQW8elLUd2O+Vv36jzzCdbfObUOrNHZ8lue27v3uB3C8PV+ToLT59k4msc3hO8dOEV+ldK7iZzzC8d5z/82cPc+nu/z+Y7d1h4foHBm9ssPdXg1C/PUJuRAbRG0Nd89rFVXv7GNYpYsTcq2R8NmWu3UCrGOwsV2WJRZCQ1zXOfeZHf/vJXQIL1IZ/0zrHdm3D5Zp9WIjl1aqkibZTs7efgPEtakZYlkTFgBaXJceUDTt8KgY84YKsdkY7wMkhwex9KgsIBNmxWY91BIu2MC+JCzlGrBVI1rEVFurrrC5xWobxoXeAUm7Lma0WZZSAEcRSqVL50aOHAC6RQ1YnmEQp0PcEUJWVh0AK0BCU9lI6yDAOW3jqQoXnm8hInBEIopIBIAj7oTobStg4IytLinCNLc2xZENVivPAU1qKEJGolbPd3mT+cMMo6jLb65JlispfhtWfh7CKLyzMcXmoxVmu4KOb5xQRp79KUbZLGYY4fj7i8lfPS+1u0010+dXoHW6acWBzy736xxbfLs8wME3ypWZ5JOLUYI7RCKUU/hZ1v3qaxGPPso21mrSfvD/j97U1WXnwEeXyV+q0Jt169w9kZy8n5w3QWzvDV3CLLHO6e5603X+K5P7vEE+0v0j8/4kbZ45Evtal1Kq7qqlHo8SwtNFnvRFzIPMZZCldgbUlpJ9TiLkFiBLwQWFfyzMee5P1zF7h44TxTulYtPUWW8d7FDcw4Z6YVsX5siVpT0J1pMthL6W+PqiHWUKRJh0UQwvoh9sA4S5hyA1TgOhYyLE0qhVCSsgx3Wuv9QV3dVAmdlvIgJ1DT0yIPeBE/VQz2Dm9N4JuQIZ9wUqKbDZyxGEBGcWCMseaA0T/U9itJaV9VwGyJFaCkpixybOlQKsYSknUhBa7MQUqUCE7rpKp6KoK8tMRVo7IsDYU1aK0QUUSc6OBM1amjtcbKnNahGYzSpLtbLC6XuNRj5lrIrqLTbeFKw929AYdWZlnsNDBxi5nukfB3ephVmo83LY+ud7jw5R43v2JpfHoJUVxkPL5FPd1jo7/D5sDzjdtNpPDMrLeYbydMUk/DC1b9DD21zPzhQwzjGtYLbuV94nM9Tr21weHzOR/73POcOHOG/+
a1O9Q3LvMzh7b40396xFy3YGIzLF3iZcWjv9JGNyqhKanCdSVMe3thefr4HBfe2cUIwU4/Y21uGRxIoQk6n/dUoqUS/MIv/Tx/57+/TVn0kLbEBR12RsOCyzd2aCYCm1oOP7JMe75LkVaq19aStCOKUUnhHD+qh//AOIvAI1xAqgXBISriOo13BmksIpJhPL6i47TGECUxSgqsKUP9XKqKWDxcCC0VwhqsNeADZVKkVDUoGZxCao3wDmsMQkfhJPMWKUK4JL2hLMLHaKwBQkNSIjGlQenoQF5PCkE+mRBpUJEOOZFWWOuDBAa+4jpzFAdDmQIlCFBXY7B5jlKAE2Q2RwjJ3OE249wilrrsjCfMH4PRpI5PwOYFpRNYC73dPuP9AZGDmZomrivuZIb1TpvnDq2y6wzZrQHm8BqvzM7wq7MTOskiiIKsaPBPN9p8+SuOtZUmH//0AocbNQZVWTZxkiu7Q9ayMZmB5X7J/NeusDS3iLyVIMcxV795jm9tXuGlQnH048f5E19YpDN6iZv7jq+PjvAFuc16PSeq+AJspXqmZVA5EFLijefU+gzt87vsGrjbH6KEwokgdRWqhlVOWcl3d2dafPpzn+Wrv/GbSBV6ddJ6jBYM8pxXL2xy4/oun39xyNlnjjK71MUUhklvzHhS4jzsDjOy3PzQPfpAOIsQApzDlK4SEgJvDEIpTFEgHOHuXIJKNBCSdmkNvvCBSMKHUIa4ho6jqgk4nVQOPROT56A1IolCl1gKlKhyDg9Yj3UluCBN4YwDLSjKIvR6lMRrhROS0oF0BqEihNQV9juw/QspgzBomCyH0qCjKGC+rUXL0In2TkBpA02qNXghiVXQobF5YHePGzFRHMFkQHptgjUZfi5h565AuTTMypGjnMKUDjVbp9uqU2/GnJibZbZRY9jvc7cseX2rhx2PWUhiFudnODuMGVxZwy7MsvjkGnPzHZaLCeL4DodPzbBwZpZPtdr0TWBMyfB0ypwXag3y/oR3bo7oFzHqVsqeH7O/VKI/f5JLT61Sf1shlyW3+wY16eKHjp9z2yw2NagI6wBkkCf3hJtOFIjQlVS0m5pTiy36d4Zs7g3CKRDuoFjncN6hpD5Qg3Ze8MInnmWwu893v/kt8nyCBpqRpJ8ahqPAOvk7372BlTEnTy9gnSctLDu7Y4z13OqNyMwD7izee8o0CyFXlXcID9baSpK5kq9zLigzqTC671GV3HagWI2TuCodl/i8RDVqIMCWJcIKpI7RSUSRZ8S1OhqPw0HpUJGkdCWm8OhYYwpTdYwVUkdQTsVvJMqHwT3jXAjxhCFq1JHK453BQsU/FoUwTkmk1lAabFFCHE4aY2yQyqiGPb0tKU0e+jJR6NdIDTpWeFvQNntkZU7vSg2T+9Cxr2uSBPb3++T5hMiW2FGDqJOwO5pwammWw1Ly7c0Rr73jWCom/JUz6+Rfv0VT13FDg1MZ4yct3V99kc8d7fAbiynxIvSKIb+30+cZFTMeT5gMB1x67Qq3bo9I7mZsyj4bR2O6zTqrNxyrtDn7/LO8307J9q5zuF2wle9QsxkzcY1GAyqK/JAbmKn0YCiTMx2Pl2Hq+Mn1GV65PWBjf0BWZCRxHGQThUeIUHCRUocIRAb48c/96c9jV+f5+jf/gGRvD5VlNIwldY5Y1thPS377u5d4qjdiab7J3l7K9v6EUsClIeQy/qH79IFwlhAeSaxzSGORWuKKMsSzTDXlRcCSTHJyW+JadaSW4DWyVsPZwBklrMEMx+G0UCpoOWpZMchIhAxMk2WWIWzY7EIrnIoQQhLFleQzjiiOwlhNVuCswYwDMhIl0LEM2BXCbJcrCjwh31GRRk1H8LEID+VkgrdhXMMaH8RiTYkzNoy6+NA81UpgTIkUoOMIpCLLSiLjiOMQRipVYJVkc8eTuoLR7phYaPy256bpM5iUdPYiiPZJRmNmui3ceITdS2ksdbn20kXS8Sj8Gx1IL0YXbjH69Q0ymyP7fUZbkmtO8HZa8g2hWGrWWU1qZP/iOmSOspVw6FPrNM+u88kj63THkqiuuCMn9N58j3UxYXZ3k6IvSJbrgSutiqGCSK0INx8B3luQIZQNrJIKLzzHVlq0Y0V/kHLtzk1OrZ+s9kRVgJFhzzhnKnmJ0L9aXF5mb/0R5LonynPaO3eoXb6Cq4oz29mEb35vwPx8h3FhmbTalKtHyGa7WPWNH7pNxYFw6EdoQoghAUz2UdsCsPNRL4KH6/iX7Se5jqPe+8Uf9IMH42SBC977HzQh8BM1IcQrD9fxcB0/zH444PihPbSH9ofsobM8tIf2Ae1BcZb/8aNeQGUP1/GH7eE67rMHIsF/aA/t3wR7UE6Wh/bQHnj7yJ1FCPHzQogLQohLFaXSH+d7/f+EEFtCiHfue25OCPE1IcTF6v+z1fNCCPFfV+t6Swjx3Ie4jnUhxL8QQpwTQrwrhPhrH8VahBA1IcTLQog3q3X836vnjwshXqre7x9WoD+EEEn1/aXq58c+jHVUv1sJIV4XQnzlo1rDH2n3Sx//pL8IcKvLwAkgBt4EHvtjfL/PEsg33rnvuf8X8Deqx38D+H9Wj38B+OeElvMngJc+xHWsAs9Vj9vA+8BjP+m1VL+vVT2OgJeq3/+PgD9fPf8/AH+1evwfAf9D9fjPA//wQ/xM/i/APwC+Un3/E1/DH7nGn9Qb/ZAP6JPAb9/3/d8E/uYf83se+5ec5QKwWj1eJfR8AP428Bd+0Ov+GNb0m8AXP8q1EIgTXyPwJuwA+l++RsBvA5+sHuvqdeJDeO/DwO8CPwN8pXLin+gaPsjXRx2G/TCOsZ+k/evyn32oVoURzxLu6j/xtVThzxsEdp6vEU76fe/9dKLw/vc6WEf18z4w/yEs478C/q9wMCE//xGs4Y+0j9pZHijz4Xb1EysPCiFawK8D/6n3fvBRrMV7b733zxDu7i8AZ/+43/N+E0L8KWDLe//qT/J9//fYR+0s/9ocY38MdrfiPePH5T/71zEhRERwlL/vvf9fP8q1AHjv94F/QQh5ZoQQ01Go+9/rYB3Vz7vA7o/51p8GflEIcQ34NUIo9rd+wmv4QPZRO8v3gVNV5SMmJGxf/gmv4csE3jP4V/nP/o9VJeoTfAD+sw9qIsz+/3+B97z3/+VHtRYhxKIICgkIIeqEvOk9gtP8mR+yjun6/gzwjeoE/N9t3vu/6b0/7L0/Rrj+3/De/8Wf5Br+dRb7kX4RKj3vE2Ll/9sf83v9LwTO5ZIQB/8qId79XeAi8HVgrnqtAP67al1vA89/iOv4DCHEegt4o/r6hZ/0WoCngNerdbwD/OfV8yeAlwncb/8YSKrna9X3l6qfn/iQr8/nuFcN+0jW8KO+HnbwH9pD+4D2UYdhD+2h/RtjD53loT20D2gPneWhPbQPaA+d5aE9tA9oD53loT20D2gPneWhPbQPaA+d5aE9tA9oD53loT20D2j/G+y8leMctJemAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "import cv2\n", + "from matplotlib import pyplot as plt\n", + "plt.imshow(cv2.cvtColor(img.opencv(), cv2.COLOR_BGR2RGB))" + ] + }, + { + "cell_type": "markdown", + "id": "cec8ed0d-8e6a-4997-b67d-a5e49f87c0b5", + "metadata": {}, + "source": [ + "We are now ready to use our model!\n", + "The only thing that we have to do is to pass the image through the model.\n", + "Note that there are standard data types supported by OpenDR.\n", + "However, OpenDR also understands common data types (e.g,. OpenCV images) and automatically converts them into the most\n", + "appropriate format:" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "6cab7dae-8892-4a16-ad03-651fa3bb20ee", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "forward time: 0.030s | decode time: 0.004s | " + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/manos/new_opendr/opendr/venv/lib/python3.8/site-packages/torch/nn/functional.py:718: UserWarning: Named tensors and all their associated APIs are an experimental feature and subject to change. Please do not use them for anything important until they are released as stable. (Triggered internally at /pytorch/c10/core/TensorImpl.h:1156.)\n", + " return torch.max_pool2d(input, kernel_size, stride, padding, dilation, ceil_mode)\n", + "/home/manos/new_opendr/opendr/venv/lib/python3.8/site-packages/torch/nn/functional.py:3609: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n", + " warnings.warn(\n", + "/home/manos/new_opendr/opendr/venv/lib/python3.8/site-packages/numpy/core/fromnumeric.py:3474: RuntimeWarning: Mean of empty slice.\n", + " return _methods._mean(a, axis=axis, dtype=dtype,\n", + "/home/manos/new_opendr/opendr/venv/lib/python3.8/site-packages/numpy/core/_methods.py:189: RuntimeWarning: invalid value encountered in double_scalars\n", + " ret = ret.dtype.type(ret / rcount)\n" + ] + } + ], + "source": [ + "boxes = nanodet.infer(input=img)" + ] + }, + { + "cell_type": "markdown", + "id": "f3c85496-89fa-44f8-ad03-a234f466ea4e", + "metadata": { + "pycharm": { + "name": "#%% md\n" + } + }, + "source": [ + "We can plot the results using a utility function from the Object-Detection-2D module:" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "d7129fe6-a198-4196-b35f-93ba41e50031", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAMsAAAD8CAYAAADZhFAmAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8qNh9FAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9Z7BlWXLfi/1yrbX3Pudcb8p3dVV1V3s7PR7AzBDAACRBBEELgY+iUUiiQiIVchESpS/6oC+MYMSLkIIhQ+lRIt6jA0HgAQQGbgAMzHiL7mk3Ve2qy9trj9l7rZX6kGufWzOYbsyD4StG9J7pqGvOPWeblSsz//nPf4qq8t7x3vHe8Ucf7r/vE3jveO/4z+V4z1jeO947vsfjPWN573jv+B6P94zlveO943s83jOW9473ju/xeM9Y3jveO77H48/EWETkL4jIqyJyXkT+8Z/FZ7x3vHf8pz7kT7vOIiIe+BbwI8BF4MvA31LVl/5UP+i9473jP/HxZ+FZPgScV9XXVbUF/i3wE38Gn/Pe8d7xn/QIfwbveQJ4+67vLwIffrc/GCwv69KhQ/PvRcT+Ld8riiBI+Z2Tg69ByysVVftOy9eU36oq+a7v538CqIII5fWKiKBAzvZmWXX+9/aZIE6gfJYT7HzKOd/tqefnICB3fWb/t/PXl/dwcvdVM78HSn8eB++rqnxbTKDfft13X9t33le96/X9Cysiztt1LTYVToQ7exNyzDjvWBhWbCwu4sTd9UzsyJrZGe+Scj44t+84Hy0P59uu+zvu0/zBlB982485OF9B+M6ISL/jb+Q7PgNA83d8j8Jd91WA6XhMbNtvfwjl+LMwlu/pEJF/APwDgIXNTf76P/2nOIGktgCHPlB5T8qJLmcctiAd0HjHUl1TBzdfNME5uqzsdx0pQ5cSVRXIwKyLTFOmS/a0quBJOQPQxQwIOWeQg4eQE0xiYhoTs5jQZIu6CZ6m8qgqMcOg8oxqjwiM20jbJbxziIOcMl1WnPMEcagAKDkrMWVUwIvgnSN4RxDBe3toThzBOcA+Z7ftmEw6ugwpKzkpMSo5Z1SEnO2h90Y1Xyh3XdPcVst61GzXjmaOhm0WBopk4aFDy3z41DH+77/+FcZ3pnhRHnvsCP/ok9/HyugQOScgl0WWURJfPPdlbu7v0XV2v2Zdou0yMSa6mOhiJmk536ykrGi2e5GybU5y17lnzShKjoq4cn3Z/l6AlJPtBP2mlhOobaopazEE7LkiaE6ICFltQ8wpoZoRhZRy+Tz46i/+yjuu2T8LY7kEnLzr+/vKz77tUNV/DvxzgENnH1QFvPeQtXgPuwEpZ0QcIhBTRgS8QswZkt3Yxntq51DNjLwneyEFbzfWedRlpjHhyFQhUAVB1dNlIEPwgVkXaXMipkwdPFUQfHA0yTPtEjElFKX2gYW6wgfH/qyFrPh++1axxSliiz04ckw4J4hYzOtcoCPiVMhlcZMzzgnZOVJSghN88TZZYRY7YizGnCElRXO5kWJGad5wvna+zTu74Mkpk8tiS2UDD84joqQEbRJGWUEzdyYz1kYjfHA4L6Qusj1u2ZmMWR6WzSRnW3RZ8d7z3AMfIKVoppMTMUWmsSNlZda1tDExSTO6NtKmzDR2zNqOWexoY2LadsSUmM46upxIWYkp2cYSEyklu85s1yqqc0/Wu/0YEwBu7imUFJNtUtnO2dvZA2Lvp4qUfxUQ/86ZyZ+FsXwZeEhEzmBG8lPAf/Fuf+BEWKwCTQhm+QjemSG47EBs53chgNpipnghykVmwDtBXCi7VabDISIMvIcapjmD2utyAjQjzpFIiEDjA05S2dVlHvLVwc8NtQq9YSqhqYjZwpCYlQTzuCfnTHAOH9zcezkRJCfb/fsdXyneL5VQUWhjYiZC8JmYMuNpZNpGkgqxSzjn7cE6wWVBne2YFsHYIkZBRRHnEOQuQyrhXvFyKdmC6xIQM1IF9mYd3gmjJjAJgW48ZdImtidjTpSFpiUsdt7uce1rfD2yxVd8WMq2UWjWcl4g4kqIlnFyEAIrxeugxHKPcs7EHGljokv23yxF2i7SlZ9POjO2yWxmv+8i07ZjFiNdzLRtR8rFCGMkAalLxC6Sc7bPVPNwZLtf73T8qRuLqkYR+UfArwEe+Beq+uK7/Y0gdlPVXGxVFmpSpXKeyntijihCJcF2UxFqZ99XztkjLAG+QwneE7wyTXYDKgV1xW0jzLK5Ye8DsY22g+WE9446mLHYBQnee9TbjqbZdrfgHOo9KopXR9RI0gwozjmc2ILKyRaqYglEyhHnHd45HH3OIsSczYPFTNdZyNDnZaoQo4UuOdsiTKo4JwTnSArRdg6Aci/thvQexnu7dkXKywScImrft9Tk3OIUYlbamFipK27J2Iypjdzcm2AuzePEPHnwnpTsfGOa4sSRi4dFIJV7ldU2B3Fi+Zs62+SSuUhxDnElFJVcrsPZJlk2SxVFxEJT7z05pbJB6PxcYurDrVzOKRVvqnQ5YptRy3gWSZpsY4qRWUpMZ5EXf+EX3nGd/pnkLKr6KeBT3+vrBXDzjBu6bA/eid08EbvhglB7R5cSQRyN93j0YHcqO5TlOoqq0KbEbhcpObnlE6p4J5DLzUTNM3lviWEJ5YN3SCg5AmXhCkTV8roSCmTFCwxCRUyJytnumcUWblNXKNB2EUQsbCseLiZoY0IV2jbRzqItfLE8zPUbR8o4723HTWaQqhaSpZRJKRfjgNiVkI2MOkcoiZB3roR+ZvheHHglSSZqQLPixAxva9ZxZHmRN69u4xDiLHJ1a4eUOqqqIsZUFrPd9zbu8/ybX8cB3gfbtJynChXOCbULIJ6sUIWaygecOLwIoXwdQlU2Go8rz8chJAQRV9ZInueXYOGoiOBcIGPPXNXCXbCc0HmlEmFAbQZbN6wN7b3M6G29eedZHjTvuE7/e0vw7z5EYOC97RY5M40R5xyVdzhnJ2k3ETyCD1XJSxUXPJrz3GAsujAvlRUq5xh428WmMZJSJnh7r+yECkfKYL7JQoiMGtCQMxbClvgWsRyoSySxJNyJ0okybTNttNwDsGRWKcmkMusiXYmNc2S+86WkxGQ7/nRmHk6KR1CBTNmJxcI5xX6OKs7bteec8d6VEPYAAaTkFAnblTUrIh7vfEmv7HO9c6gLTFJNLXbvru/scnh1ERDEO7pZx/W9CbPU4X2297DAswAsNeM2sTcb2+bnPKp5jjaJCjFHS+ZVD3IPgxeJMeOdUHk7P7AcT5zDOU8dPMEbkOLFU1c13jlqHxjUNcEHquDL7yqqUJGTRQ7OGZbqxOF9mGNqFsKnfhX+ITTxO497w1gQQnD0+K53jjoEqrIbmk/ReXjhihcR50CxUCBnOrU8RgpOKyhOhFEIdFlJznZkX8IewWDhSiyxa0teQjajcSWksp3LG6iA7d6dZmZtMrQnm2FapOWYdpmYkuVUQFahbbMZRkykFEllgasqqYSKKRmKI2UhiwpCLFDpPIK0RVgMGOzcnTikR8GczmFv54SYbXGGEOx955B7nzMIOQT2ZI
8qUvvUD/oMXvfHDAz3+2ZKd3kgvZNZrBo5LjdPenvLK3T+OxVerDAmc9tqpJckvWiIpK4Nhik+WVc0zrmmbepJnlaG1ITDr3Kw6IAlYp6DQgBLGpDUHSEbrtNmVdEpynYwwtI+YgaewVU6PnJMsfdz0Si2V2pUbNG/VEzdZ5INFKLFRVNDsgNuVK4uw0RPhVfmhEt5TZ5Hl26vjHz3Lvxia9jTFJ0UIRMGkqpZVR3N7ZZMOO+MVzz8wNwXUED0KQzBYV4ZMQfKSag5uJvSIdR5JexE5U+hxZkC72One29zAnElQlM105SR6cpmpGKYjqxbTI50lZNZp21iBLDNVwStClDCLjzypd6qOLDr50KCrstETpwB9+9z533z/CnOqwdjxhtQVHoyM++XSbH758ROksJm1x5sQCT37+NHf/y2/j392ndXmZ6s4eB1c32MnvY8ZedvQP7nD8MydY/spxttspH6kmyxef5pquaB43mOWc9mrBATVqYYGX78Ov9j3vu7OcVV26qoc6l7PShQ92K67ojLqaUteW1FpsXUnJnGS0c4UuumhVIKKgf/2hVqiHwJKIej40KE5NisIwmIzotTs83kzZr0EdCZi0YjxN/fFw2CNB0ZcGP1D5+EDNiIuEuSJPXBhjn6JmOm4H0e/JByeEyfiAa/UgoVZCnqIU+Cuf5GY9EC8t76mdeCUPB0Ne2b3JVy48SZFm0eVdIOnaRb+r4KOQizg0FS9lH6KGRTBu0Rh6eWdKI47kyA0trWU/OE6m3UjrmUGdUVsRCY0PeJ8CKCR5imsVDNKEG3s3ORptUk73qY62oJ4IadOkmLyF0g100RSgwVucD9gFQ/9KwYobcOKdXU74gs+fPc2JXpPKW0kAtp53d0v+i1d3WPn3vsDqC5fJrk45+eQl+s0V+u1F+t0FCpPhru/w2pmCPzie87u+Ju/2+cWTx0hMC21y7hxp1j+ydPuGU13HG/dKfvfVErfh+b/+3oDf+/ANjqo9KuCf/fG7rLsGwSRYF/BWUtRCsPOBMW5CwM8lzzMUcz6cnlHt4/MzYx3AzJ4JTqwssnN0gEKxmKcSdx6n9jOQ6eOuR+hkERq5jQpDFRtneUjjm4lDSBlNSC8zcxHU+sEPLwTRxMSOW75GREZ8UVB+5Qn2fuc9lvtLEALWW769fp3nz1yik2ZxQCkqSeesMPUjABFAeibcnGavjUbVNtKm5ZdGy2nialBSRjpnqQkcW1oma3e4Nt2KxhlCcZktqdnUHwukMuX3HkyWY/KMN65+iF1bYK0a05ls0z3pSVo9YfzqTE69GSmVgHM1a8ManbYZ3RpSjqZs/8F72Dv3OVxYIEzGwlgYK6r7U14/2OJr+kP+8t8B/ZXTJL0m9q3A4a0Dim6b5ROGpRN7vHlrwHOPneduLqegLx31+iblaEoeSt4+GPKRqvnEuSaNtGLl8R7L7cv820+e5d7+PkVL08xSnvv8MxwPb6CMiapWj7MVJk1RWgCc4AaQ9JhVCv4h2H+WFTnbZMNsk42Mj9m8ZaXb5+b6OheOnyLRKVs2sOCFNrXrYOF/wTnLn/k1Y5A+XGa54IXwCHNOgppxrMKMPxb/Ph7P0tgL4TJ4L1ELMyoJwJUzbL9xm87OCJMXvDvYob/Y40y7h1jNiWeY0eI0443CebkNqU7mSJr0GaJZQSEiJh8IZnaTotFGUKKFceL0/8TKcUatjLAZYopeNDaNYjR5wAXaNFrhjaSX2RpCVXHr5i6T/V0umcDy/h4XvKW5WpH0+iK0ijoUvMeWU7wyuKlleuM+djQhOM/Rzja1GpJ+SpHUA+p6irM15y4s8u//7A5PnS1pNNv0PuUpwxL9Ky+STmu2/vGbLL20hVNbfPVej9c3A9/dvI+/OeKbtywnVeDx8Zhur4GtNVc7C9yoevxHXwn8se6y2POMVYsP1gd8qeMo3t1iev0Qf06DCbhaosEz6yLfTno5FcbC5mBGf3pwkjysnZ+RMT3i3DNTxKqIiL350TUIsJilsunNvZp/8vP5SCyWWRk2K0ck0MjMF4yfNb/xB2NE3YMgVBAQEz6lowY+llBED+rgxWCBiKRYBepnn2H7H3yL8uiIHT/gK2uPST/kKmqrhEcWOWTOuflu5b0kERtt0MFhEkWqov9YJB4G5whBRmPy2jUqCqeKJOF4q81ekpGblGkUQnmiw4qLN9t7ghEAIwmRiewD1eCIcnxAEZpsGfjo7Q20h1Olp0sgabeFdhPnLM55SFL2nOZoUqHMGJU6Vi+d4sSnTvJugKcvn+HuB5pp3uXFXzvJM6c2aao7oGqCn3C9nPDK8C6P25T07evkn7pA62zgRKMNtsmke4JvZo7VZ5r8ne0Dkpfv0mwt0/jMaV589Q5XdyxvfHSSY0/1uFVbau344vkm7wyHfPrSCkuFpVENpXRGVKlCqnQo49EmAV+BG6GTbuTw+Tn6OfNhnm2cMxTMWjs/cVDQb3cZVxNcsHTTjMU8pdBCMWon4rr0yEPHca+IJm8RBXPRpHo209AKFWYs3jgn4WG4WHqI2O0zixaYfW2U0L5nAp+w3GP32ZPc/Zff5cVzFwhVTeUnkuKbJmgt5hkmggUqfj0XfHy0PZ1uRlEUlEd1ROWkhxGtengQG43cwGk9nevuW0lOJ20wqUrRvxARnOhSYhLNbH27mA+PgulwyHQyYlKA6zXZnnq++fodnh1MuDStWDyzjOk0MWmKJpC1uwSVsrZSsK+XSbMzLH90SGeS0rxq+bmXzvA7zYqVCxV3393grbd2+IdvjTiVvMawgkwNqeoKsjNU3U+wuTZl85+8xVq/ID+W0T+9w1+7eIbd/SlnPlxn7wfXqGuL2jsi+3CHcjxi781N3HaPN36Ys703YU+3eOozF1hZyjiRTOgtpUw3h4REZMTe+ViK1ZKXQyr3zw0JqjXfNB9eJLNeRlBUPf+pz0AekCY/UYbRZEqv2eF4nuO0QMb9xER29I+/HonFMruUCsyqKmk3Ykk2i0SLi2bWp8yh5gA6NjaznUSrWXiPDAZ1dHkPCEyIc2yvKk78+7/E7rUdju7s0Suh1+2QtlskIaIuiZmbSyREm6YgJ1SrmdFqGmyuyY8ga2YMd0YMxyXOQ3BO+GehJrjAvYM9rngnfVBQLOUdtkb7coPmg80Q36/EYGstqstEC91c2SmunFBWKb1un+efP8n6jX1e/2iT6WjC08HRP71EaBcEk2DSjLoODHb36E7FvG47OeLGuMTdusfw9tsclBW+ZUhbDfa+0+O/39f84ueu8Gs/s8K7jVOcNmPOt/ts++MceyHjXAgM7+6x8/4GO5u7vP7ut+i9v0X94SFbyw2KUzmTOxOqG/cYlZZJcJh9xae/fBa7ep47bpEvHe9y/Y07HI4nHAyGdEwNucZaK2xtJ7n1wTmCloi94IYk2TGpQtQDbCqEh+59HBTDQ8/CzJoWWOr12dzbodfq0NGKgZYSuK0DxUP0oz/tesQWi4kDIilMmCFBkWLy4H2I0i2ZO+7rGAokDlZ+3s/EpOLIZhVKfmwMvePY44+RNRqop8/
jq5r9nSO2P1gnvbZFZ+xYbLUp2k3pAxD7KqWczEtcwJgEY5Co1U5OoiHrZyQ6MByVBCPzEevEc+zq4SE/i5ojXatZi/d4kH+pjcHWlSBiXsoR68Ek4qdM8BRGMUHhrOP25oSiqjl3cRlzqsPurT3u3tonbxiSukm22MeVJbv7AZc0yJuBRq/B4PEVbtzdgvsT7Nij0kVOPH+G4pNLLNlFDm8l3Gzk3J+kPJYN+UevBwZugxeW3+G51ePcmtQMdy2LpxZZLVd46oufYuN769z74AeEJCc51LTKBtnxFurumCQoVl94EvPZFYb1NocfJXzrrqZd9BibMUqlTGqHmda0mslcbJe6mYpT7GE1Nd5XKFPE3PrZhin1g3Mzj4AHPLsZpCw+AppG3mT3cACnAkt5xpETxvnYeabezwfCf9r1SCyW2SLw3olVqtGiN4irPlGBVAkS5ZAdwyBDwkRHl30jTopeoKcH/Y6XJjrRs51bGt9Z1qJIUzUmzwgnlshOL8OXHNO9ATffuYn5YJNFl7HQ74pVkvfUVtSGriqpCPKarULlCcZDnmkaJiP1QQy7HewMRywuNWgmCd1GgmsYLveW+ebGByJuDQFlZoNJ6VmsFTsgLGgfUCYjyQzBycempePm+pB7m0cs55rVxSb3D0Y0Nht0HPSKHBLLjdsjhtMBOm9RT0uuB8flT1+hvdfmtS1N0m1jWwVP9vrs5iMW7hyyf/0Ob2lYWkv5TLNkbHv83rc0/a/mfPF8h7rj0TTYHijuTgv0uwcsXTlJefcQpTTZlRWq/THFZ/rsfbDJvbfusfLVJYZplxPNParhNY7qPq/dH9NuT1jWFamGVGsSLbGEzopJuwueBGRU4IYoXaAQfzRhbs809ZENEiHmJBqRhCBleG1r3r5xg1/87GcJStHLMopENsIUFWXfP/56JBaLXIFMQdPE0kk/oIHM9StAoSDV0gimQNNIr5KZZJ4pabSE4gRglqPofKAMgalzSLaW7N5zCBri0BDQmnytR+vYs/Alz+HNDXZ+eI3W+oCFVlPc65OE6bQkwWC0fKdq4DHekcXItTQ4gg4UIeWWn/Kp08skQGviCKbmTNqiSFJGdopSYH0UnsrAheDUXP4aCDQ7XTqdFtv3d8A6qkmJLWuOKks59mzsONpJYPn8CpOdEflyl8yk3Ng5YtDPWer2mO5t4SbQO50yGRwSbh+gTWCSaF55HZyuCCQ89lSXhWNtOH6FF9dGfMee4O88luGOXmW9bHJnuMXeuMP6RkEj7JO5HXZvXMOWNbnRHBv0SJcNrx15rueWylV89Y2XCafPchha/IXFk1y7X/LlZ89w7d2XGfqaDE+WaPJCDBZdbamrCp2khBADXO0RpEtz1neYD4wrKmspq5KqrplMS8qqZFqXTKYlg/GQd2/cwVnFxRPHpWJB0dYKo6Cbahr6z0kZplC0Ek2eaFyAkXdY4gxCBVIdKJQmVSIWS4KnlRhyJbr9TGtMrDmDj9avRIqJknSCLEDbGHyQabmL31n8c8FqRRWPYoMiURBSTe/yCfSVk0w3D1j/7rvkV7dZ6/cYZym+SkiTnDyFwkg5QCUZjcp6lNe44Bl34OxE/IKJ4UZtMlYbLe6Ma6yzsad6AAz44HCiUMaHhMnwiNNFh047o93M0S5Edq5nYmPOpUlo9htsbw/oj6YsNLu8/+EmZTng4GCHUB6RjHq8/k9exg4rLh5fYGnqWWt1WF5apFt1mXYXeX35HPcnBr9zyG99OOCNO+9jjhy6BerUIsniCt2lPomr0JsJV37heW5ffpzjd7Y4aBRkT5/kq493+YVMcWBT/j/rJa8dOP76asrmaMCZ05pfa1qaBq5ah60dR76iUUjIrDYOkziCc8y3ShUox3v80fevsr51wGA8YjiZivGGi2G7xpBnKY0ip9ks6LSbtJpNGkXBp598ik9deYw8zVAKRu4hw3N+smnFI7FYZrqOEDxl8A80+BFO1UEa9NnlfZhTUByChth5qpWe17JKRe2+95GA9+D6E1ErUrnNNTJBaxItJ9bMy9coSI8t0P8rP0PYPWLnG2+ze3WdlU6P46t98iTD+kDmNUkjQZcls6p6WNfkbUNzR0ADZx2hdqhMc7bR4/ZkX/QyhOi2LyAAWAI5rnI4VRE87B6OaWQ5Z1d73FsfoTQ4a7He4r1lamFcOfaPSk5ZMQYvrSJLEsqsw1o3oT+Ge2aJ5754nhe2h5jtAe2ixdLKErvfv0ExWufZ995j83iL7+g208UVTPc8nbMd/s6LJygXjnC0WA2W9dEeVh/RLTLeKTWvnT5PraHdyElbmno0wAxHPDMZ8gev7PLb7x1w+cJJ/qtX4GdMYG/0LsYIEFLXjvGkot1OKXzAxcwUbx1al7igyDNNqzBcunCGTrtNM89pNZu0Gg0aWU6WZmito6RYzQEgpaWnIW6edfDcHA5IfHQKmpXwH3M9EovFIA9qoQ2T8MA9EkA/NJy0Sqb8yisKAzWeQikKjwTgRERJhpLRbTLEyT2iFHZxJSrm1tdUEXGec8hw1F7mOdbNBqACFqRaUyx1OP7XPsd0/YD7X3+D8Y07XDl/nMWikMVloxrTBUKi2HYTFknFqTGSMIPS6BC4nPf5VhC7MltJ/R1iUxusxyUzyx+g2cKS4KNh33hcoYPHIRkz2mi81hxNSo6f7JE1MrYPLQObM9grWThpuVNqmq0uS/f3SF95g1uJ5swnzhKSgvvfuU6mNIkvODHIuBI6fPYXniL9hTP0WopaH3F7O+X+Vc2TqymH0/s8d+wqt6YnaTW3eK4z5VtXp0x39jk8aPGt64pGmtHo9jlM2+TH1hh0TrN6ZZHFU57ldERxo2TrzvsYHXCKeR5NiPa5zkWPN7K5SeEXnrtCkp+Scj082CAfuNEwR8tmyljlZWZlQ6B0U66PRgysoxe/xtRD5T+e8vJILJZZLF1uArlRaA91gASZkjuEkChzSPm72ikSIE9UNNET87SJ97gZ1QXxMA5aS5wakuKrIqSYKPHbtUEe0ERFr+XICLBO6MIzwziDpnKWymi0U4SVNu3f/Bzl9S32v3+d4+0C7EPWSk2Dm1h2korHBrmcNEqOMKXA156zRZ92mjNyFeK35SNkauOuWgltw2gmw4rNG5tkjYLXB7vousa5OnooAyEwKWu+9+odvvD8GTq9Nh/tTal1m+6VE2xPj1jIapaX2/RKS/vmPv20weknzpBXgXvvHjByFhoO5R0HrqK3cZXhPrx2Y4fTdoM/utagmipeLUvyXpeLX97k99+Z8hXu8akVhek+w/3Dx9htrNC8sshnz7XI0gyvDGd2S/6Ld/Y4t9bjXKpI7JT3N3bF4E5LIy5DWSVSbWbqRRn6JqmRiqA+oE7XmNQWi5JF5hyl95TWUXoxVSdudpJQHAmrMVC2RlEHpOwGSgIT9+cADZvJimsvqsJMB3KlwEOl5E1lWpMi+hOjNEZrMg2Z0WQoITkqTRXkiHWxbKv9A7eQEDwBhffSs+j5Uw2p0hgdOWAzFC2IgZ64gCiBggNY/2Cu45QmnF/h/plVsm
sbXBpU+L0JJhPd/Li0qDzQHCRoU8cTTcchmqKjUi60F3nzcBOtFWUpnsneB8k2jkYKKsmoRweEfAGdKcDh5jGCIqfGeYyChXaD4yttms0G03LI4qJH+QHj/QFZohmHmrH1BFuzv7fJ+//P36Ygod1IMT5BLaSsbx2yFzzjV7c5PfmAv/RzF9hd/hSdUxmfMob/97+4Ro8pd3ZP8fmnOzx7JZDmJS/lU+7YFlO7y70y4xv3hixMFNeyKZnTPOZrdjdGtDs5dQ1vvP4axxqB3GjGvsZZGI0rWk2J5yCIxNog5ogeD8Hyzu49blU5MfZKvDMjN+8BwdKLY06ARInjpEJ+XqJ81ZSIw2XtFH62If+Y65FYLLNLEzDBY3REKRLF1AdskMXTTCVnUseHWagwgRqonJfffcArJZ5gWjLhZ73JvH4N4kUmYECA4OWUCXEaTIj6GDllZouN4KOToQw8Rf4cQBnGOvDOlRNsjCc8ezBk8WCCPqw52N5nOS1QlcfXtfRZkT4Tac083Vjh7aMdbDwRvVcxm14GsFpB0Whh3QRvazG/UDKwsxERcs7N3W46zZRmMwel2N66gxrdx+Uncb5JHnKszljf3CBT0NA5zUZKSAJ1J8U1DO3VDv0rZ/lw2CNdXuTJ51LKJ1Z5+dWUC2tDfuapghf/V5/hcHKD7X3H0W3Dy7/b4MxnavT0OrvbP2DqFvGt5/jHGzkj1eVvfOUkv7C0yOb5mgZwUJf0rWeh08PZbZIkIcUwrTzOBWorGZo5zP2mlSAdeBynsgn3bAPnha6i5r2J/Hs9I9wioEkdoCbmiapZ7F+MTYy0kJ/gq/LoLBbjPWfvfUh9tEWipMTSSrhfXrgO84lsQMXeQ+yDnA/UyCIJIUT3dx2Hf1EuKisDo1XcYQCCJGUR+xWQh1SJUTXzRflgV0qUIpnlTiLGCNIPyfcNAa5ZR1FPWCoseXWDc2UbVVWocoT2FcXRR1C3mb2KZ03Fu/6AcTWitKX4CSMx1955Gq0WRTKmTCfY4S4N3ZJTsK7xtgY8PlhwjjQxXK736K2n5NMBq/tbNJpNrl/fZWEyoD0dkBwUXOhC3smopoFRo6DRX+AXnuqw2GnRXeyis4I/vKd5dX+IWk94rlmzsT3h1xf2cD9aZDr5EecWx6xmT3CiNeC9dMDBDw5Z9AXPXfos/ZVjeDTNoz2+cXMD9foGd441OZ3kGBRdAsPhlMcWE/beu8uiy2MuDbR9TrfOaY0aNNoNsiJHJRkmyQFH8BUNlfPEtMOWS5kxxIE5B1Dc4dR8OP3g/+W+z5ZFY28TNXP8+TjcmEdksQSlWT/3NM997e8/aOxh/jDNhk0zlCs8BPfBgx1B/SvH6IyuHfv4+HcPf5wZi11ex+zvZ/Xt/OOzxRaRu/hvH2aqhj/xiYqZTG1t/5C82RC+WelQYUgx+QNCSOevYZnA3x0cUA5HeC9WQ0maym6ZgKo17MopaidjTJ3OZ0/zdxnLNa0Vnau3ad5LMWnKr+4cSskRUtlNc9C12OB6G3BFmzQxrDmF/VEtMRfO4ULgLzr4igWrU86uZfw7ZYZ+BZomY2NUsR/ABLjQGHFp6CiHDu8DCZpisU3ZSfhlpfmK95y4m4HScWPx5NpgR2OO+RptLepWhRLCAyhIU0OaSOKYNiZqUuSJ8JGUuqI0A2dwYbZYHszjPu7P/8pdY/3iJ/FJwsc2LDwii8UrxW/9zf8jRZwz6AjVhhjbYGJ6lIllVxUCNggIEGJdGyJvy89OiNiTCFIiGL1zD/yLRZYc5qfF/MGPsXUBScCaJYUpFImwyEm0wYZAhRzjM5fX+c1R8VSsp9x9/ypnnngcbRKKvR1+4f3/HV/73/49pgsL8jWVID7vfOObfPDN7zAeHmCylKLZikbphqyVSzlpMnY/eIPMtGk0G8LSTXMUDl9NCMHSaeT8zCfP8cyTxyn6K/yDf/Yt9g438aoFyXFqUoYHUxINhVbcuF+TGMVLp7qYeooJ0i/lzYITJxaZFIv8znqT//AvXeS//cEhF19c4ImFPu9fm/DR+oR7u0f8p1+BwSt7DL+/zmK3Qwia/b9wls6lRe66imeaXWzeJNUJyiRUSpMNS9b/+A2+3znkQqPJ/rUfUuQJh4OayimOrbRYWe3Q7BY0Wk2anS7KZCg83o1ROsEkTW5M23w4bca5mmwcDxuto6KzaJipZ+U2RS6H9LAE0Ga+Ef6465FYLEop0CZGAYibvfMBlRh8lOZaL7oWpYIERCtJyvLMTJ2Z152yw+rYDD4UW5Ho+b9xM+p28GI+OcfkQ4zI9pGMKUZ/RikqZnkes3pYzQZCoE0kIMlNCgSshROXr+CLHBT4KD0OSos4K0hTafGEIqcCkkaXNM3nN9BkiQxnXQ0akt4y06MdAoqsWZAq8VKrVZAeRxtUkhISw7DUoCHvdPBJl/1Jl+37lv3dkqWFjONrmrTlGKk299KT/JXPrtDNE/r9Ju1ukzRNGdHgj795yN97w5O026wsdni222V0yXE4GGP3Kv7+Hzie6TXpX1ri1tMnqfImj53qkbdaPJtqOiohmIx7dcnpNKOBxpuArVKGWZu182dYv/4e1FMsmsHEYg5Kmr0GeRCftso7jIHUGFQssmpvOdmsueVg7NNIJYybZSSEBaLRiDLzj+nZCcWDRTSD9D/ueiQWCyFQe7AI6pQgU3nvBP5TSkztrI+quTB7k1KumUio9PFrCWFS9pmZ9gEEMZktAj+fDAfQDx3B8UST9FofF6pDRTInMDdJmFPEQ8xBnL8WZEdrNkhMwsy+9UHpJAzqJBFrV6MU9aREqYSsSEFFrlhiZOG6WuruckxrdQk72MNWE4IW50ZtZpR1Q7NRkGcGQsr2xpTRsMQbzbjSWHuIcSXt3HK4u011qFCrPZ574RyNaYd3yg6/erGN8Y4qwGg6ZXi4z2NZxfc/2GdlSVF9sMiN7hJP9zocrtXsbxp+/kSHVlbyR50Cn1WMEsMPX9lGhz0WmprLxzIWu4GnXcH3RzWbvub5EsqTOcZOKIPj2PnH2fjoVWrnqZxna29K1hjSbBe0u1oIpkBicqwV00KdZKTKcTYZ8X7dEzWpEcGePDMmiumiI89sM8RH0EdmafVMuvznoQwDIplQGmkX9QgeYevK8ahA6Xljr+IxOnOgB6GoqJkbZPBzDoOJjUk0RoqsgOhwaR54EcdDQ0434qIgxGmwn6NnkoYrDIPZUGUmY54pNEGQGR9z9mbJeAIdz/T2guwliQwos1ZDvJZtLQtBQXAOh4UgkRl6MqJz+jy7Vz8gQzEFKAypUWRZSrfToL/QpFQ93nz1R+xUHdpZwu3dA5pZwepqg3GlIWkwmAY6mWXwwTc5Co7r17t852tdmARWLnV54uwK/VaLy8tdfuHpK/hGyrFOh4Et0cHy/nCH3fsj/v5N6Br4antM/eYW9xuW6kdXKcdTmo+fRH/yAt/f9fz9TYXvdOmsLPDUp9ao748Y+yF7RwNOnjnP1bdfJVEarQyHo4r7W2M6nSFZbmh2GwQc3pXCrjAa0eiXnGjAzapiQhIzZ
FT0a/Pzg19uTqw0UFGhGpW0yInETBPzY66fNp/lJjBASL82hPBppdQi8D8A54CbwG+GEPZjdsvfA34JGAN/O4Tw6sd9/YAiRJmuV3NElVla7mzBEB9g/Sco+bMfBMxCTkxUSs5qUBXLJzkR5BQKgEn0PI1rruGfWTERJaoRQZP1pqL5xAxQQPqZ2BfNuUXxdT0wVZDvncSPNzTkqfDcUhXpNFpj0gRf1gQti8TaACqgjfgfYx1VVaONpXvmPMPN+9ijQ1Ro0ei3aLcbnDuxQqN3mnd/8AEbR0e4tEtotEnamnFlqMkpuv/f9v40WLL0vO/Efu9yzsk9777VXtW19L6g0Y2dAEFQFCURHFKrFfKMhhMx0njGmvAHS7IjJsL+NLbDipE9lkcTsuVRjDTUNgxSgEQQIAWQIIhu9L5UV3Xty6176255cz3bu/jDe/JWSUI3m4MGuhVRT8eNzpuZlfnec85z3mf7//+KVhQhGgX5uM+RToyxYErFnVtDTK5o1xZZO7HK2uwMTiuIFMv1Gn1nuGXh83NLPHsq5l/f2ePZYcahd24g3rxFlEqOrs6S9xYDN8E1xdE/dorfS3PEigGtKHTMNzLHn3msC29vsjUc8PiREziRYMpJANg5z93dCVFd0elE1BsxaIUtM1StgVJJxS1dEinJWpRyrWgdNJz/7YbJFOsyLRBFIgzbChHaAE7c6/q/l/1RdpYvee/v11j5G8Bve+//ayHE36h+/+vAHwdOVz/PEwSOnn//j/ZV53x66YekeUrUfL9ThIvQVSjC6vdpeHRAWlDFrFUla+pp7iAu9QevBQbXKcFB2Irvr64JApBxOkoR0pSq6VXlH8Jzj7R8WjmrdrHpdiVEaIwJAQ0Z6JF09W9Ka6k3EqbEfcaW4M29NZWBIEMJHxptJkOJgpnVZcrS0og0j549x+ryHFFZcvG7L3Pl9h3S9gp1HKocUY56lLsm5AM1yZ7WOBNC1zOzJ1g5scwcgq2dHQb7Od2kxZv7A47MdohR3EpT9r1lqd6kKDKsKXhYac49u8Ts+h63vzmmV2hE5vHXxzRETJ2EVqPFbGee02cE6V6OyXJaTDhau8S1vmI0GrItHTrWLCwfZuPyOwcJeZqW3N0asbPUoFGLaC9IakkdawxOx/fNFFpW1Jjbsh3y3uoUu4NQ+V65M7CHBj7lKvUPOc6BUMt7248Shn0V+GL1+H8gcCD/9er5f1Ax539fCDEjhFj13m+834c53MEFKQ6qGve2UFWNolSbLFqEaeRp/hCqHhWVUhXmhIs4VMiQVNSoVDtJdZepLu4wXkF1vEKIp6rdRAoQFSZCyNB7Cdox4W4k/D16pkiFdUofsBn3ejCOpIKtJkpUz90rXc8tzAUhoEpmToY5wIrHKYDNwqSDO/gbKMZE3lAOc175/bt0tODsSps0bvJmXmOxE9NoxIwLh3E1nEmpxYpOp8m48BS54dCaBlHS7Syw3G3xwtkup+cFhw43mUQlIwRHa3XWanXiKGYdwVzcpqYbLF7cQdYlqbRMopTo2ALxkUXqo5L9F65j5i32i3O8sv4G+7d3SS8OOfTEIv/ppzSPHx5yq38chGRSllhnOHT0KDfPv0WsgqamVILeIOfO9pil2QaN3GAjF5Cu1byw9BJncxrS0KCgTxyajtOrwE9JsHxVXQQvKwrg6mpylSzJ/cQXP8w+qLN44LdESA7+biVEtHyfA2wCy9XjA+WvyqaqYP+Gs9yv/NVdXgmket4dAL4EHLA/Vg9Rohp+rG4ASgpcFcKF+NMfYOxFVQ1z99HhuGrnCImemELfAzmGtUgBkaxUO7wPzjD9XUzzI/CyKlMDwnkiHZj51X0HdCpL7qrysgV0tTMejNncB3udW5xHxRGTUQGmBK1QUdCVd87jq8EOAaA8UtiKB9ojajVUU2KU5gpNnIvIyzH7NicqYZRZ1NiwfKiFxIeR9kGK8FBvzXN5MGF/Y4P5Xo1nZhVREnE0rrMwO0u73kAnddbLgmMqYjbP2bh1l3/+xjX2v3OB7lqXU8+fpnZslr02/KAzoW1zGrUxzWFG+q/f5eG/8lNcObVMx/c49GiT8yplbdLg+iAMxo6ykkmesnpoFSc0wjmUF0RKUZSWra0J/ZWU7lwNV0YIlWBtqALmkwk6jpDC0WHCSIRKoiREzeFQV2rSPiT6SsqDXdu5KYvP/UjcH24f1Fk+571fF0IsAd8UQly4/0XvvRfTLPsD2v3KX4cfftjHwlZVrCr88dVdv7pDSAjjHBVwy4gweu+pZPBkNVHsqcgtwmiKPsgfOKhWTXetAxk7b5EqOGakQu4UdFf8wXvxoScy7Uy6aQ1fhvDKVZJ7iNBzcRAuZimZUgFqH+5lpQ3TBsK7A4ddnJ1ldmmOcW87ID2rKl4sVMirDoYlLRJdObMAJ6o8TiF1hEUwmRTke33yvGBuvkPv2oDJMKV/J1C5Wh8uFlTMjZspxThjOC7J52aZW+zg+pLs2hYlFqxjXBrGRcHRxTbLs02K17ZpbmXMUmO+0WB86y4ucaTv9Ph8/QSHf/GTXHnyCL3JmLyh+Ha6zc7YoETJlT/IOPezS5w3c1zavFoJGVn6kwmH55ZpzXQpd3eROIQNeJa9/THbexMWFurUm0E/U0sFEiajnNZMuLE0xQQp5sJxmvLOVZGDdy6Qy3tXOch0X7n3ng/FWbz369X/t4QQv0aQmrg7Da+EEKvAVvX2qfLX1O5XBfuhJoAaIpRSCWXYKY2pmIZN6h6zi3TiQMtwOjqPD9vrlJhPVkl1VO1U02bkQUuGaU5SBXreIQ9wlSHUUzKQ5XmmBYepAwSuKVuBzKosJqAvfaV9WRUZ/BR7gyeq/l4nAlxgGnJKH6pyh08cZ+PS5eq7VOifFAW2CBgXrTXeVwyM3v2b1RshMDaEs6OdAXNzLVSjxd7dgqzQiKSDQGKFqna0wMaYDXKaC3UWrUIKy6U7O4xHhtGNEc54RK2JbDSZXZjhs6dXWNjos/r5J7nzLy7gBgXitYKGFxjlaC/NsfzTj7JlPN9fbnPzlmewmVH2c2ae6vK5n5rjey/uM95v8InZGpfLPEh2eNja3+fo4hpLhw5zY2eHSEpiHTr33jky48hzgykKqNXCn+w93fkOiBCaNkURZBOr8yPvm+hQMkDCEb5SZ6hCZyEqrVLevyPJB9OUbALSez+sHv8s8H/mnsLXf82/q/z1nwshfpWQ2Pf/sHwFoCSwUQZnqfokVeKN9yhrqyZh9cczHe2vpk2dq4gLgvNMNRuVCKGTcIFPLDSkqpCvSvIFVGTj4qACN62iUTWylFSU1h5UW+4vIBiqMj0eW1EnWVMNclYCTLgw5uGB1DlSW+VI+IO/Ze7YEVSthk1TtJLIiqU/VO0CRkdKGVgaXXBEHSmU1FgDIhKUpSHLCho1TVIWzM1ohsMxrZUOSRJhnSdyHll6SBqkucFkKbY/4XBjhnNPn+PibsHVsh+qYM0mUbPBF545xJfOznLpv/0222++y+LxRbL6mPHuiD32UXXFaG6GnfGAq6khqsfIO47JsI6KFPsv
ZdxauMlPLUecOqbYGQr66QAIjeY7vX3wlqMnj3H51dfR3oZelKqKKAJs6SiLe1Lk9/e+PALtMqR34YYCARMkpsUZQLiDfNhVSpnhZks1nfyjJ/jLwK9VyY8G/pH3/jeFED8A/okQ4leAG8Cfrd7/Lwll48uE0vFf/sO+wAO5C5UeLav6xDS/CPxHgSnSTxtLDm9dkDYDjLFVAicOSMSnjiREGFGRohKrcT5cbFVjsNoYqpyEqodSJYKVozgC9NgA03kJW13s0ybk9DALEcJEISr9eOErld/gLPjKobivDyYFIGnOzNCZmadX3iHcNCpGfaZ3QX/fblhVcmwYJsULnNd41UDWc4jrGK0Y9nNMaRlmKWNZ4pyjEUXMdCKajYTJHUfpYkzdMewPUL0Bd7c1taOriEYDH0XEruCtK7c4HW0xPNZjwAY6ucPcp5/g2KlHma918CPDNW7zQl7gMJxzkus3NnmqK1gRkrIPb6Utzn12hb5rskYJXlQ9Lcn2YIR1huW1JWQcI0yOdoHtU8gQTqtIVR2CaWOxOnE+9Fy0sChvMZVI1LRpPJ3cnt4I77lE+JyAhL2vavYe9kE0Ja8CT/6Q53eBL/+Q5z3wv/nDPvd+k0BTyWo7FGglKo6uKuypwhhR7SBCgNAVY2TVoxBV6KVElVv46WdPubimiXJFp1qVEQIyEqjCIQHgghioF7JKzsN7XJUPGecOkkhZOY0UoZrip2MnzlaQ5Hula1UNA06nnqfJ/XT+oJ7ErBw/Tm/nLtaUQWbC398om4aMAWbtkXgXQAMIwaPPf5rO3BLf+Gf/nH5vQmEF44ENR7hviJsJmZKMjGWCZs57Wstd6llJuttnu2l55/zb9HpjxMgHGO9Kg7OriuMLQ4pBwkOfajL/J+a40fgSTi7RTh2Xvvk2jcY+20tj1scrbFwY8WJP8QsPLfHJzQGNJEarGp/YGLLzxh2uDtvQjMN0hnNY6xhmKYUxtJptuktL9NdvIqrxFWfDgGVeBBJ26yzKOLysGIBUEJnSAurSk7tAU+Uq3uwQHcjpPZFQ7fRwcBUEZiHxIewsP3ZTAlqqAjxVNEfTAcqgiuUPLi6hOaBRDfEsB6yVcF9Ppmoy2umdxEPQ7KgYD8W0nzIdrZ8mzOHubQ7CqnuNLDxMFW4lYecylXyfhWqEJmxRWgXJPwgKZgBFNSlg/XQ8ZdrVD7tnTUccP3eaS6+8TOlyqPoB0oXTLJVCVUFEyFtEQP4JyZlPPMen/tiXufHuLSInsQhGE4v3FrzCZpDfKRFdRdSMmK/XcEqRDVKU2afZkIhC0zm2QnxUUmxGzB+e4dCzs/zxhT5z4h2G44zxcMLLd4b8weDbnOjMkt1oo98acOKvfY5stMcTh5bYGEx44qGET75+iezyHttJwehUHTsa8MTJeT69FnNns+RaklCWJd4LJqVjnKXUO3VWDh9i98464NBaMSkLJmlZAfJCJCFliUJhjCOK46q56DjTbvL2qGRc9VGmmj0hlXUBciGnFMDTsjIHSf772cfCWQBiWY2UAOCrst80pCLEK1IgqjLqPUkIqBom1ajM9ACooCQGFaS4SqarMSBJxaJyADkO2P4pFehB05F7LO3ivi7/Qdmx0oqUUhzI5yECZ4AQVaO0GmtRBICSp4I3i6q86UPj03lYPbLG0tohbl+6ENTDhMBjDob/gvqYCvr0EmSU8NTnvsBzX/kiSkmG4wGNlSWyq1eY7SqkbtDbL8kzi3OCw3OzzHYSaEQMjcNGHi9mGeVQeMPg9h1EIyEfa8zlHnvbl7neUbSbnidXLMeXTnD8qOeReINO9wzm9Elulle4/duXmAz3OPpYn6V3NpGbm3zLFhz57BpcyGjcsqwNGnTXTrG9s4Gaa9IathiNRnjvKA3sjUcsdOdZPbLKqy96IqVR0lJax2hisKXDFKaa9QuOI5XG+UB8aJ2jJhznOl3eHvbJ7b0JZEkYqNRKVJdLOMdKyTAiM6UJfh9/+Vg4i/NQVH2Ogy3CgZfuYDhSCYG8D0ttQrQT7tLVPzM+pG0WHyhTRcA0Tkfop0UDV+0oUoRd5l6y7g7kLRTigHxaVDnMtDOPt2gVBeodEU7ClDJ22ly91/OpGF3sfdOwQhzkXoKK+Lwi+0PAU5//DBvXr1MWxUEhwgPehtADFN5LVL3FF7/6J3joqUeDYJLztBt1sBDNz7O3vc/KUgxRydZWTn2hgSajPy64tT7EIfGxZnlpEbCsv3MXqRXiqCeZa2P3YPBuyUOfafOp59fYVC3qXUdLjkjUIW6lh9gtJpz86SO0Ikl2o05rrsUvHSvYv7nH/MMrtFSDfqlor3uElly5e5d0O6PbjlBKYg6ofARb/QFnVj0rq0uoOMbnGV4KlI4ZTQoGw5yF+UYlUhT+zZSY0atKeMobZmsRp32DN/Z6gVaryNBS0ak38PfVOwVUGqRU/a/ptfHD7WPhLBCqGcbZKrwBql0jjFF7LJUyVhXCW+/x0630PlJooEI3Vlg5cY9Mb9o/mfqjFyHBdJUsRThg8j5nEtW/qypWEryXFZeVq3D4tipRVmGVC3kWVZ9kWmSYypCHcM6BuzfbFmldieqEHWzt5BG+9NU/xe997evkg17gHsNUui0hC4uaLX7xP/5LLB9dYzq86b3n5voGGzfv0lkuWTnWoN5ukbQt3RlP4TWT3CGVoBZ70twiRUyRTShSqC0ukSwf4uzZeZKTTdZvOfp7ArcYc/duybHFlJfOF7z61i7z6hInVm9x7sgJdgtBltQQMczoFn5/j+ZOm9o7CoY5h547yi29zdvPn+JTJ2Zon9/hir/O+eEOne4KpQn5XW8yxnlDo5HQnp1lsLVJFGmivGCSOnqDjDQtiYuSpJaEq6O6QUkkUmmsGZOoZZbrDRbFHX7tlR+wtb5Be7bDp5/7KRZasyFcFoGQZJSNMc5gvGcuTzGlec9r9GPhLGFnCNhoV/UepmPq+MBOWU4lBqp8BVF19/10bCX0HoSc3umnW3XAs0xH6qeDjX7a5xBBvVhUr+M9qioWWF8lgOre+MxBbHswWRAqLtMQzB8UJqYSFdNByrCzHeQ9YjpGEzrL08aq8KEsfubpx1g7foQXv/ttbr/1DsPdHs5ZisJghOSZLz4Cs0HBOYzVCF584SKvvHKN2mIHwR6TzJGOC+pzdeoNjRtKsrGmvRDz0FHL+rWc3tAzsClHVtfw5ZD+rfO8cSEL5ORKIqRm+3jMHZFR767y2JEuv/j8GV7d0lzIND93dMB86zCTNOH331jn5r96CX+rT9QtMLZPcqbFypGUpV9+jm/egd+9OuLPfbJLNLPKN//hP6TRXARCz+ru/hDrDUpErK0dord1F+EdOo7IsoKt3YyVpQxdV9QSjVICITVChKawUgJXDoIwlBA8tnyMd5ovcXXzKuVkjmvr18mXDcvNWa5t3+HSpfP0d+/iTR5UC969dY+N/4fYx8JZPAH1GMKYcBFa5wPnbUg6QnIu7lWXwnUbdoBQgpxy3YbsZBr+HFQ/Kok9Vx3I+ylhD6aU/b3BSu85aHoe4GBU1aC
sGqKqIuCblpnDB4bPnR7yad5iq7wl9H6mozv3pp2tt5TWsDfcweNYaC9xc3yXXbnJ0qcP091bZvPdG8gsY+noCqNmymsXf8Dnn/ppItXAWcv61jbleId2rQfU0VGDYc/jZY3NwT7NqMNkp4cQJXEtRjQNi506qRkzuLNB586YE+cO027UUfsFo9l5LhQ1Pvv5NtnaLM8szTEv3+EWR2nkKVvDdV6/fJNc73PNzdPOSo4eXeblmyNGKwvky3P8b3/pcU4eSri2NSSbWGrHWxx6eJmNdcuxtSWKokQnMR5Pf5JTGEM9jjl87BCvv/ZqIIU34ToYDAu293LqNU2rWadW93hv8dU5RWicHSFFdf5UxH/w2T/J3qDP777wFi/85r9k4cgyR08/yvlXrrB/4ypKemqtGCcs2XCf9xtE+dg4i6l2inCHCIrD02FDVSXCU1obKe6VAadKtYLQexFVY1FUYki+cgBf7Vy6UrDVShzsXFOHkFUzc9qjCS2QqWOFcqMUEiuDAxhrmRQTInXvMDpvSWSMrgoHogqblJBVLX8KVw7O7bzjVm+Li1cukI37jHu3ac/MMH/sMW5fvc5oYwuzvIarN3FzilgkTPyIYv0ms08+TSw14Ll7d4+bdzepr8wjCon2Q7zbYGFthV6uiZIZap0unZMxO+evYUaSWssjZjrMNTqgHM2HmvSu3WRgBJGV1Nu7zEU13vhGE32yzu2yYGc4YC+7zNFmh8UTmu+UKyTdOZ5v1vHDu/RnW7x8+gwuqTEz22Bjfx+V57TXTnB2PuNsXZPuOs7OJ1w5foaLd8Mgu/cwLkoG6ZhG0mRtbQWV1LDZBOUFWirGac7W/oROK2J2NqPRqqPjoDCgtDoIba0tECLwFCRxg7/0lT9Nfz/lpdcvMti6yzv7Q/ZvZZhJjosUUd0ilKfMy/e9Tj8mzlI1/6rE7Z5zh/JxYERXWG/Cxe6qqtl9/RSqPswUHRnQcfd15EXg1BJMG5vVnV39m2P9obRsD+aKrLMMiyFbvXW01KzMHcY4S5oPGaYjLly+SBTXw7qsQSjBQyceZa7Zpa5rKKEOpMantX1flZB3Rvvc2b7Dmy9/n96tm5RZjk48/WSP4ciT9QeY0lFMRqA11tQwriSODbXY0awnlYqA43tvXGa0M6Yoc2bbCb7YZZxLakqyf23A8mIHkw6gcIhah3EhGe8AOyk6hnai6WhPo4yQQqKkQw4zDktHY+I497mTzK2ts9V8DNk5wxJLDMc9zizVeXHzCnPpVfJOm69fGaFlgbl9k2NRlxNzZzl6/AxOa365YemZAj/u88bQMzj2KLr3B4EE3kHpPP1JxlLb0W63mJ2dY3tjTKQkpXQUCLb3JnQbCYvzNZqdAhXpKo+jCo0NzhUoHR1MWLTqXX7lq3+Gfvr32djdwtsxUB6E19YE/ZtiUr5vE/9j4SwAUxRbGJWuuq82XFRKVWPwngNHmTqBlNN5ruAgokq6qXo2UwGkUEEKOoNTDxNwgD/x+IMBzjQfIqSisI5rd94lG+8y2NnGlgXX55YpJ7u4fIQXCXdvbmLyAlREvdGgNAU7t2+QNFqcOvckp5ZP0IwS+vkYTIZznot3brCzs86ta1cZbe/Qu30TXwZivjiWmHzI1oXzRPUuJjV418O6PeLGPJOhQYicLMvYunOFN6M6k1JzdWeL5lKbRl6jzCfIKGG2Abghiw8dxe1M0HWNAo4/scZkdwCFIWnE1JsasTsh3e6hhKXRaVOzksh4YqmIZ2vIWsHuoIHvbTIc3+R3rxQUusNnHzmCaETsNjx58S5/+RHD+egkRfxpmtEccnmGoffsFx6rFQ0ZEYka417M23cLnpiZZXPQR0iQzrHdH3BuTYGQnDh5nPX1WySArvR1xmPDzjBjdz+j2clQkSDSgQ4poF4ttpygo3sKYd7DYneF/+yX/wL/zT/5e2zt7dCaV4y3PcU4CL0ezFF93KthgoCddlN8irN4z0HjyNkwB6QP8O3VYGSlCIYMZeXwfBWiqfDG6V3cu6DzMcjHREpR1zFZmR+EWjvDbbSMiZXm6tWXcEgGE0//9g3i2FJkGcZkbFy7TlKPSZJQQMjHGa60IAtsnqKTiHywz7i3TZFNGBzfZqY9z9Vr77BYlnzK5Fy59A63hhnewWhrj9HGAKUFtRlNLkBHgnyUkfbSoNNSJkgF6X6KjGJsZsAJ1q9eZHtjh7T+OJO0ZL6WUFpLJFsMJ6uUAprxUDrTHwAAN8FJREFUhE5NkbeXKCzUnSWaTJifCTIOYInrmqJic8xFhE40vXFOb7cPnQY1PeHC799kYe0YcbPOvp+w9swaJ2bX+IMXe7S7HX72zFFac3dYWVGcbqe8ld/mlZv7/N++WedLn17jy4stvIzhzoQbNcvSLHyh1KyYw9wd9QP5nYRbu3tBGU0qTp06yu/9bjiPWmuUMuS5oT/K2R+WzAxSGs0ERRUJqBB22zKtQuywo1cdKo4tneA/+YU/y9/+1f8f43SMqoEfh/y4loTJ5fezj4WzQOhLhDH8io0lxGX3hiGrKtm0ECVFaOyV1pBojSFwI2cmR1c7ipaS3JZc2rhGf7+HSfuMBj1UnFCvNzGmpCwKtBRkgz1E0qJIS4r+JnGsGPZKyrRE6hJrHCpWYD3ZKKXZ0UQ1gSst2cgABUoLrIkCNkY59m9fZe/WDcpc4F2ObyrKPGN0d53+bkZZeqT0RHWFzS0mDU23fFzgMk/ciXDWMdlJUYnCO0nSAp04TJEw6Vui+gLaRXTbXbLRmNG4oNNtUlOdoJ9YdkjLkNO50jPcGDIzFyNkHbwBVWeyOWCYOR5+5iTLcwvEzmDwvNNzXBvCKBK05uDM0S6zS2u8lvawMfjeOsc7KY1Oi7v9PfrJUfyNAUkr4fT8Mn5lkV5/j1mTE4lZlHOsv36TF47XObxcZ20G6n4eoTTCluChN0oP+i4LCwu0Wm3MaIB3kERhmDUd5ezvp4xnE9JxhpK10GDUYSjTmiFVY+wA8hFSU88jR57gL/3cL/B3fvV/IplxmBRcGSqVB9in97CPhbOEKaepMrCpYuaK1alqGApgUuZs7u+QxDGz9Q6Xbr/LoL9HUm/SanVoas3W7t3wGTphtjtPb7DPrctXMOMRNt0kiiRaewYubNtFnjPp5wgZ0VhcQcgYqRvk2YQiLzHGYcY53mlqLUhqgslgQlyrI6RExzE69hQTi8ksQmlKCnQsscaTjgtcVhDVI1JvcMbS39xhuDPBY2nO1Ug6Gls6dKRRkUcIS+5Ca1TrGsYUuAJcmZGLAqESTDZC1dtkZo58NGZ3exetNKrbwOkE6yTOG3RNs9sbMNtuIrUjXqix9e4mjQS6CzMkbY2crdOZabCdTti49C7emkCTKiVJ6tjeTskS6N8STEyDkyfr7PRbxCci4qjNu7csR8shP3XkCOJGk/RYm2ymReFT/spnTiBwvLq9y/FbY7g15NB8wrd7e9h6g//8qbmwK8ggxTHMDVmR0ag1SZKIw4cPcfl8H3+fIkJpHINxyXBUMupnRJFCK4U1hijSmGKAMQVaxw
dFnoOQDHji1NMsH3qJu1sXEFKS75fUWhqt78+B/137WDgLBOI654JO5LR3gRdkpmCSTRhmY26tX+Pu1Ut0Vw9RqzUZ9nbYu3YDT05nfgHhStLRfhDvtBFO1cF50sE+UQRxzeKMw0kBaLI0C+CtOEHFbcqspN6UCOVB15CRpNkQlHlgqjKlwSNozzRJGhoZCXScoGPJfpbjFSgNSntMYdCJxDszbepg8jA0ONoa4CwkXY0zhjIz6EgGeiNAxYKkW/VtjMVT4rKQkKpIgTM4XzI7+xj7uWB4rUe+NUaWBaqr8cdmaM53gsNIjc0yCh2R1GK8EtSOzrN/p8dwfUijXaBrCucinj9zmLa3RJFCuoqIO4r53rUxu05jVpqcemSer545zNYoZ6+f8eqbA1CemVrOG+/ewB5apHl8FltLyJzlX75xmSv9gq2sxl9/dBU3usCjMx1e7CsaUYvvbvXZ3DDML4ATktxYJnlBPWkggEcfPcel8xeYYpq0DOjJ0aRknFmy3JKnJfVY43W4wY4nfV576wWef+xZEl2jyvSZ8lUP0hHzh0/S2+uzt3sXkxvSfrjZve81+uN0gA9qWVkwyTOSKMZaS1aO2d7bZXNrncH+FpP9feIkZrJ3F1OWiKJgYzhC15oUaYF3E0ZsoKQhSwvimqAYefLxVmCzJ3T6nBHkJdhSo7RHKEGRGfLU020Lenc2yZSm1tREtQitgwisEJpmJ9AyTUa2kr/QpIOcqDadIvBBKNUZPBqdgNSGzqKkHEO3qzmaVERxsUAWAbxly4BJMaUl75dEzRhnSpQWJN0QegkZUYwduu6JWx6TGUTUphTL1ISjvtQma0Z4BYMb+0x2U3Q9CQjQJMKrmL0rA6L5iKQR0ew2KKWjITV6PCEvMmSz4Oq2ZXm2iUgNXoaypCihO6/ZutontT2YHfDWZIt6LSaiwTNLgr0i4fpGjU9/7jgvTGJee2GLk5v7dM/MsH5XcrtoUnc5I/EK5/5qh2uTBp9qd+kbxXdvjul1jtMuLyMIw6Zbgz6L3Rmc9xw9ukatUScdWrTwxEqSCcmkMAzTkklmaRWGsjDESQBuN7TiOy9doLCCn3ryk8Q6OZjgEEKwOxlhPcwdOkO+H+HLjHSUU07U+20sHw9nScdDXnzje6ATbDoiH2ww3Noi7Q8psxKlNVpLkB4ZKSZDC8ZQDEbVfE+ByXOcIqhkKYVMPH5sECLkFvfvr/nE431J0lDEsaRMJ/Q3UyY7BbV2go4SbJlRFg6Jrg6kZG65gY4845FDKUU6zHHWkU8ynHMkSYxJS5w1KB0q2/VuRG0uhFemDDlYvSWJRpD1M3QSI4Sj1olRHYGOFUXqEbHA5R4VSYSW1Do1ZM0gI4c0HuO7ZH3H4aMNrJlQJnUKD0UnZzzOyIzD5AXRZord6qGSOuVGDosKYyy2VIylY75RZ2FGsDy/yZ004pWtBYq7guXTXY6c7XK222UkJCc+U2Ol2eKZhVl2rGc5SriRp3zr+i61HcnSmYKLPufocsypI4d4zkaka/N8szfgxlsb/PLTKU+v3uBmucC7kxp6Zp693oiJT5hdXqC2e4PMFngEW4MhjxCOX6PZ4OixI1x95yK2LJESIuFx1pIXliw3lMZjHAGX7wRxEjEfe772B+fptFp88szjAT3pA35lsz+itNDoNDj7mUcYpo47WxlJohGXr7zndfqxcBZvCjbf/gOMsfjSENVUYLG3jvHumLgeETerMeyyQIgcqUOZ13mPjhxSqeAopSOzLiR7HrJRgS4FUU3ijKDRkUEEZwLeWUwSCCmFAF8GSHM+KcKksFJkeYozliLTCNEiSjwqK8kmPbLRBJNp8jSj1o4h9jQaMdaUWOuIazHOCvq9EqklZT/FOU+tIUnQWOswuUEYT20xYeV4jVpdhfLoriXPocyKUFSYFPhhiYokKlaI9gLpIMcNFZNxgRcgtWTh9AL2+h6xSrCbKYPNgEb0WYoUjuzyPiJJ0DNdVk8tcW5uSK4ln3jkKxzVdfI3CjbUkNpCwqOHF5hrarA5q3FE31n27YhbRcmsrvPy5g6+Z3j9uuOzu5s8mVuWnjzO+IUNtj34Z9e43upSm2ny2LGMJNY8lHSYFwO+28t4rKF5ySt+cbHkpc0CoQMF7sbefpi0qJCQp08/xJV3LgaZdgIy1hgYjQuKooIbWxumPGQg+Ti20OT713f4rZff5uzho3Sbc4DE2YKt4TD0V7yncJ6xUZRREyOS971OPxbO4qxjtDtAJwohHGUusZknHwdK1KgeYVKLSU14T2yotSKkFhQ9i9QehA2bh7Rk+xabebw11GYSlFY4G2a0HKBrHixh664rpPZ4J5GxRSqNjBzZfoFOIuK6hLrEZJ4szap6vKfWktg8ohineFeg6oKkIYCEVlPjfEEcA04wvFtirKWWWZz17O8UTHKByQzOOPRsRLurEUohpWBmTtPuCu5uejZuFBQDi81yrClJ2jWS9jLza2cxw5TSFiT1iOJ2n2JnjGtHLKy2Ge+lpFsThBPgSqwTQV7PA2mGyVImc56bogCp+M2XriNiTborQTdYXJihUa9zamaGs3HMnit5JG4ghGB7NKQpYp6fEbxlRzyyN2L22oR4FLPz7cvYmYTVrz7OhSdWWd0es2Ik11tz5LbLYTHLzdE+K3rCxckyn5uZ8KUTTe5e6XApG+K9p5emWFcgVYL3cPKhE6GHNRrinAy7hLXkuSPNS8oioUgtpm4DrCGJOXtqhfKNkksbOf/0O9/lL375y7RqbTJb0KsigemIeTu2ZBGoKbHbe9jHwlm8B1sGJo+4EYgFitxQZAWusEx2RtixR0QKPERRKBHmY4PNDQiNjsEaF6hQhWW8NwkDd50YoWCyXyKEII8EtZaGmkEllepTJlCxoD4XYVJDNjIUowLwxM0EfMDJZJlBSpj0cspagooVdgBRQ1WlSUOtBjqGOAky40kCXkr27kC2leNcBSt2EjMxVWdUkU4gtwWNmkTHUIuh2ZTUEhj1x7jcopoahMTqk5jU0tUeo2PyrMTHMbQccq2JlIJaVNJZjhnugppbZl7BeGeHkS/RC0sk8/OMJiP213eYebjO2uoCsfGcXIFBXpBvrPPduzd5SXoi77ESuvUa3XqDkRQIF1Fc3OXc2iL59R12mo69x1qs0WRxGOEv9dCrdTY3BYMiob/R5+ee6PKPr+eIoeUXHtI8Ee1ydqmGRLHbG0MTsLA/TpkUY7pRgpSCZrPG8ZMnuPLmWwhRYYcE5FlJVljy6qbjvcfYIEy1NlNjZbbBraHl99/Zptv8ff6Dz32RcZEzTPNAlVXaA2zTQkfQaav3BYB9PJzFOPJeis1tuDhlaCrGNYlwOrCYJA4VS2QcJnwnwwIsxC1Nra3wVlJmoTmVdGC0EZJuZy0mE9jUoRuKMpXk/QJnLLUZTZmVJK2IbGDIB7Yi2gZd9+hmifWWYhCD1RhjwXgm2xOSLoGQO4K4rUFCaQIZRCwVZQlF6nFNkDqomPlAv0K5n1P6e6CwrF/Qv5sxf7RONjGQCmxTUKt7Wl3JTkVn6hzYC
cQL86g0ZXL1LqzVkfUIOdfA1OPAAawkqhUjUgHbE0j2OfXwGs8ee45J3/B7r93mxt4e7cUuS8tH2M5G3Ly0ThJpjq7MMzNXpxVFZIVjpdOg1dDMNBUd5fHGcOfNDe6+sEk7blBuGrQUmM+fIH70OIN37xJllqVPneUylpVDBfWru8yLFsdrS/zi0SFXbuYwyOmurlKvKwb9HqvtOtvFCKQkLSyDSUq3UckZSnjsyUe59ObbKCkDk750GARZ4RgXllFaUs+KUPFzniiSfPHxOf7Z1ZLStvnWhW0WZ1/jyOoyeWkrLgZRKb+FgCEq848/+Ms7FzDQUmALg6xbdF0gdY16UsN7A85RTBzem4AviQJRQzYsSHtQjgMWxJocXVOgBXEnIapFCG3Jxx5beJKWZ7SbY1JHMSmQkSBpRQGIoj21GYmOBFkaUW+HERqXS7Rqkg76QUG4yMkGPgCuhCEfQdSSJC1JLh1RXMMZx2QAKlJIlYAoUUkoNJjSIJM6KpHYssSPS9JBzqAnsZmhNJbuQovFtRqmzO8hJsclrVOnWDhxCP37l5gUnu0bOUWUsrhaY/7QAkfmVnm7nzPc2Ge0OYK4SWkt37u+wYvX7rDUiDh5YpnBbBel64zGQ86omCMzbfyTRyi8oMg93z+/g1cx5vYEpSU/+/ASn3Q50XaJGHcYDHuAxUYTxKkOyYs3ePbhh9j/0hNcznJ+483b3Ly0zenHl+i2FtmeTCjMbcaR5rlHj+J7uyxoQ57ntJp1lA9/n2zFlMax2RtwaD4A4qzzrB1ZpT03y2hnl0gKEqWYGEOeWyZplejnJaY0uIoX4dlDNb65CRMXY5KIX/v+BU6s3cYUNiBkXTUB4u81xWtaved1+rFwFgTohsaXjnyY0egIVKzBlIx2c1RS9Re0hxLKiUHEEh1rVEtRjquybn8ERYmdSYjqQbph0i+ImxFxPcHlUIwgbiZEDUs2zLClxPlQhbI1G0i4VYBWFilEsaY5G6OQ9O9mFP0CVxZ4Y7ASIHBw4SOUipDKIWc8MnbEDY+MLGVqGe6OmE3CrLQvPE5ZhAJRhrk0VVPoRKJEhJ8IvNWUuSJp1Wgu1ZjsWLwRqCTB7g9xu2MmDoa2ZO3UAlFdsr2+x+b1bcaFQtTn6Z45QTkYk+/3cJ0ca1M2ipLexgaPzs/REIrd7Zz969fpTcbEFy7RfmqV9fWU3p0chELogPG/E+1zeVaw850bAYbdEbiJYU7Xmb0Da3/iLLId8b2X3uD3X75Or9dg9tQZrm1ErKY7PPTsDK8NW/S2M6jvk168jTw/5PCff5x0uUWeljQkTCqk6c5wFC4MEWYCIx1x+txpXvveLsqJip/dM0wLukVCmjvGw4JmoySpl0R1xUzkOdyUXBh6okYNl6zw7vWb1Fo14kYCThzAw3EBNPg+G8vHw1mEEnhTUI6KigyihnAOYybY3ONNTOkD/2+ZlmAljeU6zZlQTnZtaM7XGW1bRptBBluoQPDgjaAcl+T7ObVmEzOyCC2RsaexFEZAXFkQ1QIga9IztBYjkpbAlZ4itSjhiGueRrdOMShwuUE1NO3DbWxumeyU4BWudJjUk6YFkVbUGoIoCojKRlejjEEoSGYTyC1KaYxzYa1SoLUgyywyjvAoyswhZMzMsRmKbIAdCxrzszTHJelcHaskjd1qFCYrSBKNiyVKG9LRLQaXR9DqUj9yDDnfoa7HTNQAOxzw5m6P2d0BTz9+jAV1jFYn5k7dcb4ouHWtj8/DBK5XErTklVfGtJ9vsvy5BSbnCxaPzlHeGaFv5wx8wbu+5FtvXaQp5xh2H+fsoZjP7m9R/81bNJ45y9IIXtg5z7EzK2zcrHN3Y0Ln/G12/tY+O3/5M3zhmXOML77LuknJSsPtvX7Aqnh1AAQ89/g5XnvhJbyxxDpC5ZYsK8lyx2hc0k4UZWmxpkTbiCjSPDYHV7Iwwl+oGn52ATPaC2NUUXIAzvPiPtj3e9jHwlmkFoDFy8BE4r1GakOcSOKGwHmJSQV5r6Q5L9ANj04CDWctURjh8MqGqd1WiNtNEWhOvTQUkxKXezIKKBWNpEaZj0P1w3smvZyorolqMYiSfGzQsaRIw3xZljriKKLWbBIlY8oBgYs4kchYoMceFcswOas9RVqS2YJWp4mKNHmeo+sKXZPVSQpDoUIpGJRhENN7itSRjxxxQ+JdyWTkg8PFkririFfmOfroWfLRkJmzY5q+xq5sEvVyFo538EYwGI6xAtr1Fq1Wwu6dffovfx/VbPCFX/4ETx0/jpSSXSRvvLrL3VsbJBKe/Kkn+eS5QzSyEU9/uuS3Xu9hneVQC3722G0eXt2iU8uo1+vIX/oiDs/uDc/t33mX69vb2ENznOx0efc763zy1Ss8u1DnzNIaI5Gw/Tsv01h9kj/zlaOI4iL94zP0knl6371OIuu8s51z6E6fre27yPkOznvGhcV5i/Ah6VZSsri0yMrRQ9y5fA3pHVo6ihwG45xaominlvG4pNE0RDWHVJ5z84p/teUobUjmRXsmoG5HPeLGFPdUcRz4fw86+OBpLQnSnsCMBdYU5ClhZ3ACnWhkDLoTYliVOISwFCOHdIEzy9iUuGHI9g2uVHjjsRjiToTPHTKRxHWNlRWphIJs32FLgWpInINiUqLiKSOlwpUG6pKo7ijyjPHOGFdYhFJBiSyzqCSiPhNhigJvPM4K8qHHGlDSEdcl4/2M8Y6F/QJnPXk/RXRbxI0GqR+jhMQYSz4uKSYFHotKwGuHUY6oDvMnG7SSYyTFhDEjdmnR1WNqD69Qvnib3Zd2OPvkYb78udPoqMlOf8jV7R5v6YTRfoFWkjffuMztiyVnHj4M7RmG7Yy1x1foDjO+8Vu/T/5qiyuxJRI11IZHa83cnMJNrvDmlRFdURCJbS7svsvAzfL06U+jigxfV4xu7XDlB1f5uf0Jx3cE3eePcO2N6zDJqScaarvsp6eI5DFuRU0aUUraFSzWBX/+ZBf3co90A1wriEeN0pw0z2jWNELIABVWkocff5S7129gC4sWgtwZsqwkzQ39UcbMJKZbJCTGoKKIhRp0pWDbVDwLUiIXFvBY8mGfqB5m/LwIaNz3s4+HsziPMR6PoshLpAOIwgGpxUgpKFODM1AWHltIojq43DExBhkZEB4dQX0uwZSB/CFOQgNPaignENUC4VyejXC2xBWh3Oy9CRPNXlKOHVETkoYD4cjHJUlXkdQS9m+NyAYZcSsCJXClQycClVQOiKAcBVb/uCbJJ4bRYIzSCpNbXDa9hUnKscGMR2Hb1yqUr5XEFRIjIiZSo2ONGEfoqAE+wpgZZhEMM836bg6HjgE1xOMr+Nt7vPWDW1y9s8/iWsHR1aN85vHTXEuOMjt/lsX6hM+faiIGAy7tZ9zYXCdpdfj+1jXm221aMy3GXmPHmtnDLVaXNEt5g++sj+noR3n0XJuBlzwqbrJyZAOHoNFukP70o+Tnr3BoZoG7Mqfe3+Fm01P73tvUl+fYuT1i8Zcf5sJqzvde2OKXTw04uzxD6/Ae22dnmH9khuOP72Ae/TR/
9tUV/ue3f0AuHHnp6Kc5rVqz4lMIIL0Tp47zvXoDZ4YVx5wkLwyDSU6sBMNxQZoW1JolURzTqCesNQz7XuJERToiJGZmEdIcP8nCaJCa6sK9t31Q5a8Z4O8BjxFO938MXOTDUv5yUI7DhepCe4M8N6AlSktMAS6fTo4GalRXKOJ6xHg4OWgmuSiuYk6HbnqEDE5U61q8kJTWYB2YoiBuS6z22LHBZgZnFXEjwjqDFoHwz5WBIEOgUCKhGJZBM907tIzCmhoWKQELIlI448HGZPmYTqtOp9WgphMGd3ZAFoDAixqeBqpRp96tg5IkjQYFMLvSQtVrTHKHjyJEUiN1gFAcWWoy2hqTLB3hUJJiUwfXN/FnluCowlnDeJTRkoIXS8eV87do9sasiYLHHznB08dOsNRSrKQZvQtXyW7vMr+yyOxI89rbNzDjCaIs2Gsq4llJNLGMdkpeXmhw/eWIzmqbf3Z5F1fTfPYx+ORjOXXpiTqS7NoddH8Dh6MmNTaTLJ86xNawwX8/mEW/MmA52uPEoXm+fskgZMLewhLnHlXcmkTEF25jyIhiyG0Qu90ZDlmbmQs8BRXKtNmqcfj4cS69/jpB4SCw8KeZYRCV7A8LZgYZzXaDpOmIgGNtwfmJRQqF1oFMxGsBS0vYO+u4NCVq1MKI0o/qLNXF/5ve+z8thIiBBvB/4ENT/gIhS4phIGv21mGNI9YJZpIj4wSPC1y3PuQF3kCROVxW4d47CeXYgLIUwxJvQXcUKikI6H2FcwZVU9RiTTn2mEmgIZKJxACqpomkJ0qg2ZGUE4lzhjiJiYRC1SKIBDKqKJIQCK/AeBaWFwFJWi/RcYJzbdozLU4cXaadtNnY3SBJMuS1dVpPfQbb6iKkgjgKuAslqQsJtQivBcoEni1V4fU1ECce2SuZ3BjAWp16PUfYHHNpF3t4Fl9vsbNv2Hy7ZGbtBr2oxmdOLVKXS4yuD/mvfv23mOtGHD1ep9001BsNzs50+f7L27ioRbK6RFKLsULRbiseeWaGm5cNCx3N82e7LM+3uLY94eLtESe6V9nd2mGhKanXE+TGBCkjTFbQ2pfMHptn+ZlDbB5LUG/sEC82+S9/ocFvvdNBRQWvnh9zaLbDt0yNL2UdRldv8fZsHekLnA/Ed1uDEVIpBALrTEXTKzj92DkuvP5aRd0qyAqLyAxCwKCmGae1QI5elhitOdrW2JsFTnq8V0gRiNtLB8nSEm5jAzvO6dTj971GPwiLfhf4AvAfAXjvC6AQQnwV+GL1tv+BH0H5y3sP0hG3XAiXGpJsz2Fyi25pbFkSJRonPUpE2MKgIoGMHdoGho/6nGO854gaMXgwaUEuA0pMC4nSHls4bOaIGnNYNSJuBvpXGUnERFSEMBqlI5y11FoJtnTU6xGRsBx9ZCVg4ktBacA5gYrrODy5aOGFpmxISh2j4hp7UcyL6zW8jrCLZ2m2U8QL3yE5skat0wl8xYIQSwsBIujPICEidKqDKpnA7GeYNEI168iaxG7llNd2MaMhKUNOHevw0BdPcvPyLus3chbXxswvL5Gk77J99xJp0WB+ZZXtSc5yGnOqPuH7Fw1fc54yt/zMagd78Rp5muNVxNjkXH+9wV4UYx5u8U8nEU8eq3Fzr+CJteOMiyVmOwMutg/RGBf4FA6P2qhunXGzYPu0J92+xhOPneI/OXaC//cr29zaEax0DDOLHT53dInfeOUWj6UxD80cYn3nFje0wJSBI8EJy/r2LkWZEenkgClUKsHRI2t0unPs7+6gCeDAPCsQUgQ8TGaYTDJmyiYucqzWoeFK+qVEV6preB+YY+I6pjuH3blLzHtzhn0gZwFOANvA3xdCPAm8DPw1PkTlL51oJtuGqKaJGgIVhw6+jBRRQ2PSHJdafAS1ToKzBms9SU0jtKAcekZbRSCStg5jDLbwqMQz3vHEDYWQgWGlKB2MDc6EC9FWYZ+ux2jVxGYlpW6yNy5BNogSTZo3Sb0mR2JUhFUKr+JQ4o1jnBdYrUAp0AoZR6AlUoFUAhnHaKmo9/eRStCeaWAaSeCIk6GKFpjeA8+YVBIh1AEnmhYKuz/C1hKEl5CmRFFEdmqG0W5M/+oW+9+9yO7AcWTRc/rJp7jVm+W5tSV0Y47OZIMZU+OF1yJ29nc5NJtQa83SEDfxGz1OL3Xpru8jS4habVafPszo9S288agTa2yfPYxYkpBnnCVj641trvZHHHp4niLPOTqA7udP8+tvb6D3d/jyJxZ58tmHmJmfBSE4mu8R3b7N7507wV957DjvDnf41bcucfF6RjdNeLR+l9+bb/DU6UWuX6qznhqElfQmaUAvVvqGooI+N1p1HnriEV7+9ndQYqqoJjDGMB7n9MYFM5OYvCjRtZhuPWYtkexPwLhKn8VTae946vMdnHbcvPDOj+wsGngG+C+89y8IIf42IeQ6sB9V+as5W/c6EdjS4gpJPvI46ymzHDUOFEcmN8hYUlTaKq4wGBVyHGccac+TzEbYoSHbLsPh0wpfetKJJ2m1iOsx9aZEyjq5rmr49RivFTKKcbpJtNrCIpBa4XSEURFjrQM5ntaBC6AqOasowhPI+ZwPjiFkSNQDRY0gSqKKm0xRq8UVv65CSYnFhhMX+J1CcikqLrT74LDGWhotTy4s+aiSdXOO/uaY3tVtonaMqtfZzeqoVFCfXMflirfe7bGHRzdjfmZlmXPHHe9sjfidSymH5kY8cmqFI7ev0rpwi4nNGTcF0mXc/hcvIWoSMoe/fRP5cg39uSVGjZxdYkpm6Q3hdKPNeK8gzzL++eU9JtRZOv4QT335YVoy4+7uLutpwbWizbOffIilCNb7PU525pk7Nubxa3dYe+USO0dirjfh4ckVnM8QQuHxjHNLVhpifU/rRsiAmHzymSd48/svUJbjoM9jLM7B/qBgv58zmWtQ5IakMETK8NCc4kJqcXmBiiVRxdcGAmMcot2FzgxF+d50SB/EWW4Dt733L1S//7PKWT405S9nw51BKYWTgVnQeocowKQCW4TwKmpqXO4CZlt6yomqYtqIKJHEcZNiAlFnBqFjVJxAEqHiBBE3UK0GSmuiOCHPPUon6FghIoVQCmXgoWNL3Nzrk7sQHgVOMR8EWqUGQglTMuVPBqoYWGsVdGAqLLirGC19xUrj7FSXxaGVPEgmJSIItZYWY0qSJMJZkCowNWIlWaopxgN2ipJZEdNsNNGdOrNPHUYkJbM1i0ktDbPBkRk4dLJBs9vlO+c7dDPP+Svnmax2mNy5wWRsSc4c56nHD7O3MmB4O+NIq4Ooacp+if7kLIPzd5CzEp86Gs0mj//JT7G7aLCmYIYlfuOtIQ7Fk52UaGXIp59YY6yWeOO1Af/tdy/xx0/W2BjXGK00mBWG44cl51ZXeW1/F/vuNb4iY9qfOsmbv32F0daIcycW+dT8TV55bYDszCO8Y5Tm7A0HdOqNqnkYxoW8h9m5LodPnuTaO+9QVLuOE4JJadkbZOz3c8bjgkYzwZiSI02IZFBFcDYI2irnaGmYNxYXSw43JO8H//og+iybQoh
bQoiz3vuLBE2W89XPf8iHoPw1zRVwQa+jmFgwAmSMKyQShYrqSBVDTeOJ0O0GXmq80sS6gYgSXK2GEhqtY0QUIbSqfsJ2+8mHlznpU5ajiN/cmHAj80FgSEqkVvjCsjWZQKKoScVUwttZixKqokoKs0MOj3ThwEopETrkHzJSSCUCjh2Bd1NOTFFRMlER/XniWGNsgBZ464giVU0re5xxYIMDWueIG3W01HTG+7hak2zo2L/dw3Udx1uGpu3TnO8wEyvGec4bl3sM8ohbtzb5+c+cY67XoFg7zqunYr56doleb8CV33kVO8wYtgy7bojbc9RmEny2x7iVMU5gZHPKss8P/tG3aSx54nZMjiaqR2TFMm+PevzZL9zlZPkWb+61eerUWVorK/yLGwN2L2/yv/rSGZ44cpQ3BrtcurWJTmIaF1LccJfBXB05E3Mn8/TqbXTjOeTuFUScI5TGC9gdTTi+BM6F4zkVKJJS8fgnnub6xXeJtMMWFuFCHrLfz9naTVlcyOh2akjhOTFb56kZRWIMR2VKSxpaCmYSD/2CclKwqLMPpYP/XwD/sKqEXSWoeUk+JOUvkAg3g0PjY0G8WIMoJm50EFGCVxriBK81Oq6hkogvPn2YH1zcJjPgtURH4eCKSlfSK3lAoi2lx1u4ttXnSGfCjb5GJg0SAaIMo/9CiIABl/JACNZbsJWmhxBVuOUczhMaWNUF76cxtVBYB9YZIq0Q1gd6Uq2R6h7Fq8djrA2Owj0Mp6j6AAcw6DwMlwrpkZGgWRiUHLN7y6IaBjOxRN0GghFOabQb0oglCwsRM3NneOnqEu/cvMXOeo+7b95kfrXGH1txrLl9RDPm8vwhJvQ4khfY/QEeQaE8C08doVUucKNf8NKlMQbFLDF/9bmIovkwX88znlhSPLebsrsjqDVSulpwbGWG3dEMP7i0w5tXmzz60Ek2ZUFy8zYnF+dYfWiN3l6Pb3/3BziX8/QvPcXyp1b55oU+O6sjZHfMF55Z47fe3YHZGayHzf3hARzYuQDOEyIQIR45cYjZhSU2N9cRAqwJx3ZcFOz0UnZ2JyzOJUiZ0I4Vf/EE5Psj8rQgasSYwuJKj4kdcbOGbukfferYe/8a8OwPeenDUf5KmnDsOXRcI9IKGUUIrZFaIkW1MVYYhjIvaTY1S50GxxebXNzNiOsxIgrUqAKBM6G/IqfU93iEkvSt5xuDLgJPaUwgDBeVapTSgZHbg/cSZ03FgB9mzIT32Gr6V0iJM44oqtSlfEg8HRbpA9Gfc+HfJVPdloo8ECplYylQQmGq7/G+EsswljIvAmG5VAjhKcYZ8cSS9Qqy5SM0ZiT97Q0atZThzZx+Y4XZzz/CWq3gcH6VK3tj3sglj61K5n7mNM+26+zuOR79wuMU2YCsZ7n2+k3OHG7x3bLFenwM6XZAS77wpRU6ylDkESszMXu2wYsX94mV4I112N25yclZyc3X4YnnWlzcm3Cn1+b4es7aI6c59dkn6ayUXBq/w6n6kKdmjnN8psP2xiYvvfEut772Js1LJd3DXbJ3BiitONLf5ysLW7S14pNPHeGFd7fo+XAu+5PsQD0gHD9/8F+tFvPIJ55k6+t3An2vFEgbQub+MGW7n3FkUlKLFUVUUK8l2DjGupLBzgAhFI1GTGOmThTrAA95H/tYdPBFHJOsLqOUQsj7S6kCKmm7acKrkhBa/WBzzI6R6HoMMmzLcE94CBm2bC9C+BPY+RUGh/AC50BHElSg5bPWEGQFq+RdVeFWxVGGIEg/GItKJFGkcN5hywoyoBRRrNG6onGV4kDmTYgp+bivAEoWay3WG7AeFemwRmOQIugoBlL+wAEgcoEdFlgpaU02EJGkPy6YPxKRvpGy8/o1ZC3nxpzizz28wHPHM+LmiJbeI466OHOSredP8e7GmK+/2eOnVxMeO73CVr7Llo+5VJ9FPH6Yzxyq8fxnFxC2xBNQnQ9/WvD18wN+/cqQJ45vcKGxghKeK/sD/sfvOB4+v8s5Vyfv13h74xovlim1owv8/GdP8lCzwRtv3uDv/+B7fKLb5clCsfjMk1y4+SrbO/vsfWfM0ukWP/UfLdCes0gUSSJ47EiX7+4VCKnY3NujtDmxCD0QUakoeB9I9R576hFe/d6LFDvbSGcovUEhyAvD7t6E/X7GwmwDqnNcFiVFVtJoJSTNhKJfYHODNx6T/+il4x+7CSGI6wEIr5TCWkukNQ4XLnamJdZQZhVecneYh7ArCn9CaSzWuRD+ROpAUi1MkyhsUSJEkLN21gY1YHH/iHbANkgVdOdVxTghqzjZWYfHVuq5ldyFDYwiIdEPz+WTFB1phAwa964iKy+L4p5oq/BEUmCtO9CfQQqSJAkCRjLE6EqCcAZXlphhTnM2IUsVxZ7DbOT0GxqcQHQSrFDofc8/+M0bHOla5uJrLLQNreYs9eV5Xlk3rP/+ZTb3J+wttnnyYc3PPDXmL0Sr/OZr17l5JePGOvyPtwUzgkCD6iG3jstbjmw95e/erPPUp1t87vQRZo/Nkq171ndv8tuF4dp8nYUnTzHxNQ7vCV64+BL9qyV3kznml07wn/7MYW7/g99j8607LDy7wOD1bZaeaHD6l2aozcgAWiPoaz79yCov/s51ilixNyrZHw2Za7dQKsY7CxXZYlFkJDXNM597nm/8xtdAgvUhn/TOsd2bcOVWn1YiOX16qSJtlOzt5+A8S1qRliWRMWAFcj8N+Jb3sI+Fs0DgIw7YakekI7wMEtzeh5KgcIANF6ux7iCRdsYFcSHnqNUCqRrWoiJd3fUFTqtQXrQucIpNWfO1oswyEII4irDO4UuHFg68QApV7WgeoUDXE0xRUhYGLUBLUNJD6SjLMGDprQMZmmcuL3FCIIRCVu8HDoSHhNYBQVlanHNkaY4tC6JajBeewlqUkESthO3+LvOHE0ZZh9FWnzxTTPYyvPYsnFtkcXmGw0stxmoNF8U8u5gg7V2ask3SOMyJExFXtnJeeHeLdrrLZ87sYMuUk4tD/sOvtPhueY6ZYYIvNcszCacXY4RWKKXop7Dz7XUaizFPP9xm1nry/oDf295k5fmHkCdWqd+ecPvlO5ybsZyaP0xn4Sxfzy2yzOHuBd54/QWe+TNLPNb+Cv0LI26WPR76aptap+KqrhqFHs/SQpMjnYiLmcc4S+EKrC0p7YRa3CVIjIAXAutKnvrE47x7/iKXLl5gSteqpafIMt65tIEZ58y0Io4cX6LWFHRnmgz2Uvrbo2qINRRpkvTfAxZ9gDDlBqjAdSxkWJpUCqEkZRnutNb7g7q6qRI6LUM4hXWBAcR7XJ6jlK6cLgxWemsC34QM+YSTEt1s4IzFADKKA2OMNQeM/qG2X0lK+6oCZkusACU1ZZFjS4dSMZaQrAspcGUOUqJEcFoXEijAkxUW11JoqShLQ2ENWitEFBEnGoHAV7uO1horc1qHZjBKk+5usbhc4lKPmWshu4pOt4UrDXf3BhxamWWx08DELWa6R8Pf6WFWaT7ZtDx8pMPF3+hx62uWxmeXEMUlxuPb1NM9Nvo7bA
48v7PeRArPzJEW8+2ESeppeMGqn6Gnlpk/fIhhXMN6we28T3y+x+k3Njh8IecTX3yWk2fP8v985Q71jSv89KEt/tSfGjHXLZjYDEuXeFnx8C+30Y1KaEqqcF4J095eWJ48McfFt3YxQrDTz1ibWwYHUmiCzuc9lWipBD//iz/H3/s765RFD2lLXNBhZzQsuHJzh2YisKnl8EPLtOe7FGmlem0tSTuiGJVhl/+4w4ohULgKF5j0g+AQFXGdxjuDNBYRBQzI9M5sjSFKYpQUWFOG+rlUFbF4OBFaKoQ1WGvAB8qkSKlqUDI4hdQa4R3WGISOwk7mLVKE+r30hrIIPRJjDRAakhKJKQ1KRwfyelII8smESIOKdMiJtMJaT+nuCSw56ygOhjIFShCgrsZg8xylACfIbI4QkrnDbca5RSx12RlPmD8Oo0kdn4DNC0onsBZ6u33G+wMiBzM1TVxX3MkMRzptnjm0yq4zZLcHmMNrvDQ7w6/MTugkiyAKsqLBv9ho8xtfc6ytNPnkZxc43KgxqMqyiZNc3R2ylo3JDCz3S+a/eZWluUXk7QQ5jrn27fN8Z/MqLxSKY588wR//8iKd0Qvc2nd8a3SUL8ttjtRzooovwFaqZ1oGlQMhJd54Th+ZoX1hl10Dd/tDlFA44SqA1jRH5UC+uzvT4rNf/AJf/7VfR6rQq5PWY7RgkOe8fHGTmzd2+dLzQ849dYzZpS6mMEx6Y8aTMuSmhcWaj3kYJoQA5zClO1DH8sYglMIUBcIR7s4lqERD1bOQ1uALH4gkfAhliGvoOKpEOqeTyqFnYvIctEYkUegSS4ESVc7hAeuxrgQXpCmccaAFRVmEXo+SeK1wQlI6kM4gVISQusJ+B7Z/IWUQBg2T5VAadBQx7bho6ZEy6MdQ2kCTag1eSGIVdGhsHtjd40ZMFEcwGZBen2BNhp9L2LkrUC4Ns3LkKKcwpUPN1um26tSbMSfnZplt1Bj2+9wtS17d6mHHYxaSmMX5Gc4NYwZX17ALsyw+vsbcfIflYoI4scPh0zMsnJ3lM602fRMYUzI8nTLnuVqDvD/hrVsj+kWMup2y58fsL5XoL53i8hOr1N9UyGXJet+gJl380PGzbpvFpgYVEfqzMsiTe8JNJwpE6Eoq2k3N6cUW/TtDNvcGYRcId1CsczjvUFIfqEE7L3juU08z2N3ne9/+Dnk+QQPNSNJPDcNRYJ38re/dxMqYU2cWsM6TFpad3THGemZHGfZ9yME/Fs7ivadMsxByVXmH8GCtrSSZK/k654K4qQqj+x5VyW0HitU4iavScYnPS1SjBgJsWSKsQOoYnUQUeUZcq6PxOByUDhVJSldiCo+ONaYwVYNKIXUE5VT8RqJ8GNwzzoUQTxiiRh2pPN4ZLFT8Y1EI45REah3Uiwk7prXByaULBQApJN6WlCYPfZko9GukBh0rvC1om12ysqB3tYbJfejY1zVJAvv7ffJ8QmRL7KhB1EnYHU04vTTLYSn57uaIV95yLBUT/srZI+Tfuk1T13FDg1MZ48ct3V95ni8e6/BriynxIuwXQ353p89TKmY8njAZDrj8ylVur49I7mZsij4bx2K6zTqrNx2rtDn37NO8207J9m5wuF2wle9QsxkzcY1GA6AqszsPJuR2nlAmP6ityzB1/MSRGV5aH7DRG5AVGUkcB9lEUSm9YZFShwhEBvjxz/6pL2FX5/nWt/+AZG8PlWU0jCV1jljW2E9LvvG9yzzRG7E832R3L2V7f0Ip4PCowIn3JqwQB1qIH6G1Vtf847/yV7HOE8URUktcUQYiZ6aa8gJfGoTzWFsStephrETqEDrZMFMlrMEMxzgPcauJdTY4lnM4F0guEJWknq0u9mr40VXalSFMKomTCCElZZaH3o2XqCRCKIGzHmemTUV/0A9y1qCiAFGWSgUtSR+UcbuDIX/r7/zfuba6honisF442AHxAX7kfKB7kjJUiUBAmkFWYJ3HOhBKkU0sqhUF6fKyICJozcSxDlPZkSaKFO1awm5RMtkuqWnFQl4icotGBQnCaniz8cgyVntujcZE0tK2MMmyAJdQglgJyp2cyAkKA65Twy53iLMCtZVSa9RoHprhwrjP5M6Y+TmJmaQsNzSqGipVSgamSQRKBxSkmOruVH0yUSl5FaXl0t0R4Dm5PB+Yfg5auBWQTtyb8ZIVS/7eJOPm/gjhLCpLkaMRxt2TTrfWEceaZi2itI68NDip6RrLn9vtczlNf2jm8rFwFiHEkAAm+6htAdj5qBfBg3X82/aTXMcx7/3iD3vhYxGGARe99z9sQuAnakKIlx6s48E63sveXxfsgT2wB3ZgD5zlgT2wD2gfF2f57z/qBVT2YB3/pj1Yx332sUjwH9gD+/fBPi47ywN7YB97+8idRQjxc0KIi0KIyxWl0o/zu/6/QogtIcRb9z03J4T4phDiUvX/2ep5IYT4f1TrekMI8cyHuI4jQoh/LYQ4L4R4Wwjx1z6KtQghakKIF4UQr1fr+D9Vz58QQrxQfd8/rkB/CCGS6vfL1evHP4x1VJ+thBCvCiG+9lGt4Q+1+6WPf9I/BLjVFeAkEAOvA4/8GL/vCwTyjbfue+7/CvyN6vHfAP4v1eOfB/4VgXftU8ALH+I6VoFnqsdt4F3gkZ/0WqrPa1WPI+CF6vP/CfDnq+f/O+CvVo//M+C/qx7/eeAff4jH5H8H/CPga9XvP/E1/KFr/El90XscoE8D37jv978J/M0f83ce/7ec5SKwWj1eJfR8AP4u8Bd+2Pt+DGv6deArH+VaCMSJrxB4E3YA/W+fI+AbwKerx7p6n/gQvvsw8NvATwNfq5z4J7qGD/LzUYdh78Ux9pO0Pyr/2YdqVRjxNOGu/hNfSxX+vEZg5/kmYaff995PYYP3f9fBOqrX+8D8h7CM/wb43xMkP6k+8ye9hj/UPmpn+ViZD7ern1h5UAjRAv458F967wcfxVq899Z7/xTh7v4ccO7H/Z33mxDiTwJb3vuXf5Lf+7/EPmpn+SNzjP0Y7G7Fe8aPyn/2RzEhRERwlH/ovf+fP8q1AHjv94F/TQh5ZoQQ01Go+7/rYB3V611g90f86s8CvyCEuA78KiEU+9s/4TV8IPuoneUHwOmq8hETErbf+Amv4TcIvGfw7/Kf/a+rStSn+AD8Zx/URJj9//8A73jv/9ZHtRYhxKIICgkIIeqEvOkdgtP86fdYx3R9fxr4nWoH/F9s3vu/6b0/7L0/Tjj/v+O9/4s/yTX8URb7kf4QKj3vEmLl/+OP+bv+JwLnckmIg3+FEO/+NnAJ+BYwV71XAP+val1vAs9+iOv4HCHEegN4rfr5+Z/0WoAngFerdbwF/FfV8yeBFwncb/8USKrna9Xvl6vXT37I5+eL3KuGfSRreL+fBx38B/bAPqB91GHYA3tg/97YA2d5YA/sA9oDZ3lgD+wD2gNneWAP7APaA2d5YA/sA9oDZ3lgD+wD2gNneWAP7APaA2d5YA/sA9r/HyUIPmt4Cce/AAAAAElFTkSuQmCC\n", + "text/plain": [ + 
"
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "from opendr.perception.object_detection_2d import draw_bounding_boxes\n", + "\n", + "img_annotated = draw_bounding_boxes(img.opencv(), boxes, class_names=nanodet.classes, show=False)\n", + "\n", + "plt.imshow(cv2.cvtColor(img_annotated, cv2.COLOR_BGR2RGB))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "436aaefe-fe18-49d7-b881-d0f64ce47742", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.10" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/projects/python/perception/object_detection_2d/nanodet/train_demo.py b/projects/python/perception/object_detection_2d/nanodet/train_demo.py new file mode 100644 index 0000000000..3ef0394392 --- /dev/null +++ b/projects/python/perception/object_detection_2d/nanodet/train_demo.py @@ -0,0 +1,51 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse + +from opendr.engine.datasets import ExternalDataset +from opendr.perception.object_detection_2d import NanodetLearner + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("--dataset", help="Dataset to train on", type=str, default="coco", choices=["voc", "coco"]) + parser.add_argument("--data-root", help="Dataset root folder", type=str) + parser.add_argument("--model", help="Model that config file will be used", type=str) + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--batch-size", help="Batch size to use for training", type=int, default=6) + parser.add_argument("--lr", help="Learning rate to use for training", type=float, default=5e-4) + parser.add_argument("--checkpoint-freq", help="Frequency in-between checkpoint saving and evaluations", + type=int, default=50) + parser.add_argument("--n-epochs", help="Number of total epochs", type=int, default=300) + parser.add_argument("--resume-from", help="Epoch to load checkpoint file and resume training from", + type=int, default=0) + + args = parser.parse_args() + + if args.dataset == 'voc': + dataset = ExternalDataset(args.data_root, 'voc') + val_dataset = ExternalDataset(args.data_root, 'voc') + elif args.dataset == 'coco': + dataset = ExternalDataset(args.data_root, 'coco') + val_dataset = ExternalDataset(args.data_root, 'coco') + + nanodet = NanodetLearner(model_to_use=args.model, iters=args.n_epochs, lr=args.lr, batch_size=args.batch_size, + checkpoint_after_iter=args.checkpoint_freq, checkpoint_load_iter=args.resume_from, + device=args.device) + + nanodet.download("./predefined_examples", mode="pretrained") + nanodet.load("./predefined_examples/nanodet-{}/nanodet-{}.ckpt".format(args.model, args.model), verbose=True) + nanodet.fit(dataset, val_dataset) + nanodet.save() diff --git a/projects/perception/object_detection_2d/nms/cluster_nms/README.md b/projects/python/perception/object_detection_2d/nms/cluster_nms/README.md similarity index 100% rename from projects/perception/object_detection_2d/nms/cluster_nms/README.md rename to projects/python/perception/object_detection_2d/nms/cluster_nms/README.md diff --git a/projects/perception/object_detection_2d/nms/cluster_nms/inference_demo.py b/projects/python/perception/object_detection_2d/nms/cluster_nms/inference_demo.py similarity index 92% rename from projects/perception/object_detection_2d/nms/cluster_nms/inference_demo.py rename to projects/python/perception/object_detection_2d/nms/cluster_nms/inference_demo.py index e653f5820c..37f1f1d724 100644 --- a/projects/perception/object_detection_2d/nms/cluster_nms/inference_demo.py +++ b/projects/python/perception/object_detection_2d/nms/cluster_nms/inference_demo.py @@ -23,7 +23,7 @@ ssd = SingleShotDetectorLearner(device='cuda') ssd.download(".", mode="pretrained") ssd.load("./ssd_default_person", verbose=True) -img = Image.open(OPENDR_HOME + '/projects/perception/object_detection_2d/nms/img_temp/frame_0000.jpg') +img = Image.open(OPENDR_HOME + '/projects/python/perception/object_detection_2d/nms/img_temp/frame_0000.jpg') if not isinstance(img, Image): img = Image(img) cluster_nms = ClusterNMS(device='cuda', nms_type='default', cross_class=True) diff --git a/projects/perception/object_detection_2d/nms/fast_nms/README.md b/projects/python/perception/object_detection_2d/nms/fast_nms/README.md similarity index 100% rename from projects/perception/object_detection_2d/nms/fast_nms/README.md rename to 
projects/python/perception/object_detection_2d/nms/fast_nms/README.md diff --git a/projects/perception/object_detection_2d/nms/fast_nms/inference_demo.py b/projects/python/perception/object_detection_2d/nms/fast_nms/inference_demo.py similarity index 91% rename from projects/perception/object_detection_2d/nms/fast_nms/inference_demo.py rename to projects/python/perception/object_detection_2d/nms/fast_nms/inference_demo.py index 5e0a5b48fa..1582fe8f0b 100644 --- a/projects/perception/object_detection_2d/nms/fast_nms/inference_demo.py +++ b/projects/python/perception/object_detection_2d/nms/fast_nms/inference_demo.py @@ -23,7 +23,7 @@ ssd = SingleShotDetectorLearner(device='cuda') ssd.download(".", mode="pretrained") ssd.load("./ssd_default_person", verbose=True) -img = Image.open(OPENDR_HOME + '/projects/perception/object_detection_2d/nms/img_temp/frame_0000.jpg') +img = Image.open(OPENDR_HOME + '/projects/python/perception/object_detection_2d/nms/img_temp/frame_0000.jpg') if not isinstance(img, Image): img = Image(img) cluster_nms = FastNMS(device='cpu', cross_class=True) diff --git a/projects/perception/object_detection_2d/nms/img_temp/frame_0000.jpg b/projects/python/perception/object_detection_2d/nms/img_temp/frame_0000.jpg similarity index 100% rename from projects/perception/object_detection_2d/nms/img_temp/frame_0000.jpg rename to projects/python/perception/object_detection_2d/nms/img_temp/frame_0000.jpg diff --git a/projects/perception/object_detection_2d/nms/seq2seq-nms/README.md b/projects/python/perception/object_detection_2d/nms/seq2seq-nms/README.md similarity index 100% rename from projects/perception/object_detection_2d/nms/seq2seq-nms/README.md rename to projects/python/perception/object_detection_2d/nms/seq2seq-nms/README.md diff --git a/projects/perception/object_detection_2d/nms/seq2seq-nms/eval_demo.py b/projects/python/perception/object_detection_2d/nms/seq2seq-nms/eval_demo.py similarity index 93% rename from projects/perception/object_detection_2d/nms/seq2seq-nms/eval_demo.py rename to projects/python/perception/object_detection_2d/nms/seq2seq-nms/eval_demo.py index 01437e578b..7110edef4e 100644 --- a/projects/perception/object_detection_2d/nms/seq2seq-nms/eval_demo.py +++ b/projects/python/perception/object_detection_2d/nms/seq2seq-nms/eval_demo.py @@ -33,13 +33,13 @@ "TEST_MODULE"]) parser.add_argument("--data_root", help="Dataset root folder", type=str, default=os.path.join(OPENDR_HOME, - 'projects/perception/object_detection_2d/nms/seq2seq-nms/datasets')) + 'projects/python/perception/object_detection_2d/nms/seq2seq-nms/datasets')) parser.add_argument("--use_ssd", help="Train using SSD as detector", type=bool, default=False) parser.add_argument("--post_thres", help="Confidence threshold, used for RoI selection after seq2seq-nms rescoring", type=float, default=0.0) args = parser.parse_args() -tmp_path = os.path.join(OPENDR_HOME, 'projects/perception/object_detection_2d/nms/seq2seq-nms/tmp') +tmp_path = os.path.join(OPENDR_HOME, 'projects/python/perception/object_detection_2d/nms/seq2seq-nms/tmp') seq2SeqNMSLearner = Seq2SeqNMSLearner(device=args.device, app_feats=args.app_feats, fmod_map_type=args.fmod_type, iou_filtering=args.iou_filtering, temp_path=tmp_path) diff --git a/projects/perception/object_detection_2d/nms/seq2seq-nms/inference_demo.py b/projects/python/perception/object_detection_2d/nms/seq2seq-nms/inference_demo.py similarity index 91% rename from projects/perception/object_detection_2d/nms/seq2seq-nms/inference_demo.py rename to 
projects/python/perception/object_detection_2d/nms/seq2seq-nms/inference_demo.py index c260546d13..437942bca8 100755 --- a/projects/perception/object_detection_2d/nms/seq2seq-nms/inference_demo.py +++ b/projects/python/perception/object_detection_2d/nms/seq2seq-nms/inference_demo.py @@ -31,7 +31,7 @@ choices=['seq2seq_pets_jpd']) args = parser.parse_args() -tmp_path = os.path.join(OPENDR_HOME, 'projects/perception/object_detection_2d/nms/seq2seq-nms/tmp') +tmp_path = os.path.join(OPENDR_HOME, 'projects/python/perception/object_detection_2d/nms/seq2seq-nms/tmp') seq2SeqNMSLearner = Seq2SeqNMSLearner(device=args.device, app_feats=args.app_feats, fmod_map_type=args.fmod_type, iou_filtering=args.iou_filtering, temp_path=tmp_path) @@ -41,7 +41,7 @@ ssd = SingleShotDetectorLearner(device=args.device) ssd.download(".", mode="pretrained") ssd.load("./ssd_default_person", verbose=True) -img = Image.open(OPENDR_HOME + '/projects/perception/object_detection_2d/nms/img_temp/frame_0000.jpg') +img = Image.open(OPENDR_HOME + '/projects/python/perception/object_detection_2d/nms/img_temp/frame_0000.jpg') if not isinstance(img, Image): img = Image(img) boxes = ssd.infer(img, threshold=0.3, custom_nms=seq2SeqNMSLearner) diff --git a/projects/perception/object_detection_2d/nms/seq2seq-nms/train_demo.py b/projects/python/perception/object_detection_2d/nms/seq2seq-nms/train_demo.py similarity index 94% rename from projects/perception/object_detection_2d/nms/seq2seq-nms/train_demo.py rename to projects/python/perception/object_detection_2d/nms/seq2seq-nms/train_demo.py index 4facf2696b..843517214a 100644 --- a/projects/perception/object_detection_2d/nms/seq2seq-nms/train_demo.py +++ b/projects/python/perception/object_detection_2d/nms/seq2seq-nms/train_demo.py @@ -28,7 +28,7 @@ parser.add_argument("--lr", help="Learning rate to use for training", type=float, default=1e-4) parser.add_argument("--n_epochs", help="Number of total epochs", type=int, default=10) parser.add_argument("--tmp_path", help="Temporary path where weights will be saved", type=str, - default=os.path.join(OPENDR_HOME, 'projects/perception/object_detection_2d/nms/seq2seq-nms/tmp')) + default=os.path.join(OPENDR_HOME, 'projects/python/perception/object_detection_2d/nms/seq2seq-nms/tmp')) parser.add_argument("--checkpoint_freq", help="Frequency in-between checkpoint saving", type=int, default=1) parser.add_argument("--resume-from", help="Epoch to load checkpoint file and resume training from", type=int, default=0) parser.add_argument("--dataset", help="Dataset to train on", type=str, default="PETS", choices=["PETS", "COCO", @@ -37,7 +37,7 @@ parser.add_argument("--max_dt_boxes", help="Maximum number of input RoIs fed to Seq2Seq-NMS", type=int, default=500) parser.add_argument("--data-root", help="Dataset root folder", type=str, default=os.path.join(OPENDR_HOME, - 'projects/perception/object_detection_2d/nms/seq2seq-nms/datasets')) + 'projects/python/perception/object_detection_2d/nms/seq2seq-nms/datasets')) args = parser.parse_args() seq2SeqNMSLearner = Seq2SeqNMSLearner(epochs=args.n_epochs, lr=args.lr, device=args.device, app_feats=args.app_feats, fmod_map_type=args.fmod_type, iou_filtering=args.iou_filtering, diff --git a/projects/perception/object_detection_2d/nms/soft_nms/README.md b/projects/python/perception/object_detection_2d/nms/soft_nms/README.md similarity index 100% rename from projects/perception/object_detection_2d/nms/soft_nms/README.md rename to projects/python/perception/object_detection_2d/nms/soft_nms/README.md diff --git 
a/projects/perception/object_detection_2d/nms/soft_nms/inference_demo.py b/projects/python/perception/object_detection_2d/nms/soft_nms/inference_demo.py
similarity index 92%
rename from projects/perception/object_detection_2d/nms/soft_nms/inference_demo.py
rename to projects/python/perception/object_detection_2d/nms/soft_nms/inference_demo.py
index c05ff4c7c2..c34d9fe46d 100644
--- a/projects/perception/object_detection_2d/nms/soft_nms/inference_demo.py
+++ b/projects/python/perception/object_detection_2d/nms/soft_nms/inference_demo.py
@@ -23,7 +23,7 @@
 ssd = SingleShotDetectorLearner(device='cuda')
 ssd.download(".", mode="pretrained")
 ssd.load("./ssd_default_person", verbose=True)
-img = Image.open(OPENDR_HOME + '/projects/perception/object_detection_2d/nms/img_temp/frame_0000.jpg')
+img = Image.open(OPENDR_HOME + '/projects/python/perception/object_detection_2d/nms/img_temp/frame_0000.jpg')
 if not isinstance(img, Image):
     img = Image(img)
 cluster_nms = SoftNMS(device='cpu', nms_type='gaussian')
diff --git a/projects/perception/object_detection_2d/retinaface/README.md b/projects/python/perception/object_detection_2d/retinaface/README.md
similarity index 100%
rename from projects/perception/object_detection_2d/retinaface/README.md
rename to projects/python/perception/object_detection_2d/retinaface/README.md
diff --git a/projects/perception/object_detection_2d/retinaface/eval_demo.py b/projects/python/perception/object_detection_2d/retinaface/eval_demo.py
similarity index 100%
rename from projects/perception/object_detection_2d/retinaface/eval_demo.py
rename to projects/python/perception/object_detection_2d/retinaface/eval_demo.py
diff --git a/projects/perception/object_detection_2d/retinaface/inference_demo.py b/projects/python/perception/object_detection_2d/retinaface/inference_demo.py
similarity index 100%
rename from projects/perception/object_detection_2d/retinaface/inference_demo.py
rename to projects/python/perception/object_detection_2d/retinaface/inference_demo.py
diff --git a/projects/perception/object_detection_2d/retinaface/inference_tutorial.ipynb b/projects/python/perception/object_detection_2d/retinaface/inference_tutorial.ipynb
similarity index 100%
rename from projects/perception/object_detection_2d/retinaface/inference_tutorial.ipynb
rename to projects/python/perception/object_detection_2d/retinaface/inference_tutorial.ipynb
diff --git a/projects/perception/object_detection_2d/retinaface/train_demo.py b/projects/python/perception/object_detection_2d/retinaface/train_demo.py
similarity index 100%
rename from projects/perception/object_detection_2d/retinaface/train_demo.py
rename to projects/python/perception/object_detection_2d/retinaface/train_demo.py
diff --git a/projects/perception/object_detection_2d/ssd/README.md b/projects/python/perception/object_detection_2d/ssd/README.md
similarity index 100%
rename from projects/perception/object_detection_2d/ssd/README.md
rename to projects/python/perception/object_detection_2d/ssd/README.md
diff --git a/projects/perception/object_detection_2d/ssd/eval_demo.py b/projects/python/perception/object_detection_2d/ssd/eval_demo.py
similarity index 100%
rename from projects/perception/object_detection_2d/ssd/eval_demo.py
rename to projects/python/perception/object_detection_2d/ssd/eval_demo.py
diff --git a/projects/perception/object_detection_2d/ssd/inference_demo.py b/projects/python/perception/object_detection_2d/ssd/inference_demo.py
similarity index 100%
rename from projects/perception/object_detection_2d/ssd/inference_demo.py
rename to projects/python/perception/object_detection_2d/ssd/inference_demo.py
diff --git a/projects/perception/object_detection_2d/ssd/inference_tutorial.ipynb b/projects/python/perception/object_detection_2d/ssd/inference_tutorial.ipynb
similarity index 100%
rename from projects/perception/object_detection_2d/ssd/inference_tutorial.ipynb
rename to projects/python/perception/object_detection_2d/ssd/inference_tutorial.ipynb
diff --git a/projects/perception/object_detection_2d/ssd/train_demo.py b/projects/python/perception/object_detection_2d/ssd/train_demo.py
similarity index 100%
rename from projects/perception/object_detection_2d/ssd/train_demo.py
rename to projects/python/perception/object_detection_2d/ssd/train_demo.py
diff --git a/projects/perception/object_detection_2d/yolov3/README.md b/projects/python/perception/object_detection_2d/yolov3/README.md
similarity index 100%
rename from projects/perception/object_detection_2d/yolov3/README.md
rename to projects/python/perception/object_detection_2d/yolov3/README.md
diff --git a/projects/perception/object_detection_2d/yolov3/eval_demo.py b/projects/python/perception/object_detection_2d/yolov3/eval_demo.py
similarity index 100%
rename from projects/perception/object_detection_2d/yolov3/eval_demo.py
rename to projects/python/perception/object_detection_2d/yolov3/eval_demo.py
diff --git a/projects/perception/object_detection_2d/yolov3/inference_demo.py b/projects/python/perception/object_detection_2d/yolov3/inference_demo.py
similarity index 100%
rename from projects/perception/object_detection_2d/yolov3/inference_demo.py
rename to projects/python/perception/object_detection_2d/yolov3/inference_demo.py
diff --git a/projects/perception/object_detection_2d/yolov3/inference_tutorial.ipynb b/projects/python/perception/object_detection_2d/yolov3/inference_tutorial.ipynb
similarity index 100%
rename from projects/perception/object_detection_2d/yolov3/inference_tutorial.ipynb
rename to projects/python/perception/object_detection_2d/yolov3/inference_tutorial.ipynb
diff --git a/projects/perception/object_detection_2d/yolov3/train_demo.py b/projects/python/perception/object_detection_2d/yolov3/train_demo.py
similarity index 100%
rename from projects/perception/object_detection_2d/yolov3/train_demo.py
rename to projects/python/perception/object_detection_2d/yolov3/train_demo.py
diff --git a/projects/python/perception/object_detection_2d/yolov5/README.md b/projects/python/perception/object_detection_2d/yolov5/README.md
new file mode 100644
index 0000000000..fd3aa7c4c0
--- /dev/null
+++ b/projects/python/perception/object_detection_2d/yolov5/README.md
@@ -0,0 +1,7 @@
+# YOLOv5DetectorLearner Demos
+
+This folder contains minimal code usage examples that showcase the basic inference function of the YOLOv5DetectorLearner
+provided by OpenDR. Specifically the following examples are provided:
+1. inference_demo.py: Perform inference on a single image. Setting `--device cpu` performs inference on CPU.
+2. webcam_demo.py: A simple tool that performs live object detection using a webcam.
+3. inference_tutorial.ipynb: Perform inference using pretrained or custom models.
\ No newline at end of file
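For quick reference, a minimal usage sketch of the learner behind these demos is given below. It only mirrors the calls that appear in the new inference_demo.py further down (YOLOv5DetectorLearner, infer, classes, draw_bounding_boxes); the custom-weights line is an assumption based on the 'custom' entry in available_models and the path parameter mentioned in the tutorial notebook, and 'my_weights.pt' / 'example.jpg' are placeholder file names, not files from this repository.

# Minimal sketch (not part of the diff): single-image inference with the OpenDR YOLOv5 learner.
from opendr.engine.data import Image
from opendr.perception.object_detection_2d import YOLOv5DetectorLearner, draw_bounding_boxes

# Pretrained COCO weights fetched through the PyTorch Hub; any name from available_models should work.
yolo = YOLOv5DetectorLearner(model_name='yolov5s', device='cpu')

# Assumed variant for custom weights ('my_weights.pt' is a placeholder):
# yolo = YOLOv5DetectorLearner(model_name='custom', path='my_weights.pt', device='cpu')

img = Image.open('example.jpg')  # placeholder input image (OpenDR Image; a cv2 BGR array also works)
boxes = yolo.infer(img)          # returns the detected bounding boxes
draw_bounding_boxes(img.opencv(), boxes, yolo.classes, show=True, line_thickness=3)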
diff --git a/projects/python/perception/object_detection_2d/yolov5/inference_demo.py b/projects/python/perception/object_detection_2d/yolov5/inference_demo.py
new file mode 100644
index 0000000000..a3bff2b298
--- /dev/null
+++ b/projects/python/perception/object_detection_2d/yolov5/inference_demo.py
@@ -0,0 +1,42 @@
+# Copyright 2020-2022 OpenDR European Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+
+import cv2
+import torch
+
+from opendr.engine.data import Image
+from opendr.perception.object_detection_2d import YOLOv5DetectorLearner
+from opendr.perception.object_detection_2d import draw_bounding_boxes
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"])
+
+    args = parser.parse_args()
+
+    yolo = YOLOv5DetectorLearner(model_name='yolov5s', device=args.device)
+
+    for f in 'zidane.jpg', 'bus.jpg':
+        torch.hub.download_url_to_file('https://ultralytics.com/images/' + f, f) # download 2 images
+    im1 = Image.open('zidane.jpg') # OpenDR image
+    im2 = cv2.imread('bus.jpg') # OpenCV image (BGR to RGB)
+
+    results = yolo.infer(im1)
+    draw_bounding_boxes(im1.opencv(), results, yolo.classes, show=True, line_thickness=3)
+
+    results = yolo.infer(im2)
+    draw_bounding_boxes(im2, results, yolo.classes, show=True, line_thickness=3)
diff --git a/projects/python/perception/object_detection_2d/yolov5/inference_tutorial.ipynb b/projects/python/perception/object_detection_2d/yolov5/inference_tutorial.ipynb new file mode 100644 index 0000000000..0968320e74 --- /dev/null +++ b/projects/python/perception/object_detection_2d/yolov5/inference_tutorial.ipynb @@ -0,0 +1,305 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "source": [ + "# YOLOv5 Tutorial\n", + "\n", + "This notebook provides a tutorial for running inference on a static image in order to detect objects.\n", + "The implementation of the [YOLOv5DetectorLearner](https://github.com/opendr-eu/opendr/blob/master/docs/reference/object-detection-2d-yolov5.md) supports inference only at the moment and relies on the Pytorch Hub model availability.\n", + "More information on modifications and license can be found\n", + "[here](https://github.com/opendr-eu/opendr/blob/master/src/opendr/perception/object_detection_2d/yolov5/README.md).\n", + "\n", + "Via the PyTorch Hub, all models provided by [Ultralytics](https://github.com/ultralytics/yolov5) are available for use within OpenDR.\n", + "The model architecture is passed onto the `model_name` parameter of the learner. 
Custom weights can be loaded by setting the `path` parameter to the path of the saved weights.\n" + ], + "metadata": { + "collapsed": false + } + }, + { + "cell_type": "markdown", + "source": [ + "## Loading a pretrained model\n", + "\n", + "For a list of available architectures you can print the `available_models` attribute of the learner:" + ], + "metadata": { + "collapsed": false + } + }, + { + "cell_type": "code", + "execution_count": 1, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/administrator/opendr/venv/lib/python3.8/site-packages/gluoncv/__init__.py:40: UserWarning: Both `mxnet==1.8.0` and `torch==1.9.0+cu111` are installed. You might encounter increased GPU memory footprint if both framework are used at the same time.\n", + " warnings.warn(f'Both `mxnet=={mx.__version__}` and `torch=={torch.__version__}` are installed. '\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['yolov5s', 'yolov5n', 'yolov5m', 'yolov5l', 'yolov5x', 'yolov5n6', 'yolov5s6', 'yolov5m6', 'yolov5l6', 'custom']\n" + ] + } + ], + "source": [ + "from opendr.perception.object_detection_2d import YOLOv5DetectorLearner\n", + "\n", + "print(YOLOv5DetectorLearner.available_models)" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "markdown", + "source": [ + "To load a model pretrained on COCO, simply initialize the learner by passing the desired architecture name as the `model_name` parameter. The learner will then download the weights and initialize the detector." + ], + "metadata": { + "collapsed": false + } + }, + { + "cell_type": "code", + "execution_count": 2, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Downloading: \"https://github.com/ultralytics/yolov5/archive/master.zip\" to /home/administrator/.cache/torch/hub/master.zip\n", + "YOLOv5 🚀 2022-11-28 Python-3.8.10 torch-1.9.0+cu111 CUDA:0 (NVIDIA GeForce RTX 2070 SUPER, 7982MiB)\n", + "\n", + "Fusing layers... \n", + "YOLOv5s summary: 213 layers, 7225885 parameters, 0 gradients\n", + "Adding AutoShape... \n" + ] + } + ], + "source": [ + "learner = YOLOv5DetectorLearner(model_name='yolov5s')" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "markdown", + "source": [ + "The detector is now ready for inference. OpenDR and OpenCV images are supported:" + ], + "metadata": { + "collapsed": false + } + }, + { + "cell_type": "code", + "execution_count": 6, + "outputs": [ + { + "data": { + "text/plain": " 0%| | 0.00/165k [00:00" + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": "
", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAADfCAYAAAAN+JPJAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9W+xtWXbeh/3GmHPt/T/3U6dO3au6i93sC8km2SRbokgrMn2NbSRQEiFE7JfYCKCX+C0P1lte/ZAXAwEMKIgRC0Zi5wrHgB1bEmzKukLqJiVeutn37qrqutepc/3/915zjpGHb6x9TlPsbkIW7ZZQC119zvlf915rzjHH+Mb3fcMyk4+uj66Pro+uj65/ui7/H/oFfHR9dH10fXR9dP3jvz4K7h9dH10fXR9d/xReHwX3j66Pro+uj65/Cq+PgvtH10fXR9dH1z+F10fB/aPro+uj66Prn8Lro+D+0fXR9dH10fVP4fXHEtzN7F8xs983s6+b2V/44/gdH10fXR9dH10fXT/4sn/cPHcza8BXgX8JeB34u8C/npm/94/1F310fXR9dH10fXT9wOuPI3P/k8DXM/ObmXkE/mPgz/4x/J6Pro+uj66Pro+uH3D1P4af+RLw2hP/fh345R/2Dbve8my3YGaYWX00Mez0NVYfe/Jf+hogEzLJTE6FSNr2WTAHrH5e/SR7/LO3n5zkE7+/Pmr1e0y/1wysPq6fVl+T4PW5xPRa6rVa/S1TPz9PbyH/oc/rT30k6v1kQpz+/fg9bt99ei11/wz96e71Mf++f9ebOX0t9uR91c83Ev1PvzMiyAgy44l7/bjqq+/8A3e07ml93i0xy7pPj+/X9voff5vpfddroO5H8sSf6afnG6ePn97a970m2+79H3h92+t4/Jwer7Dtvf+BH3ZaA9sV+fh78vGDffy9T3zsyddj+BOfemKt1s/Zvn9bl3laC0nUv+sLSb3B+lGP15Me9RO7yJ7cBdsaePw6T2sTLYk/WNNvS/bxfa61Z9sT/r47+MR9eeK9PHGDT+snt1+Wp/ex7esnn5llvac/8Hq+b/2cPv79i+pxxHh85fe93sdrqG7M6WVF/dDT5+2JNfN9L8ae+HOLFYZ57Utr3xdHnrxO74XEMd0T235p1POGiCQziNR+fDTHe5n5DH/I9ccR3P9Il5n9eeDPA5ztOv/Mz3yS5o3ujUzozWhbULXEIzBLzMFJmjsWE58BMbB55HgYzGmM6eTsRHYCJzkD72Q0vO/JhEkjfNtgeijD87Q5wgO3pHnQLDALbGe4JzuH5kkj6A0aA8vArdGaAmGktnHMhAz9CjfMkkEQFqRDZsdD35/mzDCOazKycXFYOcZkncH5GlzkypgQs7POQURibcEt6b2x613/tYWld3a7Pcv+En13xrIsLG1ht9tBX2i9Yxi9NYxAC86BZI5Bw4iYzBgcx5F1HFnPHzHOz8kYzHUwRkJMuiVJgOvPJIhwMgcw6CSLBzt7ROuTS8vCpR6ctWBpQ/c5AJwMmNOZow64ARGNNY1wZ43GsDNG7hnRGdlYvbOaMwFvRnOj4XqmeaQZWEyMwHKQMfE8smvgDpFTaysGxo45jUgYMXF35pzg6JDECFL3Z8JIZ01jYBxGYDQyIAgMaDgzEssqk82IDJp3Mpy2dMIa0DFrBMnMJIBJcrFOIoM1B+sYHNbBcQbH48p6PBIxwbLWn36H+2S3NJo5btDd6a3hDpbJ0oxuRkfBXYGj4XTcXQdh1HNMhZvMwCLxdLJ+bmuNaUaa0/sOp4MlI6YOIIzIlRHJmjASRgSWSc7BjInNIGOSMbAxaSRtDSwG3pIewZKGp9Ej6ZYsFQC7G80My9RrMyMzcPM6rPR8MxJzfSyUuZAYI5SKYUnijEzCnTBjJZkJw4zVnTWTdGeYPkYz0g3zhrdOGmQHbx1rjb4s9GWhtcay29F6Z7+/Ql8W3I3mDfftoE1aOsykRdITWiWCNhJbB+M4iIuVw/ERh+M56/HA4eKCv/H+G9/5QTH2jyO4vwG88sS/X66Pfd+VmX8R+IsAN65cyqU1uvUKgE2ZnSfNAveghTMzaB26gWVATror6M+EfXOmKSdaqXS3EpyZRpgTaXg67l2LVkk9ydQDy8qVLE5poG/5Tk5agmcFC6+FZUl3x7wRccBOWVXiaUoibMPAGs2MTAUH8y03cSJgxmCSzAzCh157aEE6DfPE6j2YBd60yJfmCmIZtdGT5pUfxkpMCDPmcP3MOWm9KbBl0kx/zxwQyTECgIhgzmBdj4wZ4I05JpGOd8eikbGCQaTuWZqDNxyjuTPHWve5YzjrTLonzfWMDOizlnlChtXhYMxMJkakkdMxWyA75h1rC813jHDcG96cpS+0BlSFsbcdzSDrfaZNmAdsNpplBVpoDoyjfnemEgfbKqMGabi57lczImCgwzjNGTOIqhDTApSaKChu4OdWbbiWZls62U4LkPCErcpKyAx2OCOmMnfTOjbLyvWNw/GAjhYdOK0ZrTVaczpGd6e5ay2YqqWGAmHDT5lkos8nk2aGA3PLGEky4nQwZRoZgAUDsG4cx6DZJK0xE2YmyWRkMCMYdfjOmJBZP0d7OGKSMbEcdJWsyl5nQBojsw7GSiJwulkVnKn9aHXoug7IrKpwbhkwW5Jf6x9VfLO+NiNI078nyWqPP78SjNQhkd2VvlTy5rVmcSNdyYVOUWXrrTUw6L1h3bHutN5prWNRdWgCU4eMtYZPxZqMSTOv9RdKfJtj1mjesNZ+aCD+4wjufxf4lJn9BArq/yvg3/hh32AGvTU6euNZD8R1QLK0xN0glDedSksL3EILoe1IP+KVmSSNkbad8cx00lsFmI6ehEEDMpk5SdPDajmVy9bmzgxluJl0A3LqwVmSuWLNGRG0eaTZqhM/FGx706Gi3RAYIXhkNiyg+SRMGynSmAGRzohkRjJCGx60MM2GstMqG5sl+9ZZvLHFCbfUa8yJR+BjYuaEDY5m2Axaa+QcUFCNm8q8QNnTHMGshRfHI7kOZUBmuHdsaVVmhyLVnJgp2/OmjZepgJ+tc2Sw2ELmBCZzTiY6+ABmOh5GEMy50ljIqc0VOGkd+o5kIWxPtAXajuaX2WVn17XYzY3WsiAJxzPBJpWgERzpuYN1IeZKt3mCPcYE62C5srSOHSfC25SrNw8aMHEGxkgjcOYMZfWpoB9pZEFebjoUePwEK5AqZfDWCDPMuyo3AytI0dJpHeYKLY1gsNs5Y6xgTb+bJEfiGfpdDq3pUO/ddfAbWnckvfVKnLSHmmclBNpZmcruDWXJBswZZCRYEOlEemXfQbqTIzEbDEsmTtDqIFpZtzVtkxmTjCQS1rlCBHPVwWUZeCYRSZjRSVpCz8fw7DQw18+fmTQv2OQxliIkI58Ac8wKkoMwV6JWSQPuFcgDzFWtkQrqpmewZrKOZDYnLPUe6gDRj3fCE2+u7M0Ew7g73hxrjntlD67X31qntQV8YDhe+9py4gOsNZonOQYtlTR4c3JNGu0UD2h/ANv5A9c/9uCe
mcPM/m3gv0Sh8z/IzN/9Yd9jZuyWjqVjwEJiDVo3ZcroZi6mk66bkbkqS7GuRZYr1gtfjkazrhNvdOaAbl0LNyb0VlkSlTQlEcAsFM4MZyFjgisok8qYLSduoY1HCsqYyv48B5kKBJnKlFU8BN7QKnRlRc2yKgUU5NMx81PAgBWaE9Pq5+h1LM0YAeZ7GoNG0CxxISoY2uiWxhxHzJyWC9YaY65EDNwXFhaYpnLVvTJbIEMZGyjrmmvBFbpdjtF2nZmhA3NEwQkDc8E0Vlkb1k73dqEzozDesdKbMXNAVwaXARaJsWJ0xkxIZUnK+nUIZdtjvsP7DpZLmJ+xZ6dKoTJWrYPKcL1Q08KlYy7YWLFlweZK54IYR5hG64XNt9T9aEaEguLixsSYqVJ+NWN4YaCFjTqFz7qCn3tVfdbqYNaBH6HDtZmqDepQGiTdGtCYGGvo0D9rxnEOwrIOY/03LU4Zss2CECuOtAoi3fVMe3d2PH5dVutm8cLXU9AEifYTU2sum7DuTDK6DmsbRMEaRoANEmOCqhJTYpKZHKYOz8j6L7Qj5gxihoJlFoRVGPdEmHIP7ccO9AwFyw1yaarWoWvtqw5nVjw59SnqeShbS0ZqLU03qMpihN7fQDDbrPsOqjZGZe6BK4OvxGHbFF6VUPfO4gbeCG+kNbwveGu4d1ozelc2LzjUBaOl7pcFtOZ4GkynmyuZDVU0EyEFNCfcThXJD7r+WDD3zPzPgf/8j/r1BoI0RtKaaUEV9u1eQTSCdGXtifDFvt3c3pjzSO/GyMq2Z52t6cIGExZ3lXnECUPT1legNTdmBlt7S0skMN/+bCRBc2Mxh1jxllhObCjrDSBNGVKYKgdvwmB7C4KVTG3grWljbmQoK/BdUxZMKLtvBjYgVN0Q4OakCafrtuGkE0tXQAz1KDImOY6MDjl0YJk3xjwyx6D3RTCImVZsGm5aqJMkNqglJ5kTN1N2XBBRFk5OKHgkyVxX1CQVhKHXa+RcTxvLrLMyIIMlwEN32z1xnDlQwzQXrBmt78hcoO9p7TLpO3zZE7s9re9xOyNPpaoXbqdSuHkSOTCDmIOYji0dmx3GEZ9qrGqTFrzgSaYzCCaOedd6AWXXU4EltsY3qmjmECzhlpibslAv6MMeN9aWXoG9DlbvDXOv+9YxFg4ZwsTDFei8kW1hjmT4wPtCjAvwQfqeOILZqkrWXTg7sBQ01ir/dReMZ4SqPSHjtYYEuYwIpivFiNiqIOHlp4Yugpgipu4tXpn/ZJoCUczJqPs2Ip5oyidTD1nBPjfIJ6tDvX2MSli05yb6tNVaU5rurDlpXkSCzIooWptJFuauuJF1SI9Q0iUYRtn9AKbrd8zQ8baiXtxWWYfreTcT7AhejdJ2IitszxXTGlgWrU039RRboRKq9qr6tYb1BkOHIHMINmQqEbTAWhBbkohh+d8/LPOPcKmcVfpbmKUnaj0mzXTKum2d93r4QjUFT7Smx+RgNHpTgDBT9mUU24QgrUGDsGBaPl4LtckjQgn79zXBkohVWXIKxjBLZUIRWE4GSXplez7rQdfrtCTDaH5Z1UJO0pIMoUO4sPyow8WzKhDFc3oXptj7opdrat559roHQ81bFdKn126ZjDhiYdjUIZIbnhiTGTq0nK7v0ZpkslUw1aBamnCW1kmUGWZMvAMzyRg6Cs0ggkz1C7Tz7dTdz5gKBGaENzx143UIBnM2rHWCrqqiNcJ3WNuTbQdtYbfsYXeJ2fb0ZQ+2E6yB0XonmuHN6c1Pzd7MScQAdsI5jkfCnNaNHK4GdyK4C2OOwbIsxBqMYl54BYWog9BQSR5zMiMJqwAtEFav36rn4oIWvVuBM4IBVLor0Js7iXpBDcO9YbFVlZMYeqjWBowjO5+YTWYWPFPQTCNprVJyV9N8qTWuBqvRcXbNaTqxsEIox1SPwB3m1N44VtIShUdXO4Z01yEeME3BUYF8y9L1sSiMfUSQcyo9y2DbelH9qa3ZWSu4oFclYVGVDwi6wlolf+CVdEWgBOfEdEk98ybobBJVBathGmbMOgDozmGGGuHemOkcMzmSOqAS8CzmjF6vFzpg3h/j7F19Q28FzXhBqlaJGE1xw0JrNrbqomDONgXHuOC2jTHk6HsucmARhcX/D5C5/6Ncs074GcGu6bRdPCt3dqBh4rgQGzPg1KzS58cU9pU0rC2QSeuOTWeNJLyyCIvC3x9TnyzyFPytmkfNEg9hye4p/DYC3ImYajqh0tJsqpGImAQzVpr32si6MieZpo56OGSq9LIUTk1CGH3sCAvCUS/AG0ZnxqRXj0CLKpUvTSPncmqIWVEOI8TS6ZVdp4du1YlqOZUojajStppUKicwq8oms9g1ahqbCVP27uQMvDdiNUZBPkbCMMQcGYw1SOunasgbDEsWUxMqMnFv+j31e1sDbw36JaZfJtsZvuxZ2p627LHlEtM63hYFf+946+CtmCKGV7amPTBVSTDJVYFVFZ6J3WGT6UmzFdapjZcT74HP0BrMVlygeeoLadP7EzRVJRjCkcXSMG80IavqHbljvrBszdOmfkFrDa/qS5i+AlCYMTxYunox4c5wZ9ieGcbaz7UzZmOkDqkOLOYszXFPFnf2rTEJds3pGXSvRKUApaxgLggPBkP/LugST3JO1bXWi5antTKtMusI1owTLCM4Q0nLLMYbczsdtuBVa6C+zvOEmJPuTNNh11tVC6aKvhLnagA3Vd91WCloioGWaQwX22oErJFM159R729GMq2q+JysCcc0hjdmDKIp6TSMbkowI7cgq8rAmvDw1pVYUMHdt+ot1XPbWBwzht7j9v8ZMFcaIeaMdSEYqWQtptFoNAu6Tw7ED42pPxbBPTJPGJeZMbegG410BVQ7ZepbC0LNLjOrKqWCkdmJdeA4nsIvzfWd6V7BD2Vj7ow5SNPPFu6rTTpzpdmGo+uE9xCuHFa/P6M69MroLEwLDjUI8UVZBxOWiTMLcy9OvzWxQuoYi1AJPrqTccAmeDbSNuoVlbGjRVUBRAlwqsPu0G17r3W0ZBJDjcqZgo4o/BJrHMcBzOim4GKFJdKg967s3juPOfPQl66yOpzZGxbKjmNO3I2IoyiNfaHqCG24HsK1MdIn1kQDtHRa1iHQjcGCL5dp/TLRLuHLHms72rIn255EG6j1BW+7yn67mAlGHZo7zKG5McbKup5jiypFMSYmOfZEUyDbYA9VVl2VJGqqKbALmtmqHzEtBGetrCpUCkMmq5eSU70KVIarz6G+knuxJ5aFf+mb3+BXXv9uHb5PsOwrC62Vefo3G1Ye1Rci+Y+uXuM/u3IFT2gVFBcTW6YZ7JqCk6erMU/iMQS91cFtU4mI0ZihpnDzZM4Uy6sifrju4awDZcYG2eQJrhKoIlrnVmWnqDiqGk4gaBAIIgvbWCg6PAWF1O9E2DvVyMRdO7Pwend93/RNb9JO1epI52hK7ibBbM4sWEjMGCWZM0KHqzmTWRm+3osqO/T8nArqYva5O25O80VP27eKzGq/BSMmS1W/FGSU1TSOUFXTtPMYmfRADXYTvAn
GtCRysrXpf9D1YxHcIYm5Fn2uabG6Fo0X9tU8GONYAZbqNvvjJk0KT2+tCRcdoS49TQ0IdAgE4AVxuHcVnCbMmsoacmtS5SBaIYw5sRBE5GNiTUsWnJjVgClqUoZoiOCEg3k1DE/0ODvhvCLDdOGgVUmfT1hjgvcK5tqYYixk4ZBeNUEouFe2ji84k8Ub0xL3WZCIgXciqmyNqANK2D82i8opGEYJfB0A1fnvfcfSd+hQVcNbfUpjzMkYKxkLc67MYZV9KONJm8KWUdPbm+6x92BpDcLVTMxeZXJC7rC+x3px85cz2m4PXTCNmxgmG62wtU7ve9wbrZua2AWnYLpPZkAMBnX/LbF0kgOj+ibZ9mSuxMZYGkGmsG+F0EZaMTsC8KKqoftn1dsRHa6aYgS9uTjOTRi7d6cvO7x0CU9F8NKtpyCSfP117LlnoS/ku+9gzz1PPnyI3boFH35IfngH/4lPEK+/Dr3jL7xAvPM2tyiudaqq3JmfmDFusBRtl5QegLmKZUUTVXiqsuo4h1XvpZF0gjEVPMUqWcWcKYbPjIlvEAc6fMR2t4LUFRTdFKzC6pDLLYvfMjpVsGmpxM5MxANrDDXn8CYII5rgICs00r2JvRKPk7GM0pOEUaUwVGU1MxmmlHFmii6dqgzEminNQbUCLLe9IuwoXW/IfGPIGH3pWn+tq3HaCyGwgkNTAd6qr6hsvvZj9Qs8Ep+qDp1ZTJqVjFWJrxVB7XGB84dePxbBPTMVGExNvg5kS2AwMzgk9IBdTrEN6mFkJm6i7blvqq9SmhXHNLMVxVAlbraN226nzNdJmFOc9Jxi3qTQuOnBkoFH0KmM1nTCzhME4tXwCTUzXQ1cNlYCgadh7Aqzi1OgoBrHmYZn0+k+12KJVHBwLYrWFsJC+HlabdrKiKsJZW649VPmbTlwRMccmeSYymZMGyNLvKOsyYtGFtUENsw6bp3eFnbLjl6ZMKb3CcpYek4OB5XDPip4zQ5jpS1JxCTGWot6px6DDWgTq2DCTKYtVc4mzXak7aCJHeO7M3zZkd6xvqO7WDLWvOCEhb7sce/0XVOTt5qpjjHaSu8L63pxguRaGutRGGmLxJZqrs0j0wpiSMGFVIIwq4yvxye8PkCotQ4Cx4tBYSdxTeasA1JNcF86tC5Yxhy/fZv+2T9NvPZdePkV2q/+aRgr87f/Af7KK8zf+R3aL32B+PrXtG8ePaJ/9qdgtyfvfED/3M/CX/sN3fusyqvojK15USNLIOPioFkT1i/6ZtJqL4R7wRxdAXmu1ZhVI3JTyW73FlcDNc1PEGtSGauOdGJpeFtUWTk6NGcwLi7o7uyXRVWCGcfzh9gcyk5jQhNEk81LaIQajJaVILmCczxmV5GbNkDBemSIPhhez3hyrCbyulVGW3+NaqKeYOEtrosnv1Wim4jJq2+iZipYh6UrXlhTRr8pwjOCMYYO18zT2lCPYEsEBdPM0muYthwtJhSd1Pnh0f3HJLjDYcziXlczMBPzoBuMCelBi2A1cYSdQc61sspNcl1ZoYsDbFMnakygkG/twepk142jDg1DJZnFgFwFr4QaGWLq6KGPUusZonFlq3I2Vaa1ytW2B0o0VQRz0vpSbzqqmUjR4GDMrFJT2FqOYlsUl1wbxss6QSXr0vvJpkCfbXhGZSMTz9JHVLYSWVlRMzBnzPXUzBtzxawL4/OFpe9xX2h9T98J126LsP7WGoayktZgRtHzxmR6x6Yr0PeViFX3aZHGoAGek2YDfOUYQ81GS0aV3zsTrjt9ERTTdqXkNLwtOnT6ouBego6+7FmWvWCkbrRudN8LkpoDb02qzq3aaxPrk2W/YBYQnciVaI2wRtAJN2HPjgRLCRuXKiqIRUiRa2Z472rs6StqA27CIk5JyH4neurpQGqqjjgc4O5duHmT/OA94rXX4OKc+du/TXz7W9iNG9gLL8K9u8S3vwUvvggXF8zf/CL9X/iXT1VWm+rn+Na1dDtBSmpEblnohv8mYeKOW7FLWtM6Hqsahc2cPkUCmKeAWhAfVEa8MVYqGNGJXaedda7evMrZbs/P//zP8tmf+iyvf+9Net/x9a9+lZ3D5376szR3PvjgDu+89Q5vfOd17rz1DnF+vw7pLYHZbDW0VrL+q+WoCqMgxy2xU88gmEM4+yx5hip6Be+Yomiqc1XJYFUFmXZCBzb4pXvHi+baTIpgq6Z5tQELIpSQTDtUUHJrTeriiBNqkFJ/Pe4XlLJ9pj62aw2fRyWQ+Ekj8oOuH5vgvq7FN53BbOK3Y8Hi4u+SKyOPKsVDiryeCdNxW6QqdK8SSj+LEjtECgMPV6k+cpLeAas471AooVcmKvhg0koIlCYWgLuauFYMHDFbYFbwsFTj1VqV41PBB5+0FgRDmVA97MhSslkrju3QxpvGY/+Wotrlll04WJNlQSpgqOGs/oSqB8E/ypoq2gRidiDln1gI2oQzFPTdq+FbQozWG16Ye9/voKn0bAatdS3Aht4nYgm1rn+Lq7yQOSThj0HGUarjTCxWMjppgxU1h0cKm05zNQmtY76owimY6AQbQSlTd/RlR18Wlv0ZfddZdhv/WRTOsa6qHGy7r1NCmtlgaUQ4MZ2YjWQh/YywxuSCWQ1UZe+Cw7JYWFnguhXGK/GbEaHMbCvXW9vWWJ4YSUtfJNtvncjJ9fNzxs2bjDmZv/Ff45/6NOz2xDe+rn0yBm1dyd/8IvHGG7Sf/zyHv/k3lBA9fMjhv/0Nfu3BfT5xPCgpoYJ0IQkOdC/uPapc9YO3V5YndbVojckMKxog3+d1tBGz4vS9nA6+TVSUBs073XfsYsel8+AyZ7zw2je5/N6bPLq4oC87PZsYxN/8Dc7PHzFn0Jcd0eHDK879Y7IejjBX8cKP1Tgt2q5vr6F6abmVZVtvot5LFBRDCpadUTBbbmlbQS3k6T1RcSiBlcn/wZy7G9zmTm8mq5RKkNyN1hfFFFOPZKlKilob7hAxKnCL7y+fDVW+h7EKis0Jc6pyiaH978n0FML037eI6R/t2nivooWRKsN0nxPLlbDB4EjOtRag0+iQjfF4aT6mN9njRk6aVJVR/cNmypi2gGNWgW6KL7uZ9Gjti49NSMDQDcY62HlTlkud9r1V82ZiLiWeeys2AniTjYKhEl8NF2Wyeu4q4ynI5SSS8OqcA2ni2mLOMaPobWrAFKJZ/QplZaIxGkwdPpES4cw0cibRHG9diyUS7/V9bcMM5UHTu7Psdiy73Unm782q3CxYtOkwtDkZEfQ00R4zxCyKIMZRZX4OYhxovoOpjD9ykN7F3Eg4Unx+ieoZKfaHDkEpf9NdEM2yo+/PWHpnOdvRFqfX61bjUYdtTGdcGG0OIjvLspBDHj20CS1oiwLY8TgYKbWxmROlmzC3ogiejpeT1D1inJqtTaWgNniqxLaTQlICIitYA5K9O1fefZf/03vv8/6zz7P+2gvim89g/fzTYq+MSaxJXnuO+PRt1nsr8czHiXEkbj4rL6Czq+oP5cArsHSDZsHixs69RG+2odsoeZA+wsoeYFbpPyLFMJnqYY
1Q5Rr1TKaQBY5zSp0axjRnd/mMT3zqk/ziL/4Cr7zyElcun9EyuXv3A86ee5p57TIvPv8Cdz78kHfefJu2dF5/4zs8uP+Qw6MLrt+4SdvveOH553lxf4k3vv09/tP/5/+b44P7XLt0xvndu8ThvNb4Rv22aoBXVJla/zFFZxSLB8ZIjnPK82ZGefm0aqYq+GduOplqjpH8BZs8y+QuBb1sCaGX6WF5xlgoQWreaO5Fr/RtORTkUky1gmIIBXkLef7MOTmGMvpWliLC2vNxM5d/AoJ7AiOHGhZjMmwSvrJrxgiJSXZN5SImJdsaEwO6NZVgpuxi4hyLMpRm+NLxEOzhrQmiCT8ZOYkcJ6qjZYmfcmPiKLTIY0S4iIdMjGKqZGpeMpAQy6f3pQQsxeOtgO4JLaQIja4A5sCaUfgzyvpL2LDhbGQUZNQhJb7ayszZvfj9pRPIBG80xKEnYZ2Dk0NAFHs4VJbOSJa2IOxQ3PDWhR/2vtCWzrLfs+x39F2rpmUF/GoObm6TmYH1gc0pWf9ciAiOU6KmHCtLa+Q46GB2I44Hmne8GFFiSDTJuwO8dygGlNV9XiOK86/71VoT26Q7tkig5Etjt9/RrJpqOYmZrAzSBZ3YrMO1LbRMdjL24WIOpjf6fs8Ygz4XDqu8dNTg14bUY2vMUdm7uG4QUbYKVtQ+NTBnFpk3Jm0aY0rpyVjpiw7cdV1543Dk/rUbXExllsc5GSNYAw7HVYdMlrR/GfLtWRvzIHrutAvRa1P+TA1Yqv/SmrFzWASnS81b/QDK3Cyind5Lhpqo+m8wMkQRnKkqE+Hck2Tteo3YnrOnrvP5L3ye/8W/8evsl4X92cLVy5eVod69yxd/97d4+/d/ByO4dOUK3i/x6OFDbt9+Cr9+k2efeU4Q1a7zTjUWb/+ZL/A//5nP8o2vfIvXv/0t3v39r/Dea9/Bj4+kwmXrx1bIS8ilmHgBxwjWORkJB4KDicc/zFUpRSmyK2TmCVsHqrq+X+QLbKvskzmngrPZ6bwvpURVF7ZJhjFrp4rPC9abcyghyy2+JH5Uk3kNlUhtTs5MlVswT43ZZv9EBPcql+YsRdxU07I5qw35fwy5wolCKArZWhzg7huFD2FiNPBFzVNzyc5QE6UvnUhjZLFIEmTSE8XsoGhHeaJfghq3zYwWFBc7CoubJeYo2EQa+yq1q8HYDNAiqpUDqKrAqnJAEdhYTqe7IS+bTEExGZUd5Dwp3LKwdzBkI5DC7qoUZfuUNTwgcrD1H560H2690Xuv967MvS+7omA6bdnRdwvLstcB4CUcS8Ez6xxgRuuN/WyqFjLIIbFTuZQoi0ljrkFruxO0pHXgjHJWNG+skbTFi1rZi/Wgyqd3k8LSXC6H3Wm7hi+O72TIdep7pBGlMdjc/NzLfKwvOlhcWbc8XvT3MDXuJkm4KIIbu6Pa8GRRJbeVskHcrQLAjCFhTauEZJTi87iyWCPsyEB46mZItVEC11Arz2o9NtPvi6n16qfG7hPPuTtZlEV3HegyhlNFuCbECHYuGmCYflcvvEVBrSqNUGWiLLFonDNLXFV0RdMvjmKxPPviS/wz/9yf4dd//c9x6UyJ1fmDu3SbtP2Ov/Xbv8Xr33uD3f4y5+crL7zyIhnB/Q8f8ODBAdoRyze5deMGt28+x7PPPsfZpWt87+23uHR1x6/9j/80xD/Lb33pt/jrf/kv853f/W3i4qFU4uSpuTtD8MycIal+CkmXz83WEVFGP7IOM4lACmLaqKfaD+blcVO0RYNTYgNi7Xlh40oMp0zeUtm4+7YPtyZqlhC3svipfTHGSsxJBzyioMA4Va5MYwz5PMVjU50/9PrxCO5V/onSCM7kGEl0I9vEbZUoIyGscebqtufSTlDDMcRlzsKzQ7Wxyuq+LcYN8xQGlqvVDZfArGdCsyrF69A1lykTskDQoCn9HFkliHWy4eJWwTiqnO9Nf1fDZyflXfD99MVIxkiCRaDflBtclGtiOpJLt0ZmFGZaDSuTUMg29gbC/MggrMOUXfHMLFhAHOs0JPhyqRlbK4qZKWvvfRE0UxxsUzcQa2oYNa+M2UVvzN5hkR9ProOY0gcsXdSusCNjHnF2xJonXDLHgFmS79pQpayC7timqB1Ba9vzkDVwbDS55gUmq1Hl3qovAXWmoCC1YC0wH/JzaZ3Wg7GuQBO91oYOK45ixRiygMjiFicnW4mSRJFmxZdWhuypLC5SmHBsWPaUitlQryDsgM9kwU5e+escrLNX0lE9FRMVt5UMPl19nhyiWBpi+WX5uGzBSfDlUCepAna2sqoIimKYYvRsB2yGGrHovkbhwLOw7G0+QPNNMToxnKs3bvDST7zKr/7ar/LP/9o/z/PP3cZbctbP+PLvvMXf+c0vsdtforUdv/CLv8SNp2/x5tvvcvfOO+yAn/zUJ3j+5Rf53ltv8fUv/y7f/da3uPS1M37qpz6DL53l7Aqf/tSnubzfc/9wwUufeI7P/fLPc/7wDm99/et4QTCC7VIwaZj2QbF43LsgjsqcbeMeW5Kp6t+2pKjYNiJFbH257c8N09ee3w7Xk6995hY8nqgkxFSTyVirKnpT0CduTT0pC1aXCKPPUVYkQh5iylis+q4nLdgPun4sgjtsjINq9GUKJ4uj5MW2EDZJk0CHgGOHPkplhx7GriANaxT2ZjTfkSE/68ySxydyRNyGT9iWycpGwLr4yp2iE2KQrdSwCGNJOeTZ1nxNTnzijUZJwpzKZjOVhbvpQc0clVEBM+m+42IUdiigRZ7krgOsVUqozE/BS2ZOhbFnYexIUJERJB1sqpogy9xMHA43J1uHRVTCtrRSSu7UOC17Unqr4FnBfLdgy6Ksu3i9FkGMIdm2oiFkJ+dKhOMjiv8d8quxJIYTdiwKp2MpfvzWMIUoP45ObDBODCwWnMfUPQoiEYOgNlVKMpSWzBiyh7VGWicYYsIkOqxMMv+B1cHtVXrrADtaJ03c+5EDj7ml5gWHVcsuHzMYZJUjjFvrQR7mMZNsOzX8x7Fgk0YeJrtezcoxmL2yUCvl67ZEtwbbnMQcEANnss6DAjaQYxPFyPNoE7splDg2lQlmSzyWE5tnGkS6jMSg9APl3Okb5KAA2HueMtuln3H1xnU+/yd/ic9/4Rf4wi9/gZdf+jjFe+L+nTu89trrvHvnPp/69Av8wqd/iudffI7X3/wuwcrZkjy6dxdi5f133iLXlf2l67zz6D1swpVr13nw8Mg7r7/FxcOHvPzcUzzzzLM8fXXPC8/f5umXX+SDt98i79470aS9AnYde6qy6WWDoH2XlUM46kEc2RrhJbdKOamehpmU5QBVVemjT8Ja5ZmTk7Zh4VGVTVN2P6NYgBQMVomdUJlBrKviSiQ2xaTpYTI3y0mPwEIVXeT8J4MtQ4qGFFC+Llb4d5W9xeqgGy2caIaNRh8LZ7vi63pnncLVKbw9U9m8V4m+qcFIZPLENs1HWfKaU4EA9GBSQUbZeDCrmdlN29LSi29emP92swsTs0wWEx4doACCGoVujbmuoqilBkNoH0YJRYzWu
pq89sQPdspDoz5SsAAFTVnIBtl6UUDDsC6r3TSI8BqK4mT3CuIL1vaYNbFjyswqXU1TK8wwy42utUZ2eb/MKk2jegvdFEDmTJbyy6A5uSSZ8vkQTFHN0iwfeQomkq9r8YmzrB3U7LKik3pONFiigjGgZnJlrzZKWenCxdHmiLppEZsw5bGy0VqDqQNsXY+iV1bzeAM+srJXJcFquDnCn8mytKjNq1WlZqaGM/jJj2fSiakBJjkGmY5YfgEjJGApcy/K6zwKy3WTojRjMsYQBh+U7/6obF+/P9OZBVc8TvIKFmJjnDy2v/WS1Y/tfp642bX2ENWvvpu+LOwvX+Fjn3yVT/3UT/Lpz3ySp288hduR8/N7/NZv/g7f+fZ3mZH86q/8KT73s5/j7MplPvjgPS4e3cPnKiMt77zx2rdY1wueee5ZXn7xNsveOD9/xBtvvsHS9jx14xb37t/jr37lt/FxweXrt3n2pY/xM5//LG9842vcf3iOp3j6lpuUqgSKZSOChZK+JpZathqKUiKrOSenrcZWWeu9S3BkG6KpxnrBaFZQmiiNnYiNfi1B4Cwk31IuqXOWkV2EYJhQpRsbc2Zs66JgnnLO9Bm0pCDGSoZ/yPXjEdwRvu1akVUaASmGQGtUE6NxbJAlVbfWOUy9CU9NN1qa8MHE6W0pJVeUi19qqMJ8UhxgJRKSmVFpWuUel6amXAWfpTICCn6hSvZNbq5BDkp90mUuBmU10Jz08nLP0KCODYenlIFqE5LWIEsE0xox87QZs8rBIgxpMaDyb0YIl2PWVsxTY7WXF8f0Bt4VXL2C2okrXlNkeqO3hdYXlpqO1Xwp+9JFjBlTs66Xra/3XvchSgxm9NQGmSk8fukLxzkVSEKwWU7BQWpKKdsWv3ejFYohEDGwCd1DjpZLZ6xHGYehxhZNB+lxPbAY5MZEio0ZMeVNDuWRI02EuZIBeYtQ2KswefUjpHhW+a7nH08oCwuA1X0w6N5K4qJK7URDpOC5okhm6r0YxloZ96ysfIPRIqTB0Lqdp2x9pjL3mFOOqXUIZPWJYhZXugKAWT3vrXI1rWffMPMZJZMv3NkKTkwdMo4V/bGGmbTOcnbGS6++zC/9qV/i5Y+/xNO3n+K4nrM7GL//lS/z1tvfwxfj0z/5WX7i1U+y35/x4P5d1vUccvLe229zWFcuX7rE9Zs3+dpXv8K773/A5atXeea55znb7Xj/vff5wi/8PC+8+DHeePNdXn/9deLsEvfOVz748u9x/eZlfvoLn+f321d4+OF94vwhdryAnLgnHfnxnCwRjFrvUCyESiQkSIxyXKwvqKRj+xiconvtwwjNetjUzOIGiE9velLlTMpJlY7FybQvYp4O5jmDOSYcBy0KgsMgVu3hKPiu/pv8ExHc1d7wQCUnVEa1OUJqNWaZg4G8LkYbQCfpgmSaMig1V+RyaAXPRHm1z9jc4YANR2frVj/Gzc1FF1RZLejFkZAiTX4SydSGoRovmXVDN6FRJ6fw2cBKOEKVzNuiUhhuzSGMmTIJ671rCEQKZx5z6P1kTXdK2ai25nIL7GpqZQWSjMFjDx2VnkBx7nsp/HplyYtgi02V2BuaH+hQcI2ClA4bGWCpT0ErRff0MuoSi8DzsWEaNlmrbBWzxgQlzSm4DDkGyp4Z4cQxwJzjuqrxba5Dcgoz72PgfWUdRxgNupPTOD+olwBWnHvRQzcq3ybxVibu9e+tMVlOjjxp4yqBU+RQRu5RTWktS6+q8sR3ZxOw2Qnzz1MmX7YXVh72syrMEkBFJscYjBSry11Q2rqu5R6qg0/GeQk1GiM8NMMOIIMxtx6MeO4EWJga8iYRjxKYKWVwKgE5RsopcsvOrbxcjEpKtNJb61y6co0XPv4xfuIzn+Tp526TDh98+AEvv/QiAbzw0itYa9y9f4+bN29w48ZN9rs9xJH7H648uHefi4sDb7/zPd557x3W1XjumZe4ffs29+7f46nrT3G4OOe99z7gt770e3zxi1/kxZde4p/7Z/8Z3nj7HR58+C7GZNB4+vlX+Plf+hXe+d57/Nbf/lt87/d/n3l+/zRfocAZVWJhJ3J+a/4YVivse+u1qVISJn+aO1yBaSMqbM3zU/mXzlg3pp0zS6tysiWxKTgeK9hoMMtYTwIqNcr1evM03GTJqMrOpWmY1Zv5J6KhiihYngqDai7Bdqc3HHlzgZQlsFfTaoI5q7kaTiKV092JtolOejVWRNXLFI5V+AsWU+UzrfypK/i6LF4XK9cM7/JEsclAFp4q1UR94mRNKpWiUANnsmjDxABrzGjgSdTpHqV81EOX77PuRLBNOKWUf+qu6+NkEGueXOf8iYyy+4LFEW8aBde6VLRuymaPzUhPlaauisQRrCLzeyOLWbKxgbZm0+bKtw0ZF3NHf5dBkz4/YzLmCmWWlpEn+imFg3dHfN7mevZrsWli4+aLZTLN8XByL1XyCDUFfSa2TsKOLCYGyLLbq4rySV+VJAiEE1QzQAccgkQiBAvmLCiuGfMwq2fTGa7BKa6uJZYaqCCLhil6bAVuYxazpRXEFzXURbi+2yImR2XR29SsCIPQOMO5dvnwVIbWQlznYDBNGV7EqJUxyDVoBVnNqp4qsVSwSDWmg0EU732DVnSKIJM5l8mZ1lurno/IDqtBNhTgm7O7sufmC0/z8c/+JJ/9qZ/m2WeeZemdjAN377zN4XhgrBe4Ldy6+TQtB++9/R2++c3f5/e+8nXeeucuu91lHtw/MmfnqaducuvWDTIH169d4Z03X2MdK9eu3eTFF1/g8pXLpC/ce/CQ84cP+eLf+01efu4mL7zyMm2cc+XaTX72Fz7DU8/c4L8ieeP3v8LxwT1awWcyeKMStqpwqhIONj97GeFFar2Qxb6xLXFSxaVksCqB7Wtj4tFk2Beb4E24vTByo7ekuZK2TGXvhbowRw0wiVBiNrfJVKLxSigpe3HP0BjAHx7bfzyCO1QuVQICj1LVxcZIyRr2q8HYKlGorr6LUaLwii/KukbMiq7Fc87HD1TQ2yZ2UBm6ZXDUx8UgEJZe6uSaBBMYEv8YrVSTOt2jym2YNO81aBeyaRt217Qp2duutYl6zXdt1QMQvzbmKN43EHGCZRqNMeUFc2oEJ6dgvyn3gixlubGcnOkM60BUFeHO3O6bia1CZLFS1Pdoto15oBZjisEyJ7Su0nbD0GrgcS+Iy+o1ZCRhUzMkB0UpTCjxUnP9LlVlKkfXOTgeBzNg3YQxi9VsS7C+4D0YhxXSWMwYuLjxnLPkJfBFgg8zVSKBgjNFX3wCt/TCUG1z9muNjM5q0ibsls5MqXut3l9UOJf0vDG2rkpl4bN+R2v9NCptZtLLRbTSGh3kKeDAmDAOkEv1SETZ9QxiHjVAmSDG1Bzf0BOs+rS49XXQEuXxL2l7guxkG3i6LCMoPYa5WE3kSVlraaLhVCVihA65szOu377NT/7UZ/npn/5pPvnqq1xadpw/esRFHc7f+NZrPDx/xMdf/jgP7t3h61/5bT68+wG//fu/y7vvf8jPfe4LHM5XPnj/DpeW
Hc/cvEGOI4bx1a9/gzGPXLp8lXffu8ub3/sOn/vcZ/nFX/gCd+68x7h+mU98/GUe3XmHu++9y+7SAy4ePOBzt3+RL3zhF8Ebf+c3rvLbf+1vcLE+0F4pooYV2QAEealKk0W3EsXqi1VU2mwOTpYTgAaTlAV4Uq6Thbmb8u4xDOtKcAyN78sI1g16rQE4UYryWXz+zBIUZpZ/FlUNJsmgE5qznD+CKsMfIbib2X8A/E+AdzLzc/WxW8B/ArwKfBv49cy8Y0rf/j3gXwMeAf9mZn7pR/6OBN+ahBONDGtig7RINeFCtLKWhjX9F07xsgXTSJEYTAYzDZ+i/UX5xMxqVDXblGicWk1Sf0VtME7QWne9QGubJU3x6KnCwv30960MN0Om/alGWjkbQYk+YpNzb8MYeDwxPgCiMgZkp5BTmzFiiNpI4Z5YtYSVzbvxRN+Cxxtyy9TqC6LwQytSv5o7ibUNc6XKWTW1o6U42tmIGdU0nYIkTq9VzT9HPi65qR2n7HM3KEINwmBT940cEmfMrUE4GMeV4/HIuk6OM4iUV1CWiI3WWdcVbwcGsFuciEZfnb0Zc02MRlva6cFoLxibZfHmda/7IbyZEqXo6xpmk9Y7u+w0dhyKXSX7i7p3mCqt016z8h0peMo0yEHNX8F9ZmUWUYfznJPlRJkrm4YNIkSBnRj0CFlgzIHPFY+pJqpsSQVHpuCfWepG+apLQwE6TmxTI2dRChbNFK7cSl9UmP3mcNkQZcf2e25/7BV+5k/8Ir/yq3+az3zyMzRb+fKXf5O33n6Dw0GUr3sXR24/8xSX9sYbr3+D99//gC/95m/zzAsv8+f+Z/8yH7z/Lu/F+7z73ve4d+dDjucP+N4b3+G5527z3HMvcP/hI/b7HS+98BL7fePZZ57mb/+N/5pXP/4SDz94hxv7CX0lL+7y3sMHjPE+u8X5E39i4XM/8yofvPcZvvv1b/Lm179JxjipO2OjKW5wS+15rJIhCp7Z9kghB/KEoeDh0s2wETQUuLcGaUNeR6Ow91bTsTYPnqhn4sZJBJWF2ees5OGJ9XQkICdtridbZUeMux92/VEy9/8L8H8E/tITH/sLwF/NzH/XzP5C/fvfAf5V4FP13y8D/379+SMuw4duFxXkl6J/+Ymvq8Cuf1dDzO1kEDQTDiM0WSeri011pFMndXqrzblKNUhhmBU00uPEad3ynBaS/jfXKDlhdfbEJJRNviJmiDVh2WtKELTZP8QJzzepHK2B7aoBtk2lsUJEFMj0HpxGwRDJyUdDvzU1i9FStgfNTg0yYoj66Bp2kDUuImbUtKgnqIMF9YjPrbA7IrCRJS6Y8pTfSn9TFTLWlW0AdIaUqHOsokWGSR8gzEMZfuHuRjLWIxlDMMRYmetgjCPj4pyxDi4Oa0nDayC0d5W65SUTcyXnIGe9juZ4R8KRaFgOGivOrmwdlBxsdgJbkN2EXJvqr8iLZduq/ywbngtRFMU5VK0sKU59hFB16RSCbk09jpxao1DPYhHctzGwTAG1NadlOaFva7vulYz0RH1Ts1UfszhCDFVtufFhtDgEElnhwJVZpipOJqdpRLNmro4R0EJMs/p6K9+WrENn47Xvr9zg1U9+kldeeYlczzlrwfnxwO7yFe48uuD9d+9gY/DKKx/juadu8O1v/i7nDw98/Wvf5Sd/8hN8/ud+mvfeeZ2HH9zhL/8X/yXv3fmQF557jjlXMldee+0bvPXmt3nxpVe4ffMK53ff4rvvvcvzt/4EN69c4t033mB9eI92PLLLlfViMo8r9x8kv3v+AOzI537+l/jZz/0U73zvfV77xmt4xYDT4GxTdb0NtTHb9qbYV1aQTJruqiUiR+TmCySYxbMximGj5ncJx2bImbV8EdKqxiuL8s3SYIwiUxRT6jGWr4p2QxO2drABq8Ewk4HbY2rPH3r9yOCemX/NzF79Ax/+s8Cv1d//Q+C/QcH9zwJ/KfUq/7aZ3TSzFzLzzR/2O4ykFb5lxUtVc7L8v+fWHJoajGGccGc195bCxOTHbSFD/7TY8nAIHR6y8pSt6HZu4xpYbLYUa0fwDQR0CTyW8lFpSxfzJFSGSeugB9hcdrXpC83ETxcrQsMrSPHWs6EAm2JjeG4YXIiiOJV5RtkVTBQYO5SpmMQqI2eNCRRFLk1Dlr2w2OZWgz50HMxSZ40QBBMGNPUnpj3pjS/oiqjGpRvhg9EWjEFvMKbUuoIzxikbn2OUta8O1u3w1MeSsR6IOeXOGPpzXVfWwwVjHJS1nx85HFZlPoj707zjtmMsC+uY9PXAaDV8OBqsOpDDjZZ76GIgRRuYZioWwiBIRmdqY53FLAmJlPCNrgrHnGzsqHRBfh2JYRhqWjYXZ37UqbslHaeEBNNkHk/chPzjWw/Hqhp5bDjW3TlE4p7EKMpjMSnGGGxzSLcKSDQ+zUJN95qGtDXkKlNEzVRCs0jD4TgHOzTbzAEr+9CRjpUuozmniVZuyXJ2xpWnrvPM889wPF5w5cYZ33jj92nLjrMrV/nJT36S52+/y9uvf5c773yL3/7iX+fpp2/x3Te+zW55iqUF/6//x3/Exd1zfvN3vsyN28/wP/rlX+XRwwd8+5u/z5tvvMnFxSMuX9rz6N4DHr7zGi++9BJLJl/663+NT736MW49dYPrV65yn7v0y2dczMHFuuIt6O0Kh4vJhx/cgeUSf/JXfpavfOnv8o3f/VpBsNpPMVPP2UROaFGiNwp23Djy7mV5sgVRwZZPBm1qBsKMoZ85G4svW7ZQ1XCFoCnjwXT1dsK09qKo33PWIVCNVHV0ikmYlQwUjDPYqNo/+PpHxdyfeyJgvwU8V39/CXjtia97vT72I4I74o7PcmU0l8y/Shdpkpq6xcOwXa9hy5pfKGc1NStiCgI4GsUZL0l+zhMcQWFslDdKa/0ELdQYV2UrSBno1UD05lW6aYB1lMQcEy1Te+kxTteL5z2RRJ+2Z4ajtoH499PrIFM0ZA1B2RT8lAXjpHcNwdgqgFSrePMKl0GYynFrYlSsyJfHlyyMT/dsmHBDXFTGmVZirJq8XtiiV3WzzkGfTh5XfN9Zh2xUGwNbugRZoUNamaZK+sjJHCvreiTnIIZghONxralIg/Vw4OLinHE8cDg+YhyFta/HQYSas2nOfg8ZjRiDdT2wLE7G0N2NIWTCk547NrvUiMkI04Sh2syPcc2QEVek1NFDg0tiridarFew9OLz9+aso7jmbWM+afCIp+ypBWtImyDvFsFjgDLEstHVRLukb5XmrL1QbK1Z1ZSQ+0mkDsNZ3jKUoKUVXm/mImxXJHHTYHYxLjjBZzErcWlKPiKrbQLqTzRBYE2YoyDP3vBmXLl5gxdefZn91TNefOUlzi8O7Lvz1M3rPLz3Id/9+m/y5d/9Km989w0uHp7TuvEP/t4HWLvCL37hKv/tX/n/cf/e4OGjc5594SU+8eqrfO9b3+D1736Hu3feYx2Ty2dnXN5dps/k0Qf3eTfe4oVnX+AnXnmBV555hls3brOOwY0rl3j04Arv373DjOQ4H+Bt8vStm7z
08Z/kO6+9wWvf+Qo/8cnnee1b3+L8warMPHUgk3K8DJtEA59l612Mmaz7aL7BZRtcW5VeimnnsVnvCs/alKxJFjmjPK6qSqT0G1vcyGLfbYVWpGJQbqyuUI/GUvOehQIpiVN/7wdf/50bqpmZdpJx/dEvM/vzwJ8H2DenJm9Wc1CT21uKadAKSx7ASNEa5WsiAyZrurHbxj2uK9CVYVWpHYjrLidGNZU2P24SvFfjiJRjXyQNWQ53ozA5Y5Mr9zK3shr3E9blwdK8eLQdmoZZdwrybAttv4cI9otu/YhUk3MEPrPooMqE1XmvRmYRc5hDQp0QF3yuo+xmRSlkUzWmMaLw3Tm1gKp8j62CSGedm6iFQvlFAZ1N+J/oW6nJUzaJYQwzmtfAkXFQdmo6EnKubMrfOVZN2IpBDmXpY105HA4cjitjg2LWI+t6LKx9VdY+pgJ2JL3vyKbKJnOK0VHZ0lyPjOZaI30RtBJBxLHYVF12ABvffiZjXTU56sQxjpMboLQEqQpslZPjVv4aMgEbaSfBkpdSuHVl6SrTlW31YihhXtz2jTZpBddorQkt2UwltM5iShx2mhtQ/Gu3poO1DqDMUVXi1oPR6w/y1J+xjQ4YW/N0E2JZ8f3Rk6+Dxc1Y+k4HlKtatNZhOePyrZtcfeo6T99+mrNdJ8dDvvONr/Kdb3yLt998jQd37rOeT9bzwRtvv8HZ7oyPfeLjfOXLXycPyZXdnk986hNcuXaZL/3Nv8V7b74jmmfCEg3OV3o/8tzTN3n1xWd5/vkXuXX9Oh976Rmee/YZ9mdXxcH14Pzhfb753e9CNCIbD0Zw786HPDqcc+3GNQ3pyGS3P+PwaKpyTq13pxe8UmWqzlLRqUGVXh2I0i/YFu9O/ZDYoLN8bN6nAT66ZxtBglCy1CpZs4KSN3vlQs90kISxUbKz6LlQY/5KkBWmUeQ/qqf6jxrc397gFjN7AXinPv4G8MoTX/dyfewfujLzLwJ/EeD6bsltCruDIAhzeiLKmW+mPBPvku9ntpMyzGpYb8ws5bg2VyNUjpagaKkNKXy91Qmq6kBOkIF5VhMVWgoL7WWKZFb4l0m91wrmydbJvhOGDngT+yV8pwqgSltrYnO4y7OFCJaU+dZcZR6Ux7UoaQDyIW9tUUMrjcXkZRMxWcdQ2dYK+y8xFqn71UyNiwhlBEkrlWayAfjmRbUsfq1w+0JtY8MFYaximayoERtMdr1MwayJUjnHSUwz1u3vCu5jXZnHIzGDKG/1GcF6OCjYrqvml671zGhEHuWmyVSjMZVm+qmJpXuUM2k78bCzgqFNykRrkHSNTYsjFJ94XVdGVQGJPD1maQm8NcaYaiaWJYS7w1TwL5FAsQAqe7fNRx/RGuu+y76k3k+AtUXtuBJqlQaefqoWTT0Kky0vIfaYmtdBWlTlF3TLE95+zKoQqqmdMStAPO6kqkFb980dzcptbMMjFtd/ZjosF69D0xxfdiy7S1y9cZOPv/oxLp3teOt7r3O8uMfduw9ofolLl54i17dYLx6wPnrAjWXPtRtPcf7wPZZ2icjg6vU9Ny4v/N3/5je4994d9qmBM8tyhifslsatG1f4zMee4xc++2luP/cSbnDtylWun11hubSwv3xVPZfdnsttx5Wr14jXvk4+eMCV/Z7b166x3thz4+mXuXrzQzIGzUrUZbp3m8DJ2g7PocdpEjwJ6hIsRT5m1ikRVPPUioEWxegT5KIbHTFFguhWJ4aa0jPiZGSXuTVyixySYvc5JouJDTTO1EAUVO1vEGEIx/uhQfofNbj/f4H/NfDv1p//6RMf/7fN7D9GjdS7Pwpv364l8/u40B55EvA0tI9aDZilGlGzsvaYQdRElF5PpJXBj9g0SaOV4RPlOBj0aiCyqQwRlJO5DbF2mneab8F9at6nNYLGtFWBu+1Yfccw+bBMk7tgqyETQXC206xMbGG/7OlL1wYcZQ7VpE7z3vE5iSFf9KU18nhkPR6xWdAHYm7LqU50OW9NGTOiUC1oUdRoHTb6X5oRdiwscSc/166wooHJCFqMVNZdTahpUkTm2sh1FQTRiqXkzlzV0c+pIdmMcjgcR9kAHy8Y60Eq0aNYPxtko0EaYsyI61vDk4u66U0QR3PBBO7Q22bgppg/Z9KX8uSYwktjTpIVUkyVZsY6V1lPFCsho0RhoQMvbMrjBmXyIyulq0RCdMkGpgwti7bmJ5FY2UDklkF3WipwZFFN3WDpVbWZBp+Q8qzZDuESSBb0xsnPX3AgTJsKJpFENppBr77LmgUcRj623CCrfNQouG6aWes0etP3Y117qJS7zetzveO7HU8//xyv/sQneObWLS4ePRTcsJxx69mb2ISHH74HHty4cgaPHrC7dpnL1864OD7izFZ2lxZuXrnO7/ydv8/DD+5zuZ9xab9waTFu3rhKI9n3ztM3b/DclSvcvHLG1csaxLK7dB1vZ1Jw4qRpvvCtq9ewxbg43ueQwfnFI3rCs8++yMuvfIzXvvF1pEV6DC7IzVLPO63o1rYoGcpZXy9bB2yDSHRlpfiPG9Y6SDHkPluUY9cpUofrrN5b7cGsj6HEM+dUY74cZRXs18dq4VRiJx8s6UzC8kfSIf8oVMj/G2qe3jaz14H/PQrq/3cz+98A3wF+vb78P0c0yK8jKuS/9aN+PhQcEEFXzco211QCyE16X6b8qDE6U63DxqJMXjvtNE4tUxNMWpXsp4dQgLpZSGhEngJmL4GHIazMqIlGLkFPtxqXl8ZhJpoQ5HJf9C6hkOvvWQrHrICfy05zP5u+rnUZ+bcZjFFDGVa95jyIOeBNG7JFsSljYmOyOfNlrnV6p3DtVEc+fRNWJJn+2PjJJMAqUFElpQ9I+YTYNMLUKBpjJXNKmDLE2Fiaya5gDLLDXGFpjfAaexhqpmbMEosNRhyxNRjHo2CUIYhmHldlz8cjcx3MdQijRCybZvLKt7Ls7N7p3lhajTNDz9BJHegpCT7IXiJkwgGW1Djs4unPExwX9T158kjfKKnBNgHHNrgvsooHJRPuKRkFm//IBpsUPdKL8mai9RbiIngPHpf6hQueTOmqASfIRhVCIp6/UzBKgPsONsFRdNyCZmrAaVSbnYLCdglXdzZztt7VR3J3Fu8s3ti7qwHcFx0AbYdbY3/pCq+8+jFe/eSr9N45P7/gyvUbXD47Y+l73vjOGxzX5Pat29w9THJ/wbLr7C+f0bwGTszB1772Vd57+z1aM7pP9r5w4/Jlrl/as++N4/mBXCexBsdHFyzeuX7zGcBYD4Pdjetw4zlVvuuBuPcBV9144dbzfO/eI9adJjs5wXPP3GLZLVy7cZPzuwcypKKOqkpxBc6ObCsU5Ju8kVJV7unZJxXQi/1SsUq9eg0bz9BAHA/V9NswejJPB4F+RBQerwRUsKjU31m0SFw/Y6YqWdk4a91uM32Dx8/2D7v+KGyZf/0HfOpf+EO+NoH/7Y/6mX/wMjRQoBvELG6vKfu2VD9/83doEaTPIvg7McQZzyaFqOBxleDNreA0Id9mMgyLFJZqJweuOFUK5m
LLNFy9ABd8UZhM4WVGs45ZY7RG9h2tyXvlFNxR9u7LwjSn7c9w7yy7Ha02VXOnT+TUNycsk1gHbXcJOz9gxSrxXtXMlGhFqYWcEWdMZnnfJKjJaNWRB+Sh3tS5t43Va1hDI/0WIOUl3lLUOouEZZGAZswSOE2yd6YdaRj7RU6b67qW4nTWBKCpbH0MZqyM6h/MVb7ixCTHkXE8qPk6VrlWFrvAyRrkbMVMcMwfZ5cNZeCtGtCZA3KK5hmDMZqyZlMG2piK08g1M2LoNU/BWmnyHhoZrNSmioLI5mSu5R46pw6mOU8WENtA9bA6cKnyOzXar/vWKNMagzwJ02JONdkRFDZHPZn6vpN7ICi7LhVpjG1aGCUOg6gh6JbygPGq09RHyYL4HMUaKVE1yNlOdOKdLywGO2/slh3exAxamjz8l90lrl6/yrJzLg4Hzi5f5catp7l5+QofvvceT924Sf/UZ/nab75Hv7Tn2jO3CIJjN67euMH9D+/y6NEDDseVp248xd4ne08u9R23rl7l+pUz3OH9iwvuPXwEsfLi80/TzOh9z5333+F4nDz97G3sqdvk2RWxzPZnLB80bp4fuH7pXd4fwb1793lmDtyCmzduiEYLZd9QESc39aliS/o4NT5bFWuYPGG2JH17ktuQ7CdD65yak9AK1yedCD9RSd2LGR+iZGrOs4gQtkEs1Q+YqBKciLmzZeszBPkFzthU/D/k+vFQqGYxYyqDkS9JEfq3jEe1KKCJaBLyBOGNcE4GS3kqVbTx5jTIXuW5sNrtdwq7Lbc4tixHM0EjYc3HPPWcsgveuKdh6s+ENVo/w3qpEPtOGT3yVfcuKXlfOmf9Erv9mVR+rqyXVfZWHsHhcGDZ7cl1sjOnT3nHjNmI86RFZ0QnxspICNNQ68jQa4sSnhBEDQHPHJLiB8gm12rotnxxmIO+W7BWTBcmlv1UxsYWoFKCpO7O4oZPg6aG26ygbjnxGGr6rmtljmJrMCfrKgx9XVcJRabc+jxSruCKRhj5eBRcS/rSanKgVeau3da82D1RIpKhZ+mWjCG5fksTZa3qs7k1fOdUBVEDYjI0UHqEePekJvesYxY8tQJHzMqvJDUjoLVWMoE4YdqGxGuatqOg78WR9nImNHcJmVysn+7+uFKNaqiZhG0A3avx3Z2IVbBkQY6zQaxqlm7eJBZ5si/YlKbqRRlL6yzW2NHYubOrKtJcMIu3TnfRf7srKz27dAYZvPHaazz33PM8//zzLLsdFw8e8NZbb/Pxl1/hi1/5LS7efpNrOHMxHoYM3npv3Lk44qE+2n6341KDfRO8cVzPsWhcvXIF4xrWdsWeOvDo4T2u58rZpTP6btDtSDx8B1ueJ/sZvt+R+4UbN2/wE6++yk2MvttzcTxw5eoZT91+ClpjWXYwhwRhxQ6LTGaxqMxd+uJMwbUWgldtoypuojJOWfw2j1eCuCJ4jCzzQsN8qkKAwtm3gFdD7Yk6kFWdbT5VlkpINwRjJuobapYXOVT1jR9h6P5jEdwd2Dkqj/X2KovjFJCzmlSSD6d47NMIl2e38HWdfjEnYc6uAxT2O+OEiypAb06ORacjxT+ukxXPCn4lUvDNa0MbeBJkZUCilBm+LJrsQ6f3mlhU/+2WHcsi/NC6stCldcEH1aQJk8ApevGtjxqrFSvYumJrTZayps65y641hmxajzNKWj7FuvENZ9ddjVCjd7oUmb1tPYgJdsRbCHJcklxHOSZa+XKYhor7E1zRRVQuUjCMfIGGTHbngc3n2svNTpmzNkPMx57l7mDrpmTdLIklHvPT5svHBmZQG0uUTJtVgaXYPTPAmphWa07cF2VFVTkEyfFwFFNnTA0MKXZPzkmMwRyTi/XIcawwB7AS41DVCcWAEDw2Rq3aMqFr1ZCmPu9G4fJaOxuOOzLl9W9OjMr8tgojdbgtjaLd6XiimC+kGDWjes1s+UmpHKmG7caWmznxdJZudZB0Gl3MNJcYy7ogtpnG3pusipv+vHn7Ka7duMGHH9zjhRde5nhxTowjb73+XW7dvMIb3/59vvf132M5DsDJMdn3zuzG4XjgbLnE/mzPpf2OXd/Rc/D0tcssSyfWA5d6Yx2D8/sPuHH9BvtdZ704Z16csz64wziqJ3P44G0sV1Xz124KDmSl7zuvvPQyN/c7djefZ51HHj56xM1bt3jqqRvceeMdtvCYxSOfCM4jqDVigi4jsSHvnpFxsopGT7QK+SfgrtOny3ba5DDqIc3JVjF4Wf5uBmIlfdVz3dSJlR6klYDMGxZDiIFTjXIUv/yfgOBumHAvU5kp3nayuBXOpI5zZkrY4rICpotq6JknbHLMyVJZa0T5NahOV+YWk0zx1aMySwiVZqEgWSOZiZFErwZMTLzJh8Ws1zBgYXiOhhaId7/Q+xneFmXE3el9x9J39GWh9cZut6vmoEq4iMlxDDx3ClSeeC7FBtmr37vsycOR5FDnXdmFpjGmcZwrR5Mv/FK2whabn4Z+Tzonyh8hpa57Z10HI5JlEfy0zlkN5E2Fm+SwGqatDK8tEkMlk5aii8qNcJWGIDSMIgubFGUMxpzIYbFGnlW22Ur+bVUtuW9e6CXbFt6mMjVCuHJPgmDMlQS8deAofHq3sF6ERgUiOb/XRloj1Fhdj8SY5FjJubKOoWHTQ1XU4fCIOVY1M6fgnTWCnRkwlQHOoYMvKuu2rJ7H0OFus9hYCS5jqsde9AkmKwN/onnmdSC0BEfVyJyThYIJslgwNZKnVXBfx6zqC0GbqujZ9NaqIUqt6iIGNOt0Fhbr9Cb4azEdMt2WU/A/HtUM//gnPsG9+/cZx3OIweHRA+6+8xrf/L3f5e67bxF3H9JxLu13OMludQ4HCckeXFyw98Z6POfh+UPmgzvcvHzGlbMdl/c3WC7foPvCslcPIYH33n6X/dklzvZ7mEEed5gtmhP86C6cX8DFgbTJ2e4y/cZN4tJVfH2ARXDvww+5fKZDzXKRH7rYEniUGZ0P1oLyWmpPeWvF3NO6rF7nqUe32cQJISg/mI3gkJtZW82hcDX6xUxKERxKuLbx5m2DjPIJ2MfUOzRXr29OzWTQ9C/EsPkh149HcDdx2wdiABhJx4T9una8imw7YVn0yoJi0ntjLcjGGnX6zi03VwkT5WRIZeuzyqmiG+0X5PNuHZAQCpNnSRTmb6xkOTpuE5JwDXRubuzantb2eNcQaeud3W7PsuxofSnmQdPfu1g4ERObRrdi/5CqBEg1a6YW3cSYZgyX2EKCQ1nlns+p4b/SispAa/MyUbdaeG7UAIJNSBWSvsvjEiIPMrlKwSGbura58PVowXDKIdHExEnRzEZoeHNjknPFQgMvdBCJjbK93yyq1ywmim8y+Ggl0MqaIFRc4y7HSEITn9y7JmnNBsNIFxi12eGmd3JVh/J4PBa9uXQB+bhhvI6VeRx4QU6H4xHGECRweMQ8rsx5ZEQxaErx6W5qELoquFFeJOJMp6gAJi9k22AwoKVMICIUHhTAFSxaCerSdFC2alC3bQMbghIisOkwk2UahOlQCmgz6HVfP
YxeGO0MwTFmRjdjZ87OjJ07PZ1exmsd2JFc6gs7dyktEXw4L47M9UhfnPOHD6Ff4tH9B7zzztvceest7rx9j+PDKTdMqfAEM0Vy7ewSt28/T3jnzvvv8OH773H1auNKgytXzrjUnItHD2vfr+zawu7sEk9dv8G6BnfvfMh66YxLly9pHOQw5v1H2K5jYyUujnifWO80N5blCjNW+tI4v/+QD9+5w1mTpYMiaGMNw1zeUZFSiNoUnLa5J0cEvZgvmtJGmQhuWfaGM4h6zZYr0oomuelGjLROTGjdqGnpWHol66pCN5voDeo5ofrbH2ZVjQmv/37U/x++fiyCu9gCRkeBFR5zSXEBKFJxFQ0tH2f0M8CjnRwQR3WVW2X9+vGjPEBmUaVnNWijTkjxuTdKmtqTk101HzdoINFgiygWjG3eIL5xe8QmsW00XWsK9LtFcMPSOdvvS+mq4CvlGjX0uTOPQabjbWFddVJv05ZmTNKSlWSN5Hwm5zM45GSt6fSzIKfmcVoMVd4Awl/Nu5gBJHMa1q0gkBoMIivH0/fPOU7sG9MsM5yu16lhtNhY1ezNwJ+YFOPIVGszVsqZYM5cFeDdpPSMOU4eHhX6Tnxxm1amcOqRZE4pXo+VP7XyrGlLzTgN4rhibSGtKLCmQDhJYh7FkzY4rkdsKqCPytznODDWI8fjkTmPRBmFxdT7yoKMzKMmHenwbCbe/8IOZXTKlc2npoHNEkWl1l1rbO4GlMEISw5EoIyqLGUfq8EidUhXv2gx3Vc10oMWiUeyQ4fvOsUtk5d+DX9BQbwjS4HWnKU1zSh20SM19rGxK38UB/K4Mi8OrBeP2DXj/v17XDy84N69Cx49OBBrwJjse7BbksXh0uU9t565xaUr17j3IHjtrXdpDHbdsGGcHy+4c/d9nrlxg2euXSFJrly+rP1RHct1rlycX7AsjXZc8HaG7c+0nh49IB49Is8fYDsjzfH1Ac7EbMc4Nu6+/5B5vrIYWKdCscMUfz1mkNmx6rkQIh00dxYkDssRcjKtnhBVB2nGQaXbiB212fxGPU9F8YJ7bTJHSlw51TP0XgK0zJO9wCaUAp4QM3GqfnUCnF7MD7x+LIL79nrdBENFU1A3tkxLEl+NupuaGMRmu6ppRt2tEisv5Z3evAZISDGpB6sxdtvEGp2SnOCg7eeYNSxrcHBx3ye9uinK2Dp10Gz4alPGK9aDP7YsMFh2KnvZWucCjiFUlnm5S7bWOa2LrVRnIn2uprfMTIYZxzSOJMecmuI+NhPaLBMjWdcSj8UszZwZ8jHH9DWeYuvIurQ4/afS3UsyX/TEJrikhyZW+RyQRyLWsmlWsJs5BV/FlNOnbc1rK7m7Fm1GDZfeMpVqPJK6j5gEIrHmqSJjDiKPdLLG0WljB9X/YKOkTYbByPr4BlPlUIDP5DCOxDgQ64AwDscjc1wwjufM9Vj3TQpGCVJq6Ee91hHBUhS1sQniYtBm2RR4Ey8fOIbYLLtlV6rRVsmGmqiOsZCsKTqcVJDC7XsOLAdenkFrq2EtRX909Dt2YayTosFqLW+DU5SFwq685oWpe+k51PQznP2yF5MnC583Y9c6V5bG+YN7jJzcf/iIhc6Ykwfn9ziu91n6kX0LLnXnbL/n2rUrPHfzBu998AHf/tb3mPsz3njjNR7cfch6WGkdenceXZzz8OFVFnNuXrvG7VtPcXm/Z4zBvQcPuHHzJr7smNaJbFw8eEjbu2DEXvTe1ci8gPM7EA8x7xzOj3z7a1+TNUTvtTYVU3ZNMO9mPOgZrFNxoLuRYzBMDLz0redRAT0Fe23sKAhyFk1447fP1ECfmGXUFySDhuxSzMXe26ye1XyvyrMqQdhajluwLxiI6kXNjT/7h18/FsFduExiNZaupDSc+gxZzTOUhZ7QrkxhW2Zl3OMSDtUAhqXpxNvMkdxkBxybEtbEYd98PMKMkdCbFXbWiiqt0/5EM/OiIxlq7KYqiiiFW296OL1pHF1vC93lTtiXHb1ps4ln7pCNMMFLaVnsjaB18KlBxcVgLrUpNU0qygcFiYNiSy+8pM7beLgaiE3KaMw1D9ZN5kUZUmOqtwEU9GQ5sMr42iy8dgqSwoKWg8Yo0zKn50oz0VS7GWM0klUbasgDe7M6rQdPr6x6OgWVWUE1EJXBepcjZA6JPXIMlu7kKraSuxSlOq+0OmaiA+vEnFF1NaZYPDHVNB0xmONQ7Blh7RGbYvZYVMuN8RC0plm4lEYi8kQuVUkfCjZuk94legsa8yg7gqU3Zh2EEt+pyeehezPnwFo/vc8Wol721GjGljtxnHMy5qRFsqR84j1hSSvsQMMgLJ2eydLEvW+mQfI9k6VaGYuBp7HYwlkTv72Zl64Ami1c8oWbbXLdL3jt9fd55+GBG1euczwOLi6O+Dxydedc3S2ctcaVS2dcvXSJ+w/u8o1vfYMP7g/+3t/7EvcuHnF+MMZ0zpbk2qUdV7tz9+GBS77wwf0HvPnOuzx7+xlu377FW+9/wLUb17l+6zarGcdxzrImy7InjwZrzYs9njPXR/QLQYXH4bz79jt892tfZ5zLooGmHawkpogVJc5rdDVOw9mlqKpL7wzZaHL0guRCFY9Vr8y8grnL5jqrqRpZXk04Y2jMZCtrEKzU1taKpirTui0z38h7WnV6P5VdVFWbjKI8/7DrxyO4g6T5qFm20ZQSTn4LM0JZZzXXRkbxvIOJ5pM2NJ6su4Ty4onWaep5Erk097JUreaHScCShZFuYpVmyUzHszGnsaBsCZJogbFUQxCOI8AnfdkhtafG/HkFn0SOks1bqRWL9WAU5uqsWbMxXT0GLTb5dc8ZTNuaLPXYS7GoYd7K8ijsOdPpTYyLGWth8KbDkJp4ZZvrXQjD9s1fpAb6VuXTXNRENZpTXPZiEWyyfS+aqLvun4qO0hRgjLGxtoESH8kbJSsLKiZ4VZtJOXcW9GBe1ccYdVQPrDfG4aByeNnJ8gHTbFY2+b/uxxiD3F5HDNEJy3I45qqpT1ONy3G8YMZRvZkqia0SizlVScWMGjKiTMtNr68hM6oTt71sMDT8IytzVObfAvH1hctgqJlvE5amAL7lal4mXlGQjjLwLrbUFCRzal6jg3GpajijkpkU9OLVAF8i2YXw4946ixkLopf2DZIxicmWdeXGg7t83C7z1ntvcLw7efZXPs21W89xuP+QPD7g8rhgGRLdXByPPLzzDl//5nd5cFz5ra++xrvng3vH4NFhsk6xgi7vjzx1tXN+HNza7Vhzx3ENvvP2B3zs5ee5d/8+N69dYX/5Mv1s4dG8x/v33sKt87HnPsYLt57D4hG7Ik3MRw+w9Zxc4Ztf+Qbro0mzTvMou14TxdpTpnLI81+iZpnIjZnk0sgh4zpbIKj74ZvbjNhHm9EYiUZAZvX0WmPTUJs1shwftzVhrdxfczMSqyspCnF8H4w859TnQpCk6xM/NKb+eAR3rWrZuqZxjFE3kZNfMmw+LmrVzI0sEEmYpMI5wJuokd7FHNnolFvpc5Lxsg2gpYKwyi+x/IT3jrmZDGlRTIf0
RjfZes4SKlUXF8wZARbGcR303gtvBl+qhCp3xm2JRB0WRrB0L+vzGmyRxQ7ZOLaVzUr99njohabp5Onkz61nUUES55RdRgbtVDoqq9+MpcQfH5X7m7jcqUk/TpNwZ5Z62A1yYk1Nbb1/qW1Fy0kyjno9IdiE7WCMWdBTQTV1wPbWmKNiXfHEtvd4ElpONbfkjqj+QRTsOc2kCB6CYRLYbFFjSiKeQ0KmMQ+MqYZqxMoYR5FwZilt62A6bSA7IXI025SuW9AXbtoKJ7Z6blnDy2WBUVDh0KFUp5aqmM1rpLBXCbSq+vNFzCYbCii9kznop2nezoL6GLbRTjNYal03yv469bVelWW3kCq1zhbHynZA4yO7Ga0bvTeJ7+bk/S9+iW/8lW9z+dM/w6de/CQvPHuFq899nOduLLz99+5x/zsPyMMFD48XHC8ecvfRBW+/94B3797l0UWQc8FysDg4zroaDy6C4zhwsRusV5NDLRMbk+XOPe7cu0v/xjf5xntv8+B4QfbGt996i47zcx//OP/iF36BT776An7pkvbyBXC4z8P7F3z1H/yeZkAs2+9VBdMWDcQOa+prTc3QWhwOa9FizVmayagwJ60wetsanlhRXGthVGadFW8i4vvMxrbjO9Mez3q2lGUBGyWZInkocEfWOt8orimzvKjPPT4R/vDrxyO4g/Cqk/mXVaxUxqH91ZQxWVbAagVVqKSx7E/gosZYTdm8ywoYKusy+WuzaTUrsG8DO1rhvaSp2Vgd9W0y/GId8BrUXA2YETKuAuaQNL014Z0zNRuRzBJESR5vpU50K8xZx3L1HWpCTp3YOv0RY8QGUb4iXh4mVlTOrTEbBS3N8hWxrFmdNYwkTy51Mk8jqcOhXARrobq3ytij7pMYB07QyorBxjwNpqBlqSW3kXblIR+mUpRaj2EwOQlnLPQkZZPbSKuGo29ZO7o/lcWvxwPWF8Z6JFgxX5jHoxqorgN3Fj0t2bJqbZx1DmZq6tOYK7EGcxyVEc8ollU9DwoPt9JO5HZ3RjVThYeLysmJdqtxiir7lT5MsZ1SVj4toC86zDSuUNWlcHFBbstmHDZkpxybtWyW4Ao5ox7XWdl8NU+pg7Qg2kTQ3wyNpTSsiAMC+zYDq8feK6re5EEPizWmy7COi0e889bbrLbnE5eucu97X+XZF57nxnLk7OZlXv/eJd6LCw7jwJ3zA997+wEfPjpy92LlMIMZTmdP+lT/YE+JDI0Hx4QHRw4x2Hly4/JV3n/0kPcenHP3W29ymCvnM7C+h+bceuomb18Yf+er38CX5JMvv8TSO7Ym4+I+3/zae7z7vTdppNw5Wdi5YD4xZDTTlkXipYhib7V2slLulvRwVi8oBomfpHovl1mqF1T30CJq4U7ayeHRS8OzQjqWGnGp1pusDDamVCLBIdWLiUwN59pGUEbBseR2lPzA68cjuJsVdaAsZn3raosBL3+QqcwsUbOIZNJrpqYaHq3oRRFeAqTATU0rZX/KcCKEN3tRL/UAgE2skCU4cW3YdT4eHh1e2XBbNKgBKwOnQayOLyVvJRgxOM4jLZw5nXWFTHGJpagVFz8QkyRqgcgiOEkkfZfSUE5zOUu0M7fsXHJ9GyWQqANrTq2Q5uVeaYUTn/j1W6MGqEak+hIoq0gNgFgsSzWpiiNb0j2l9rMdTmDF8LAwWrkyjnBG+WY6iU0YKUZO33B/Vw+gm7O4yMTDKaRdm6a3hqMJRBaSYVtTs1RbrDFSAiCbsjOedjgNOCa3Wbu65yPELMo58CG7hwxlxVl2ymYUO2oW+6ZCfTVEM2NzS6CZoL4sAYxnsEbKh6iyfrOuXkUr6Ao1iTHRSFUl1Rqbk+FdylxT48zTWcnSWCjDm8A6ZbinRtQmpwfqQDumMP2Z7VQ5mGsiV2m+oMvSYdrkyGBvMl7LuYCbhqvvYb+Dy2b83Od/nm+uwXe/911eHD/NLo+s771L3LvgbFm4/fQz9MMN7r39DtnPuX84cP/CuHuehMNFJHOW7YKtJ2uENZJHRyUXT581rp3tOcRg2OB4oXvado2Xb17jMx9/hRdfep5PfPpV3Cffe/AuT51f4+blPUvumceVL/3NL3Jx72FZKnSaGTuDbhOzWZCkqs9sigWkmqBzZhkICr4R+1fsOG+Ntg3X2Bqgdc8thir5LPhtBN6pJEdQmnkrKACmxYnFV5IXBeyCXNysKtSs/SrFmqps57Ea/w+/fiyCe5KsRfPqrUPK5yFBcvkZymWt8CiXbaZtEIRxKpcyE2+SnCuAq8SlMMvNk33zAjErClPIxH+TkW/OkxayAPaMyj6dQFl5FyGOxBnrpNukLwCyPfDpxelO1nUV3GHyMvHy9Zio0jDUH3l8vuQJTlLgLByqPu6WLN0VROeG21fjObfGqr58lh2pFz00kHcOOUuaXhlJRjWBqqQMZE9srZpuNTglJp2EOJK2Cs5KntAOGJFFN8P1eqqhu40bXPpO1UyJwNhEHeUns5jeo4UYG7IrkIRE2Yu8ccI6MxzlUCt4k7gqt+RAxwuoN9By88OfMpiagc8KnTGRoVzUmqpyOatqQn2fzYbCAvUcKsBaydXNjmJb4fU66tCZQetNPQoUIBzoGSxVnZg3WTxbNVJdDBenMarpFmoWkVs5b07SGZksDNwaY5aXEo0V09QtyypuRYH0qtDmDGZ2FoPjWNU0joFNHQYUBfQwLnjxxRfpuzN+57Xv0OKCXQbx8AGP7t7BGSzuXL96mVvHZ3j33hG7cyGKLgsPV3iwTprDjWtnNBqP7p8zawOrotlz5fKen/vMT3D7xef4v/5nf5W7jy64OAw4Dt6LD3nmymW+8KmP87xPzp6+yt1LYrytK8yD88Y33+Pvf/F36CFoZeP1a1xmKa4NPGpwvDcWDqroUjCbudFSDKPF1RPcrJE7jbEeVXGn4OBTDIqofeIMK6X4qJiDRkFuthJmS7HktOlty8ULsgsexzSrqh8raNQFnv6w68ciuGPG7CoZLWXaLx67MSol37jXEfLUUKbkZbAEeFNTthgTZSaIWco2Nx1v201GGKVy8BMzg+3nog5JbrBHGs2b7GitGgTuwr/LKEgsl8kYBzVvLWukmTOOR9pO8Nosx75mBt2rDNsoflJLxhRTQ+S+WatnxXJVAEI+LsrqEbYqWv1JSfeYOgUFuquCqQ8V21dl63Z41KHm3ti5lV940qaxb6oAnKBb5dYZ9VrECqDoYb0581hWukDWnNFWzSu3Jm+VVCUh6FFBvlnIGXEOTaBxxxjY1rStXsIMZYLCxaU7mGxDOkrotgXbCqQtJPNqc8qXI2aNcwy8NASZG9RXOQHauBs1NzfgPVMc/cI/vTUCii0lx74wZ2SW9bDrIKkezvIEFGPVG9Lhryy8NcdjqsFawcJdxncyo3p8eJMpZ04HZjX6TM/DXYfpQD0JCWbUzG5spAIjJox1pS9wdDSopJ9BHCFMjKMefOf1b/Ebf/+r5I2n+DN/7gbejH0m7z485+Hd+/jZQuwvcfPGVZ57/gXOSS5fe8Azq/H2h4945849IgY3Lhk
M5+DOcdV84v2uc+PKnlduXeczL7/AZ3/5T/F//v/8NT44f8S9C+PCnQ+y8f533uH9u3+NP/drf4LPfvolbt1+iiu94wG9X+U733qPR3cfcaUtdFcvobmxNM0hMF9kI9EkMDrOSnZMPi7Ne9GsjW6TbBLzneYcNMdyEbWy7j9Zg9ZRzykpW+ap56nn4cVnL/0KUR9XcG8KWBXiC5qt30luSpoE39h+Pzys/ngEd6TgSgcyyM36EsO0g+vNKThvG0cimlIG1kQZN4Qvl0hHNrxeQbzK1xpQoM2sBmNsN9V7zdQcqIJyWji2Uk6OwuRy6AG1xTGbHLR9ibkyVuHVcwymO2MTZZA0z8ocrOhOMq6KsTkQDsa4YM4L5vHAPJ6T44LMI5HHCsoppkzKGIqU/axH4bhZjcHSBThdC6IWg9ff3aICPCePl+4V8GOy67JhWKyzM9g1CFZl/F4S+YI8GoLMhG/CzpPRjJFBm8GuehVhGlS+DSY42fc2Y6xDqsh6kPICarRWPPWRTFZBGkX5k5o7mCauvwU1VKN4/KbqSyZxATlraHc9j9A9yFIFj9Qml2e2FMBRd31r0lpBKapMTJj/ZtpWalnL8oIvL6JNoj7mED2uia5rXeypVpCKpWblNsRg8aFh5KomvKBLx5ZF/t89sFjkc94aLb1IBFahppryCBIbiaBFU6XokXh54Icbx5LHt77nMA7KVsNZj4ldusbrr3+HD++u/PTPf1qj9r71Feadd7j/8B4PLg74euDG5Us8de0yZ0/dop0Zh0cHPvzwEfv5Dv04uBgXxPHI4WLQ6VzaBZfPFp65fplnbl3n5o1btNG4sdvxmU++yoNH38SXSeyMF5+7xmdeeZFruzO+feeCm3ce8bMfe5mrS+P46BEPHtzng/cu2LXGmXUWl++7CKka0AEamxhlMneGM31HhLyDZPbdwI1s+roFVdqt9ZoNoao8cpyglI2rPqvvt3k/KREocWYOaKpis4L+NqA7ymRsq6rkWxWn7F4itBobejoEfvD14xHcDbzbaaLJyMIHS5pOeSuPOevUTJiGowdnVW4qQJWYyBVgZRakkxszfGk6cUv04hH8W2+8yYuHA9WBAmAbpaXXdxIZnw4KRST9O73xl372l3jr5k1qrAKjHXGH9ai6rZ2dMeLIYfZaIJCugBComWdDk4vyeMRrYhEhebxn6O82aV4NsNTi7CbefZgqEDekzNzeQ37/IsgcZEqnWB+Q4hE1zlpOydPTCqtUud9sC2ZdLAzfaKe97k0RdAmWncOAzkJ40VqtsQ0zt9ZZMzemIZRbYaDs3NtScI5M2dIX0g4bh0YYdYQqphRMxxxkNnzKXx6kxFxWKQktJl6bcaFM16rR7dSA8LTT8PDoGnwdKFCH1QDq+tpseq9VbBBMzUatbK65WEPDVIFJUKR5vVaWAIiwVCytkpc3rf+RSdstslkwwRmEEeNIs86uTWY4RtDtqD3TmjQBcLIc2Ia2SBdnp8Z5hDPnpDdjRODRqxJVFm8O5/NIrh0WSM748GCwW/jY80/TH77PxbtvcXx0nzn1LA8XB/rdu1zd7bh2Y88LN2/yIffoc4UXnuL62cIH9+/y7v3JXBv7Jbm8NJ69fZOP377JU5d2PHP7aY5rkA8e8D/95/8Utq7c+/Aeu73xzI3r/NxP/gQ/9dOf4OMf+0kent/l0vXGPpPzh3d5eP/AN7/8TfZtoVtj15r6TMDiSZM0lXU2hgcxjvQG+zBonRFGjyQXJ9ZJT2MUK8zKSG0xCQ2X3sr/vzz+qxreMNacgjWVnc/HNhkzVUFUJW22IQgdr2FDlvIW6s2qH1ioRCaCIH9E2s6PSXA3g75sPnoNqgFqaPErcVc573ODYfRGvTKNsHYqeTuUMlBOjrtSiUadsL2yKUro83LCJ28/R374Idy/h734EnnvHowVe+oWef8+dvs2+dabMAN75hnyjdfhyhXsxg3Gm99jf9AoOZG8k1zL66Q1YjgeR852eyw64Q69sc613mXgU6yTMY6M45GxDuZ6wVwPjOMFsR7IuZLzqCA2VwVYq9L7MapSPuNUNqDmrm5ZMQHMa4QgZfolb5FWSt+laarPYpLUdzMNgbbHjAvMaGmCZWrIr6Pmn1uDdAmNspU4LNFEpMbSvJqUj7HrWdiSVZ8k58DwJ3EkLfzpamRFwVlM4c2b7USCVxPaSNp0sRYoWwRgc/+TwnVWJbRVboB1psHBJx0jHI7INKyZZqjO1GaPgmwyVPFodJ1ecKTumaqaAGsSG9X7wcRYkj+RFUulM3sjymt6FDIuGwcxtLx3IlbdKzcyB7u251jYOHRloG7yqLfyKnIv+qiXJiFlcmZTh1bKrXNUMIkJi6txe4zk/uHIM8+9wMXZVS7tzoi3P2S9OLCug2W3YzdEL3708ILjeJvLDw5c7meMZtJwnD8gL+7S40DPZOeTS1cu8fRTt3jp2We4de2MbpOzq1fUMH54wa/+wud5/8MHfOXv/y6fe/V5+v4aLz7/FC88c4MbNy6xvzS5evUSce8ux/OV44PgcP+C/bJn19RHcN888oNtqLycPiH7orU3NNFqNB3+64hyTU3aBqNWcueuCW3LssBM1jzK0OuJhHDjsszYbDxO3bPC1StJOUHDVv5CqrS2iWOiX5ZpQhHqn1Sr/rDrxyK4wwanFLa98TjLmGtGMq2aXC67XJ/JYsKeRip87bqC1L53cYlRWd6dmgSvG9UdLOXyl+a0z/8C7dZtYefvvIP/4i/Bo0fk/XvknTuw22G3bhG/+zv4Sy+TDx7AK6/gn/s54rvfoT33PMQkhhwrYz0Wzzhl17s0VjotB3HQiT9FS8FzGy4t7gfjCMcDuR5hPRLrAeZRH59HOsHIQbeALrxUg5dTbJQs7i2UT4wghuathlxoGHmjjKKaix4GLG4sluwcdjbZN9kSy7Z2smumubYUvQtVSFslI5Mk6NaLjrkj0UjDzYQNqyElffMICjyCNUOlMqpIzBYyx2lBz4LpNrWtsfmqi+evweKqOAQ16GM9kp6qVjyDmUIuTUsLr55Ja5usW72XFZl8rWYMQvCK1ZIsjHtSfkTF4rJCRQdVRZq8dzab4gLVBT+aVwdDjf1wL5tXr2aRzNo8y6jWE58p8VUTldaXalBjCuZLwUmh9xB6QwomeqGy8oBTUFdV1/QzYhThQPhxB2KISnh/nVxqg+tP3+LXf/1/yVwfEN44XEyORykovanncP7oguMH9zl8+22uXLsCfaFb8tKtG1wcLnj7w/swjds3rnHt2lVuX7/O1W5c2XX6bsf5xYH7BymSbyydP/sv/Yvcuzt5791v8IWfvcFPfeIFrj/1FOfn95jjHLu853h+jzi/wEYvssH+BD/ul1Zwo5X2AOiO5YBpoj22RqxHwlQlzViZmPZabzCVtHRr9N4ZY+C+0Nokssn3qSCTU8+OIjHA6YDNDWYxrZXNAqKXtTIo+WzV+2qWJz69lOBbsuOnvssPun5Mgnv5oLg2Tnpj1jSMKD+PZibO9FBW33xjI7v8L9xYuhqBS5dToxzUgt5qek
+rwLD5ypSXii078s03sVu3sGefJd94nbxzB7txk/mlv4fduEn7qZ/GX3gReie++Q38Zz5Hvvsu8Vu/CZ/7WfLDR1CTe7p1Yj0yQk3VNVdaOLka+96woc67pttLOVlTO4mxEvNIjgO5rrCuxFHZu2fgOeimZqJjZHdyrEyH1dTA3YZ2NN/EW6LCKW8ujrwLJqGYO60aPE6ZRnmwc2PnKFtpSPhimyWvMULKW7F8mpgVctSqma6NZgt4spThWtDEkTenmTFilH/PRg2VSEO0bWXc5gpAIx8zpjYc2w16PIYgvKYS9TQ8NLlLfGN55Y8cmHXMtYZsFi02qFmyqca0Gz2C1eAA0i6YejNjFouLjfVqFZjVMG9l7dDNTlXBNujBCn6pdE1COByzDgj3pncIea4bggu0XoeC/AIzGqPGCY6Qsdu0heyNMQfTRHOdOXH3gppqDmxloZrGpOcyrNE6ZJmh7fCTWnyN5CKcsxvP8PTzt3jqcsO4zMjg4vzInMGy6yz7hbDOwzG58+DA2+/fId67y8RoOeitMXzPUzducNOUTZ/tnGVeEGuwt8vcvHKNS5evcXlccH5+weHRI27efIp/7V/5M/wX/+ld/v5v/x55HLz0ysfYXbnEtVtPw8Mjx3sfaojFclkVkZueswkNsNT+t5MnzwJxxFoyiqa7WyCb01xCvos5CG/k+piR595o2TVlqwGLbB7cNR0ptt7OP5RYK1nVvtckphO1stVwl6ZY1ihtjkkdvphhU+Iq3Iow96NC+x9thuorwF8Cnqv48xcz898zs1vAfwK8Cnwb+PXMvGOqS/49NEv1EfBvZuaXfujvqP9Pgyyf5c2pcaN62Ub4J5mx0biAbLSmoLRzTqdidz3gpXe6idq3UYvEvHHGTM5GcLauxKc/w/w7f4v4zrdpf+JPkm+9RX7598gHD7D9GXE8El/5ssQHn/xJ1r/+32LXrpEf3oF/8Pf5FwP+1G4vS13X5rYyXfIKiCLI+EkQARt6ZlXGmVgzKel5RrKWsEZ0NTXlIiRFHluTLuL7FLszKcOrrP9ZNXWqzPOawr4dqvVxfR56c3plnK36Frr/eQpWUFWD1aooQc/jCOaiUpoOAGUvRS/dePb1/MVOiQKPHguvNt+W7Ss3D3sN4X7Cyzqt9lNRxkKWt6LL5hMWAluxXJl7DUjY1L+b++Y2oESUS2Gu05ABVG5Kw9Mv10/OKsWTEySmBnY+NoUz4688/RSvLzoEhVI5NM3clcdOZ7XGrjfWbCxtR+LMPNL7jgxBK9Mnq6311gT3SAviOkSbBqVbqqrT1DIFIKyGcOgmYK7KIMxpzU9Eg0w1Hqc3Hs3J7sZNPvv5X+L1L/8O54cLPvv5n+d4WDmuk9Z3eINnnrvO2VO3sbc+4MJ2nK/SIsSq0Yusa1UEByyT88MjDkvn6Vu3mIdHXOlP8/Ttp3nw9utE75zdep64uMeLV41/9V/45/ja7/02H9x9j+XsXT52/RPsfOHiw7s8vHdOxuThmmQulfVq6Ls1U4AvGMwcVk98qNkRljDrdVWF6UsF3DHoXcQA94b3hrdGn9pjMUxwYWz6lnyclWtMubLsMuHTMBqthfQm36nW6E2wo/z0y8+pNfVTHFqIkhqWxX3X/vth1x8lcx/A/y4zv2Rm14AvmtlfBv5N4K9m5r9rZn8B+AvAvwP8q8Cn6r9fBv79+vMHXybWgFWkMKT0kjeGS9lXMM0WvK0c62YYiQyazDh5j+9cPPDuTwbXdtqc65AA6PJYufvd7/AfPLzHRazwsZfhze+pvLp+lXntmmhz3/gm0eUlE9/8Nnb7edpuB9eertO4yau513zK1ODmxcWTXdyKRlgZtJlKdhfNzUxQ1CivEiJZx+A4V2JIyHCcgyEGH2NMjpEcZ7DO4LgGI/U9MamAXzq2oCTxouBtAXrpC+7GrtUwA5LejbOls2vKrJfuLK3MrWzTAkjsAXU4pZF0NY9wWusFU8iNxsrz3orJg23VhXz2ozLxyeQ4h4L6EJ6+lbJUgFpX+YrnWAk07LrRazBF+d8ktDXxNfBRlLKyc9hUpr2MtKxts04D68L419qrMyZHgoucHM0Y7qJbVutMMvGi0aK9PacOWKmXpzjS6vrzK48u+NTFgTevXab3ReSA1uX9X4wWVRW9YLuFkc7iDfc98zCxbkxxDLAFHfwjGW4MRL1cXVBSZKCZm8GwZDXkx19jBzfRk1dCwhMQQhaEkR0Gk6XDh5Gs1vDLe272Pfe+/U3ZJKd45mdnO67euM4zV57myBnRFlh2ZBj37nzIB2+/zaOHj3jw6EKc8lrPz966xgu3X+bS0rl06TLrxUMe3L/HMSBa59033qTngXbvnFdv3+DNWOmXb2Bn13h08YB5932JAN354INzIjvNd6rg6v63thRu3fVsGswnIN/FDctOzmDkpJuzxlEaiznpLSrg9lJ2Q7Yk+oLFSotWzo9VJRbV+gTBeIpQYDKPM5v40mne6b3TjRPWbi7odmnaP20T7c3az21q5q79d+S5Z+abwJv19/tm9mXgJeDPAr9WX/YfAv9NBfc/C/ylGpb9t83sppm9UD/nBwR3p7WdlItZplZRdJ+Y6vSXx4OCBqcmQ2s6xfDHw2gXS/aNCux2YtGc6qRqqGVM0oI7H97hKzeuEJfO8CzRlBXzILMCkDD7dYz6XU5b9rUxq+xuBr2JMmmPMe3Fk8WCfZO97ll3dq2awQhfS44k4tw68kKZ0TkMdeZHpDzbZ7CuMjc6zOBiDcZMLtrgOFZZ3FqKCRGzvHesRFra0L0ZvQZ0L72x61b/lehj11nc2fdWr9+LL1xZiWk4CVkj5cxYs+O+sHh515c7Z/OdRERtUR8ky+YgjaCJMWNJxGCug5XJmkNGX3NFk+b1PvBgjDPmYSUO56riMjXXtfeTKZaPgPOBXQz60bA5y/BMh+RCE/e5AUuePFREWw8O7f9P3Z8G3ZZmd33gbz3DPue8051v3pynqlKppNJYEgIhJCTAIAxuMDaygTaj7Oh2EG3T/aH50BDR7uj2l3ZEN93uoIMIMIMxxjaYMTBCCDSUhpJKVVJNOVRm3sy84zufae/9PM/qD2vtc1ONVCgC7Cgd6UbWfe87nXP2Xs9a//UfYJTGSGNDoSfSIwwijGJyfmPoGAQ0iUvGIrQa6UsBcTdQqbj7Ox8uxjjKIZLEXidCtEOlWci2vb7ZyANids1tWiwHRVJgHG2SICfGoTEE+92sWMEYYcTUuBXZuZUWfaISLiqEZFOchsCovgyedl0qhuuL4c466zh6+QXa9TvsbS/4zN/5u3zjt3/MfFcksJhHDvf2mMXIarvl6MoVWkioKKvLJWfbFZvlGavlJdvS6Guhr41ZTnzTN3yMl+/cotWBEAt3v/wF+rNzjh/dYNyccffuXe6/9hpPXbvGwdWrPHPnGW6/9Aqzq0esj9+iaSUEZWDB2bYgwaL8Oky1nGP2SVlAkk9VbtobTECm1fJdixRSaOTB6ZD9aK6pMhAQupTIocOOPCVoImlnDp1B3arb3q4onU8KZkOdYiZGK+YSh
JR8iR6iQdLBpoXoTWqMgS4lz1AwcVR1RXUIno3wr1Lcf0kNFnkJ+GbgJ4GnPlCw72OwDVjhv/uBL3vXP/ZLiruI/CDwgwAH885OVjXmhXmO2ynWXI1qFpeYV4w2xxR944+JZFKwoN8sjRTNHCjEDI41Bu+gxrH44sOYLU2tSBgkYrjulIFoCl874Us12t10MtfakFD993CJgVZCs4FM3XtGpSG+vOuCUIv5shsTzm+sUFGnWDX1PE/j1DE2GCr2Pe3F832DnfaTG2EmGI7uCUHWrRtPGzvPiE6dDKJmKTw5QEpw3jm7ODown5qxKUgkResUDEYxZoyh+wbn5OjeJcHGYlsqRkQyxLTjwJs7IozV/FHGWhi1Qe6otVhogtiitGmzAOggZtuqQs0KccbNm1f58Ne8zNd9/avcuX2d/XlHUGUcCmcPjzl7dMo7b97nvS++xclb99F1dd622dzmaJOMBtxQLeyCPaIfiKMEjzUzZoxpo8x1MgSjNO6gmKDmVppMmr6j0mpDQvbXxih5yTesTYQRIcYORWgpUUSI7vgYo+0ahIbkgFSlpUgjU/rCGIRe7E9zCmcPFIm7FICBSoniIjzzYppYG5Z7UIhOb60TxJXM2qNqISlstZH25szvPMVnfuZHedyUbeyQYMUvamIvd6bIVWPYHyzmnJ0c8/5b7/D++/d5fHHJpm7pq7IZlc3YuLa3z/rxIz797lscHu7xzr0HPDg/5erBnPle5MaNPW5fv8on333MvfvHvPLi83z4xlMcXT0iHCwoyzlD1xGkcnYJ9x6dubdP88bLsGvB4C+DofysjAkhEipojpSiBuW0wswX1a0KaLWFZjCMPOeEDkrTTFPTLuRo+hiz2jYGm2XTWqcf8pwck3fyrkJOk+bEGHPRXVlzCMyyC7CSY/FRkNYYx5FSA1Row7+mmD0ROQD+O+B/p6oXvzQgVlWmufJX+VDVPw/8eYCnrhxqjskzP22DH1PntgDmWJjEiwp1h2caRm0rw5S8h2qV2sz0K6XoSLa9gEbEsIVXczhgMuZq6oVclSrm3YEX+2ncNtcUc9hLPraOYIIbtaRzDeJxczjeBzqJXtTeKJPV2yKz+lGvDQ+48B8m7MQKE449mTypGMWtqOHs4rCT+sIPJqMvj9VTfBEzMTeMsqWtIDGQUrauPjqu6FitOgvJFn+G/1umqf1uEQtGCckEH7ZjSG5ZIM6ogZjMAjnn5J27TSxGxTPmTCyBvjVnCggxuNVxS4Zdi0X0pb3Mnede5Xu+59v51q9/jqtzRdbnxNoDoz3PBnLrFvAU7bu+nr5X7t97zC/8whd47efe5uGX79GWhrHOgqUzhZiok5hkh8w7js8TaM8i5GzBLF7ca4vTVY1izoM6gqgt2GL0EX067Pw1jDkjMVlnnjubVGOihkQPxGSYt6iYCKwFqlRaVnqt1GhGYX1TCkZDrBINfhEYVWhBGVQZtDFSdsrIHBOjNkIr5JDcfdMZU0EsgjCYP7w0YbMd+akf/UnOTo+5c/Mqv+nf+wHKm5+nlrpT8+acOT675P5lz5ZITB3L1Yrj8zMu+p6zUhiqsq2NTRUIHQ/Whb/9M5/l+GLF/qIjh0BMjf3tFkmRROXl517gbNNzdvqY2Szz9IvPU4YNaQgwNnLu6OvAyXLNamO00IRrQRxnT2LQoAbZccclRIJka7SqvV81GCY3IIhEY1dJNbZMiMSUiDnRhYD2vs+oHZqNLTOOlhcQI+SQbBcRAikbGyZ3GUUsZ3lyoZXpd7LJepYTOUZiNAFfjC7M1IbkTKqVOowU13L8So9fVXEXkYwV9r+qqv+9f/jBBLeIyNPAQ//4e8DzH/jy5/xjX/GRPW7N5PNO5A+NUq27FnFzJsyZUAQXoQhINfpbMJ50xYVQOt2kU+yVGsNgwnklUZphhtUJy00tE7OvzqqYiqqqFW4xvnytAzFkxMy5qaEyBXmIL9JiM2YFMTB6KPfYGlDd/s+LifLEr0SsC0dt4aLNvm+kUYq5RE4HDGriG1DzWHfalIUVOoNDDbeeIiWYqHDTMjV4Eo9MoJVZGNvzFibveEUpXuBqbXQh0aQRpaO4J0gLieCjZ8TER4REzJEUssURtvLETiLayVRUICakDMQolNos4iwn+tE85GOMLG7f5rf87t/NJ37jJziMF4TVe+jyHPoVjAV/KWjFFnwSBWlr5jHy0nOHvPTit9P/9u/k0emaL/zsm7z505/h4p1H5M1o15O4Bw/Q3NRsJ/SXacGcbYRGPSjEpiWtFfWFfwS64BoDD2SYQEGRSIiZ5kpcRelSMtgJzAo5BFvgSiBFc5isKh5ACTWZFfaIMkigF/MjL0HoUf9aGMX3GFoZ1FwMJ2RydxWoQs2IGBSlqsbUCYFYDHsmBIbtyIN7x5Rh5CO/67dz685tHr73NuX8zKbmrgOBVgqriwtqiEg34+zignW/5WK9YjMMDEXpWyDNZty5cZMuJR5frugHON8OzFPgxnyPw6M9jnXBj3/pIZd1xnvrNRfbnsNHD7nz4G2uHz/LdZ6mk0hLmbUm7p9uLIc3mCeOupmaiv09xERLTjAQIceZLTmjWRFkGk17go2KqAzMJEA0gWGMia6b03Vz896JibZ+EthS24Yp+9SCeZL55HeJlAMhZVJ2fjzpA1GgZoYXJfiOK3rn7hbMKezsD1QDdRwoeaDOxq9YU381bBkB/gLweVX9v3/gn/5H4D8A/m/+37/9gY//xyLy17FF6vlXxNuZLrYp0s2WCOMwmjAmqS9T3WqgeFdJY6CZN4iK22yKGyo1W0i15HYDiej2wBW7iYs2xsmLCxPRlGbBB6U1wyb9gLGu3jrO6pxym9qrydcFit/UoTljIwqlCtEjzsZWzTQKrNQqQCUFZ0kYxkIQF/Rg9sfq3YctdQwOqKUZRVENqLF0JTtkJqZG8cOPIBTHcGRSv2HsmOTirkncEYP7jDtzxLxZxD/fvXi0+C7D3DqrNkLOBEmkOLNiFAzbDGIh4SH4HiIkp0ym3SQSgxkhiSoEjL5YKqUfkZBIWUl5xp1XXuX7/+Af4NWPfhMSzuDiGDYVVgVdGmV0EmtRKpU10gWkS6DRIaTKLDSe6RrP/PqX+fZve54H9y/59E9+gbc/9RpptUW0McqIyXow58AQyAlETJUbg70mTYKLmhK1jQataaWUJxbM6myVkIyZoWLBIzE4LTcl76Z9TohAnjjwkaGZM6ZMh71bDVcKNVS2AttgO5kalB4cejGIZlT1Im/XvcVWujLXxkNTbFeDkZr90iSMLx8VpE5KauFytaVfXrCXniMJrDdLhnGLsKANI+t1z/nZOQONdVHee3zOo4tzzoct26b0KsQ04/a1G9w62mfRRW7fus6tp25wcn5JjJmrVw/4bb/lN3Hn1lOU8wvuXDtk7xe/wI/803/MvfWad48f8PTxfbo0Y6ZmPz3WyOOzLU2TTawa3BrDvYxitB2CM5SCJN9rJF/a2wE/q5ByROOAlojkim5HQqdEX/judXOGMjKm6oe13VfNG8zWXL3dRdsFdc6I6RZ0XSbn
lOmM6mViRq0Wmajp1z9/HYO97FBz7wXbjlIXu3bm6MrowZ4rh8/2U+9pEPU4ae5WrOarWqeLnFtvmtLdpJy86ZloP9l1nFOeo8bbOFliXLxYrVynZL2RXItr12BFKMrI978iozgnn2cKeQkZOCXEqxNC1vXa+Fb6htmSu2fpK8lDcZt+PrnGx25O7z89rTdPI0NovrqTHJ3Sfx5GRuFg3GndTYaWJ7efEnC0zJJ4u+uuqNYoyOoVhYTNng8r5GPI7D1rpDdIY5l6KbHZ/UHYnWBsd5sXstFwuITpGhj6wWC47v3DKnw+mUtVfaiXD53ntx60OGVFDfcedwn+PlIXduH7Ocm3Hgxv5CQEQt6EKcLfRFDI7NwioWmq4ja6JoZn77Vb7wy79Ae+lRjpZKLhPchXfjDr6FWxziMViraIVp62qlatdzShlxqdKqHSqBSKJtAp5MFGfahAwp297XOwvBtoW1WLEtmVYUr4pzhaZAg0GfrTP7CiRBGWdqQlTbzccCcSisUmYRTYwZc6KU6k0livdG8vAixr9/i8d/DCzzB4FP1K//Lpat+hfq9/+e2tX4WRE5KyL3qer1N3shEaHxpvIaYiZ467C9YL7KjaWiS/VKRhzi1bxf1JFKwLlU8bqq8ip28CvhATQbPyNbJqvzQvCZkIzX7l1r7m+I+STXJCOnFm+rNPZC1enPAylHhMK8NOyvI0Nli2g1Byp5hEKkDks8XjxDMuXq2DnmXHDBsM7g3Ukhz7k+LxHaxi7sIdW6e7qov6awF4E6F7BORTjB0E+ev4FklNe9xqZFqx2xgvHaRwaHEdQR7+i2jEZHjohzXLh4Hw+88wN89Hu+n7fdd5FP/ct/z2K5qpS8yt4JgQcefBuz2Yz9xZJuiCiOZjLDdwu2gvGvz56bcrS4w3K5RBNIMWXnarViuVxu2B+K4puAFLH/O1gzrN5o23p6iFlvcMxKN5MIvqVpO4ud04i6YulZadhE4sU+nsZVTh3407/n7pnF6eN+AtmUzTPG3Ng3XKRPqnz9np6ymTjZnalQdzXjen2yrCnK0K8YKjcfDIYSb4tBrgvC+EsFKDnVXdpr3lZd7FTFfGwGhTYT85qYliyX+yzmZ/BBWa5sVRiGJVu72+wdLTl7/jy/64d+D3/9//E3uLNXmM+pIfHjLMKiALVAzkrwVU+C3fPOWeedYzYJfjZX2Vefe4rpGsLOFdqzM979rkf5+Ds/xr/8p/+cl7/9bZzz5JRwjQ2DizjD/DHFe4mA8xbSIZFGHVLMD6cUR86F1RA57hNdY9f+aHe9qmZgTe0dGldznlGKr3CBmM7GiVCSMig2qM6JlJQhwhCVVbTfE7OyHhKqoVKyC5Pgiar4Umj4rSnuCvwbMTLv31DVvwlcPlWwbwCX69dXgJdP/ewr9Xt3FXcR+bPAnwU4M5vYVkMNavG+2/Cq1UHTtCae8N46x1zwWl0gfCZ5Ye09LrPxOta6xY2p1OQUG/i5yoyYNEIboIkG/Tg6C5+tYpaCA9+Y2qxujVU9WWzrJMnXm0c4HDKraEVAxyEq1WmwwhulPjdroe97227ryY2YUzIM0NvnDE1Tufm5dhaJUDv401Fwqid2A5sztelKx2Jgz3Onfua1p/e1HjT1LG2eO1rLqlpo8tjtO2/Du+VqxbQx+On+R97J7/zEJ/joex5lsV7xpS9/1fQFvrFSI0LbNpw/dx4Rx2K1gjzgfQuhpZntsD2bcP7clMPDG9y49rJFt1EQr/SLBcvFUe2ezb0vOAdJWa6XLI9XrBYDJyklrz1A43dGjMEYMKWYEtG7GriOQ4urHtuZEoq5RUaLZNzAeIpVC9gcR4P8TnQDvO6Yj/fB6Xd1+gTan/F1QN/gua89Z/Z7Tj7hKHYxNldJ8WQGtfkRgyNfq6bVN1ho7r7mqiCrH1gv1oRuahF7ObNcHHJ8vEcsRwQxLHeVep78vo/wjS88wzsfeZxvf/slnn/lGt5bJIbWa7B+UmvksPuCUneKNUBksw4WZRIaYjIIr5t2bJ+Z0s4y+y98ib1D4Yf/F/8X9l96nh977jnAdp9OBG1r7pcYxdCCcMSCdnyuO+3xeEYg2AC0KH20pskXRYLNHyxQIxFFaPA0kpm62iN5BzoQXaqGdI6chF7XRusukZwhRmGIwnKdjRaalFhAK6LQuEoU8eaTpb9FsMzHVfWqiNwD/LyIPHX39aEqo4rjN/moC8TfBLhy4Yx6b86ObecpuV7cYph027S2ffSeIjYQcSWjzjijQQUfvVG9nPmruyrtLJ5NkMGYaNSGQOsDjYjRtLhjYgIJxpSvnQPOLrucIurMe8J8ITKOxg60OA77wmrIrNc9qGyCidng1uPQrhBzJhVjF9y9fa8cajWKWAgW3ZaqkKWUYnS+elxeX6BPP2rHVkbA4O4V/i58+C1fh1OLUKkYrN/YEogIbduSUrV7EIf3De984r184B1v51zr+epXnuPF51+mrZhvEbM88OI4d+YM/bpnMZ8zbQvkwqydcO6+Hc5f2OGla9/kueeeJg49SiGWFf16xf7+HjEOOA04zPdGY2F5vGB5sGJ11GNxsHfj3W8w46yfsWoECpbIhNh8BBu6lew2oc9aLZvBCsOZ3S36PrLuB6PLqWzmEdahnPzS0/3369/Km52H0zuMu/Ycm/+F0brg9MJlO0ipO6zNHOf0rxMb7llrL2Ovsincp+cHY/bua2cKJRdWixXdLNBMA6lEjpd3kP3MdGiZbXXMmm0a57j2yot84hM/wK2XX+Wf/KN/TNuGmk4lp8D8+nBWiPvlyswhavh0GZuMCqeKKr4VhpR537vexX/93/5Ffuwf/gSffurzvOf7P85P/tuf52vf+CaofdYYE004Keyi1V7AGSqfrMrWQ9RUVbeJKXNR1jHaXK9PFsmZFUkW9alSkGyKeCeRTgoz7wnBxJc2w7EwopgSiZ6kZndSCqRYWA/KKmfWqTAkT1Lr9kPwpmpVR8CUsUV/Cwaqqnq1/n1TRP458FHg1RFuEZH7gJv16VeBB0/9+AP1e2/++mLcTefMasA5Z5xzZ8U3uIBzgVKHOGNXUerW03vzW/fOYJziDMcaO2WzhjAcsWnE0pScoH5CZqiK0QZQEpWZU2X6SZXsCrmYZWsfLSpLSIhrGRIczJcsVwN93+OcN5HMKTzXsHLDvFMuJ2l2my31KVteBVGlDBHXCd1kQhwGcxYclTonR67+fep7d9WzE9xX64Jy6pzW58ub15X6Ck4MA08pbxZIVfOkb5pAjAOCsJZA10148h1v5+Jswo2DOXu39jjau4ULoYZ/204peM/R4QF37tzm6PCY3bORUgKXLp7n7LkZzz7zFb7+9a9QhjUoxDKwHo45PDpkPfR4P8GLDZtTTCznS5aHKxYHa9teA6/lnxuL5fWHr9Y5UkrEIdG1GSctiKNrJ2hemg+MD6cG6saHf9+73s7F3QlXr9/i5Ru3OThe0ifzbb8b7pLaIW9+45sf9M27Gs/Rqe+eQuPktU9/zUuO+oiyEeRxajGvgPNpiE9Hl09qcMv4EeyOy/m1x
cSKY7/qGVYd3ayhP1rQaWGtDf3xiuuLnsNXj7i9d8Dh/oKf4KfqANhmGKKWv3DXkRLBhUBKiTQMoGbR7ENAVQlNY+JBgT4nQmuMs4Obr9Ks9vme736C/VvP8N/8+T/P1595gb/11/8uzjtStF3ztOtGSR4+BGo7XLFzg7ZyLmRJeGwWJtn0BylDPySz0MjCetTNFBMgkY2aKhIJUhjE0zRUr0m7OU0NP1CksE5GvS7JrFNihj4ZnToWU9IjQqOKqiMUy4qOUuyifYvHb1jcRWQLcKp6XL/+IeD/CPwU8GeAv1L//sn6Iz8F/C9F5MexQerhW+Ht40nNnPiPBy84XJWTWzCxqik9MwavaElGHSoOaHFuoA2AmI+74EmpsJmTS4Dq8YCYL4xNoQOlCDl50IQPnkIVVBRFxZERUpEqQKhWA1V6vI6ZfhgsFCFGghvlq3Uan7MNcjb0PKPZjaKZguGHo89KQaqPuxJ7oxaGtiU0wrDuKTnDGxV1TnduOtaSCuuO8M/JLkHG+/otC411fwal2EwEPdk8F2eL5MYytwk0beDBy/ewt7fHN7/9As988ytce+X5mjtZQ6gRfNNw6/Y+167f5HB+zH3bie2dGRfCNt/61pf52je+hJaBQiLnyGplfiTrdSK4DhcaSsrklFgvlqwXPfODJWkYMXU9OUx3HSM7QrLhnBuDxhCw6jmSelpt8b5FBELXkdc9Wn1tYG6vJeC1cGm7457HH+R9j95HPySu7c/ZO1hw5+iYVT+wij3rmJCUzb56PIN62pP7BEKqiLcV8JGbOjYDY70f4ZeRG/saFOU0fKcbJe2bnOUNzCcVpRFeF7w7vsMKx43vXLGA98P9BeCY31lwM9+iX0fW60geLKUKAXGByZYFaTvvodIDnRi9efyMUsOlU0pV3FcZJblQckALBARcrME95rb66qu3+HN/7n/NE+95Nxe3Z/zYP/gHXL7/QYLL+EnLndWx3Y+q5lrqHMUHpDaB4yVTSiGVXGUJYmEetZlMqdD3NlHtyTitrLyieGeZEl7B+4yXQhbBr3MVWCpGmKhIAplYlOy8/Z2FISope1KuAs160Xp1ZHHGBhtda38LLH8vA/+8buUD8A9V9WdF5PPAPxaR/xJ4Efhj9fk/g9Egn8WokP/Fb/QLVCFWaa04R1LZRNxBQ1RnstxSKjXIFGOxQFJPyiCupQnZ7EmTEr0gUnCVAVjU1RBhBRfwrgUtpDQAQnGWolQcJC14CVWEZNuzVAIp96ZoRarAxdg84gMFcwiMKVkyfC3uJ9xZM/7yWnfBo7qsmNDqpEkTI3TUopyGSM6F6WybydYW/XK5kb2/6RCO6gWz6cpP/l919IkZb9a3gCus5a+/y23e5/hboyaTd2smJ8dWaGhDwOeBf/CP/ymv3rrDratXOTw6Zmtn1z6dyoaCtlwP9MmUp13X0mTPNz//BZ5+9uu25dRM1sT+0R7L+SE5Ko2bIN4RY2RIK+JqYFgOzG8viKvxvZ5Gm14bVvKaXc5mEahK5WQ85jAMtM4jzhPaSR2UK03b1MJmg74zO1MmbYNkMxzzk45LuxPWVy4yFKOzLfuew/WCR165ya39OTtZWa6N5qZ1cbH3YAv3uDe9661uLApee7pPFue7NiVv1m2fXizqNmA8/xtqbfXpP6Fznv6VctdXWncjw3pg79U7m8bkJCxm/B1qkGvlZo/UxNO7SbuujI3mvMMVR9NNNkW+FEWHRE7GbumKIh2UYOToEAKL+ZpPf/ozHB8c0E089zxwmXsun+PRtz3GT//kzwNCEtuNO3HWODlvnkFq97ZZBtpilxlzC0p1Fc2sfaynqkCK4xnEacJLIYgjeDPwW1Mqc0ZBIiVHnJiVhLiCGaSYCLJPyYp61cbY/1kRL1hdtHOhtM7/xwdkq+pzwPvf4Pu3gd/5Bt9X4L/6jV737p85UYYqYp7SdciTsnV83pkJVMbip6L6+vbrIXDBQoAVGh/IUihxqBJdpfENquazoeKx4bxDvW3xijPnt7FTybXTsK2YpSsVgpn4FEu5UWCdB3w7wbsB70K1cc116FM2ylOghnDY6ytUl0ijm3HqRsLZkLIUw6E1ZdbLBU03qaLG8VZ7EzxlM1TjLqz0Lj78G2DtJ1DNyc+O3/DOMUb9jRCTitJMG5IOiLSoRrQkmjKnmzbsHx5xeHi0MdgK9fininVqUbamE3znmU5bPve5L/Psty+YHXLJxD5yvDxksTymqOJDg6iYxL7vySkS1wPHtxcM83RqVzF+iDc6Pq//3unFLedCHCJxGAhNS+MDTWjxQL9emt+3t8X74tlt3nbfBaYBSkymylBBUsL5RFeEqS9sdXCm6bj3zJQL2zv8zksX2TuYczBfc+fomOPlijSYYhPvqo//6WviBOK76w1rbQZOfaS7+PXl9Pk8tTjI6WP0Bud//Frufv1T/3uyazh9HFUIbWOCvCr+cXVn4Rs7billvB/Lzslw9PQrhcZbgE8TmG1t05dMiYkcow3Wq8ZgYFRCg9NKs9VStSHCZLbNw489DpOWoywk5/ANnL+wy7kzOxzuHeFDRxsCKQeyZmsyxeMQstgMxQqr7bKjFvrTnkLVuEkwAaZ3VWDksVmBKORshl/1iDuSKcOdoM5gM0WIuXrGF62ogh1jJ94yBJyrVsCOjCOElrd6fEcoVLPCcYSuuiM6IBQheMO9PeN17hhyIWYL9SguoN5VpzrrIhPmiBwEwugX48SUh96jLkDw5DrQdMG6UkKz6c7EFYqUqhpTo0xJMHtYjEaJV4YYWaSIeo8PgoseyJRS8XEdpfrYBVCHrLlQw21HrxM7DhtWRE3h8c6ZiCrnyiMeNiq7N1JAjo/RFmB83FXY9dRz9E1ep16FY/c3ctoLNaxAlaQF8VXRKUobDCqbL+bcPH6Wdz72OFJaPvXiSwRvN6sNzjBpfOMRyWxtd7x6+wYvLZ5nNawoYvz/5XJOjNGyOtXR4CkqDMNA7Hs0K7pWFrfXrA4iWk4LlORU3dG7Pu+po3Tq71NYeLG0qDgMtO1gRl7OYvFyDKSS8KHBOeHKg/dy4dIOpY9osmxdili0Yl7gaCkMeDINgaCeximXp8LF2bm6I1TWKXN4POfMt57lou+YL9YMQ3/3QFNPFfnxfZ8qGK87hac/6/ik009WNsyZk2vgVH8u455QT3270j8oJ7seTg67GZMlo6TWxbttOkK1EY4pbiA981sKr2HvVF2GE3JODDHThJbWO9QHsveUJpPjYPORlND1CpWCd2PjZlTnbtLQNQ1/6k/8SdqdHfxki8PDBbdvXOXxR9/Gyy9dMwzbe5rg8H5i7oypekMBEhxJsCaw7kKyGoUSLZt7HFXzqgJyTUvypVjYkIDmjMcYPaaEVpsXFbu/s5o0s1T4KuVSlfk2S1Cc1Qh1pEwVZZ5oaN7s8R1R3BWYp0xfhK7xhJhpvKuT9GjFWs0qIGVs2yRiMXo4xNuQQVwdxiZFYqZtJjW71DipPngkeIozvM/4psY/d6GrHba5NI6u+EUMMspqpt4iUvt7x7IfmC8WEEyanksm
ayaXZIVcKiRaC87oZQN24bsAEmoEXy6b+66UUm2G3WaranhjOrmfXtPE2ddy8n3lBDaFu7bZOr6AjIbEb1AANwVBK66azb9EjeNeNBNCi3nHC02YkGNGVfj8l57ia195CcnKN77+6xVuwzjVaB3QOvr1ipdfeY4vf/VLfO97j8jqiTmxXK+rwCOTY6JxDTkWYuwZYm+wRSzMb69Y3OnhrsJ+6s3fhW2M+JNWeqF1miN6tYFyFNv2R/P8aXW6+Y8QGlLMBhk0gbc99ADdbErxLSVliAlJSiOFnBoGFVKyIoZaAcIpbeuqrD3ivGO3gYvNlN1Zx4cefhS9cZuXrr9qXWiwmMecUqVgKjAOx3WswCfnlZPP/Np1+y6EbsTkTv3kaULs3a+lGwfC0zTIk2VGNn+PnikjnVe1sFyvTPjmLNRC6sJw4lBaYcu6YIXQMAzREqKahGs8zjua1iLsSmsB5sNgFs792qDV6QxC26LFMZnMODg45Fd+6dP86B/7I8SU+eCHnqTvH+OJx97JX/2rf90WmNZYOK0EBi103jJNoxaKKMEZWWO02VUvhgbUBaAoVQdju1knZjuo5STsBqV6WIGviuAiQLVvsO69zn3UaotihmHiKpzsTajpXc1QEI+eOktv9PjOKO5qtgEFGPpE4wNBlUaUgNA4T9FgNx6FoeQ6cAhmGCaZ5KkRaJZmb1COGXaBYX7G8w6E0FY1qKKuoSBkacwlEEU1WPet2RR4iA186y4gqyLFcXC0YH60oLBE1JFLolTcdIxCHqEXralNJ2a5NpFvQ6BtW+IwWHecS31fpqQttZiqsLHaPXkInHo9+5xjJ3rKZ4ZTxmIKo3hmtJo93dC9tpNX6u6jdmKWCKOUrEwmnbn+tQZtaUqkmDha9Lx86zYz4PDoiKZta4aobJgSxoee89Q3v25iIc30/Zrj430o4HyDFvNTKSUzpLXlhjrIq8LyzpLjvQUlKpYFe/pYnO7K74ZnxIGrrn2a7W46geOlYsiZlJIpUVMmNwWHqY9TGmgaT9N43vn440yallx6krMoRfEFGRQpBQ1rulaJQ8C7SPBQXMBJsIWu7kYFQbyxb2auEFAQx2xnm92z55h2M1arJfs3b7FeLimaThjOpxgvVjPHa3hc3G1RE9jMWja7uurJv1kDN6/Fhp7oaoNRVCuDZlTFjtfbybW4kWQpJvbKPYirRn+eEWkyqLD+RM1RGM+V+IALreW3rnvKMOAaR9N1hKahCRYDGHzAh9YiC6tL59BHQtPinOAbT14K//4Xf4Xf/8O/l+P5wIe/+7v497/yabZ2ZrzrPe/hqa9+A+89bfB1ptHg1CI1S7Z7rtT7cGTfOe8I3tkO06bwdcdtnkR57DHKSd2xne5JY2GMq7JpxszT5wS0EbFgFhcCoQmmmN7skj14sxMuvy1cIUXIhEohNOVWg3klT1qPFvNLFzE3t6yWhyohmFItV3zM1QSilKt6zORi3gcEG9j6uhr6qoj1KdoV6kJ1h1NMseiIWczQqJiQyGE0Ke8DQyzcvHWH+XxBKdbViRjepjrCnTb8tazOusLncctc7X5Tpmlb64xjxHlznwO7wbwY/UuqY+RpTPXNdmV385NN0TXqFE8Xug0Wa89+kxcbb0hfF58qvVehaVrz4fF1tCGWUrQ4Oua9H/xunvrcr5GHQte5zW8epetaYSqPEOOKVX9MilbgzKURUnJ0Xcfx8T4pmVlT7pX1Yc/R7SWxH28sfV2XetcHQDmhB9XWMRgkRjGZtz2tKpIEcsr0fc8kR3Ix9XFMPVFtAT9z5hz33XcfIa5MHl88w7Cg+IxizqbUeZF4Jaiv7IZqf6EZfGNYblbwRr2UWnCn2zvsnD3LmTNnjBU09JYi5UzDITi78avfSlGDs9LQg2bG8GxqxymMC3ct2t40CaOZXc6GF8d1vzEp821j8ZBq/vJjA3L3tXLqOnwNFAggWgkDZnfFa4Pec2WTjWeqaQK+cUhfZ0VFyUN1Tuw6aFt8awEwXfD4JpD6tQ0ls/noTLoJvmnxTcvt/WM+8/kv89wrV3n7k49x/sw25y/ucPnKJZ5+pkGCWZtk5ZQtCeDtfsnIZpaH1KW4fk7xNcN29FyyTqju8A1u9nqya3ZuzJ+opbxUJEB1Y09iC7HYQtZ1+BBo26Y6yzY2DxDFS0bLW7OgviOKu6pBLTHZxLpxpd6SgnPFtqeVX12wzFSct0KJYyiZUocOJuUPpmbFhhFWLOymdWKFvakLg/M9zgV86MxAn1wpSAX1DTHnmrMqdlCdTdSXqwWrwYIfckwGszjLU4RaK3QsqBVopqY+jWhm/dlm0tF2Het8CvqpzoNt9WPJm1c9OWZvjrufdKvWwb/27hshCtn8840I4OPPal30qF1t0VTFZQ2NeLzaIqglklLipZde5H0ffhePPPQgnxGQ4Ksy2KMY5GQDpIGjw1vc3HuJ+PYex4xQ3fpyGYhDz8H+HUoxf50yZPqjNcs7K1bzaAPyEU5586tr07KOMLEXcwUN3uOCg5iqdTTW1qp9nWOmX60J3vI2+2rzi3c89vZHmO5MKWujxaWhEGhJTon0NitoOnzxJL/EFVsYJRgN0xgQiqfYNVN3hV3bsbU94+Jsl90zZ8kpcnR4h+OjfWLqKdjW3/mAbxrapgHsejHUxhY8s4wwNbb3vsb0mRDQN4GubWmD/ex6vSalRBMcR3rAEAdTdHu/uRbLmDU4XkmvG8qOC6O+ZpZz2r7AFMXjkB9sEdWRJoxYcQe6rgUVhr4npcG81FcrUkq0JSMTaJsO1wiNm9LHYSP0a0KD947gA+tFz9VrN5Fmws0bd5DlEevHlnz0ox9kuRp45ZvP0uVCVE6Gsd5vhJLU4o76qnCX6rHuzOHDaf0cxo6RSpHG+gaqmTSl1jTnAxRLmhjhWnd6lyRGAW8nE8JY3LsWX21Jgq+1LCdK+W1g+avAOhVyArEZM7mqQTOZJkgVN1n2oO1prQtKpdoCA0X0JPTYCW1orHPCmw1qzaEUZzYEmjOhTfX5bT0hiSKZLMKQB9I4Kc+2sgQPwRWW68jo2aJaZc3B4SWYQMpJVTVmMzxjxHdPXexqFq5D37O9s0NJFoHmRwxu5NtW24TRI+RNu+y7jujpr/MGz1Q9XdBPQxh3F8mTTs8WqVzSprCXkminnVHWcEgSks9mw0rh1q0bvPzMU7hhgDoHcVphLXF4cZSsDGng+o0XwWU8HlXP0GcgEdOa/f3bxDQg4qyTXvQsD9fMD1Z1a/Tmu5fNh3iDQ5OGWBuFUpWdBp8huinyqjZYHfq1KSlzIVfbgcZ5fuBjH6NtPIWWJIqjoalhMVkjvrVrAldDl7OJ45ImVAtJLZYtOKDCbU6EtuvY2poRmhm4hr3btzg+uEO/Wld30XoNh8YcF+t1PAxrYhk1EHYPuEpf9b4WJu9p2tY6fm9W2jlHRJTtnS2Cc8wPDwErQq52kVqD208f7NdeK5uG4PThF1OQbpg345zA/hNVE45tXtBZ1GSKkb4faNoJoWvphxVx3VvzFSN9NhaZmxo+L87RtZ0NWLF
dhsPTNYGh73n+qW/xvZ/4fl64+jLL/X0O50uu3rjKk0+8m2987kts75yhLoVkVXzwBBwl+7rhK7i6A/fB4JJSzHMKZ3tiX6GdkrM5fIp19k4FKaXOz07srkXHXGhbLHxtCp33hK6laVtc09DNprTVW6sNjlZsZ1+0mJr2LR7fIcVdiBrIOFN4ZavfoSbfmD83diHXtBYR2w6l6kEtvgbXCgQxUYMLjWFwwdeuPQBVKSmG5ZIGC90IU0pOlIpzF+csxxLM+VEy2dnqXpIQ04D3YrF5xRLMleonIx4Rb4lSONvhWbW0iX4NklBMUJH6gTJJbO/ssL+/j1JFD0U35kjOmcH/Sef/2sdJJTuBbE5uOLmrkJ+GYsY/r4Vn5K7/FlVKqXzjbHJoHLaIMTpC2nOHxZxbN28wv7lHcCcdjzVn5tkidRdVCHYscrUlCMJitWLdLykl4ZwnxcSw7FkfDxztr82m1/qhU5/nreAlOfVXGan7ZjrG2NUbBuqdI9cMr6zKMFihsUGWVifPTBOc2UOICUpKMoe/AMR+MHjFK+I9LnvUVQ9zZ111wZPJIA5fqZYidnNvtQ2L1ZJFLBwe3GG1XpFVEBw+WMfehBYBo23GgZTjBjp0zkEpFAcOj2bbHWxMY2txKOrxonSzlq2tLVI/kLJBOtYISZ0zlE08Yp3mcbI7HI+3nmD5m2uw7hAr9UZVTAjn7DooKpR4wpbx3jrifugNohShaVu2ZtsMbcfQD+R+IMdki10uTGdbtbNt8L5hSJGcExHLjhXnePbZ53jwkYcYEly/ecwrr1zn9vyAdz1+L00j4Ku9M1W4hHnFa9MwaCFoQko9riHYwoeRL0TE6kYxyBYJuGKzBKm2Fk6cjUjrAueobJlKtbRgmLpzm0zw7ZSmm9BOJ3TTCSE0NG2gazy+YvupZIY4vMG1fvL4jinu2U0rLGPs1TIoTfBE9fhsmK5UlZdF3tUDVTmtRQ2rtRsHXGigMXxu4yPuPLYtBDuRLTIMhnWGzvxiXG82or4lZ4d68zPP2tcNllLKYOZZLuB8xoeMjJF9ovVmEoILFtiBcdmlnuCwwdXrTVgK8+Njzl24wGxrxnKxqNu/GrJQYFSw3sVj5vX/Hgv13dQ/Hedqm8ddVkAim7/09B2KdYqbVy5jx1aVmrUztO2iQ4t97bsJ0rVcvXHDVKnIZhtaamiIcwFHQ0pQJCPOUWRg1R8ZtFDKZvHr1z2rReT4zuj0KCe7jrs+11sveuP/n4aztJzuOzmx8mXcbtugrutsGJyTYe539g94/PGHqvsnhMmUIZc6g5gaTuuzDVjFI26NOK0K58qGcFXa7trNwjedzpisB1Z7+xytEv26rxQ6k+Q3zQlOPgyRFO2Pih1XrfoLnG3jBezmqfx8FUxprWrD/G7CdHeGU1gMgyUk1UUOHClbtvDJQnrqmqkNxClVxanjemKVUaciFW+vCnM1gd5pv5sQDIKMMRoMFCMqQoszvcF2Q55MLJglrg2KWRZSntBNZ7TNhLZtSMmYJniHBGG57lmtM9PdXWvUNHPv+Xv4//y//za+KN7Z4NQ+lpnFFZdxCr7af2seLROMvVLEoFiSQ9QaFCeF4kz0RHYIyQaop46Ok1rL6mzOexvcuup+2zQTfGiZTmZMJhNm09bOedvQtWHT6cdYLCf3LR7fMcW9uEASK9w2YDCeZ8xAhiDWuRRxUO0JxAlOLeFIqzVB44J1ByEgTUcJLe1kUt0iq/c7Y5GBLK1haE2HkKBG92Wn5uaWU+1QWzTXLsRBKpaSPsShKtAaRgdHxQyVRmzRuOHVT9qdMmHyFadUS19aHB8z29khDsazdmKdTq7b4rcyC3sjzvoJHv0GmPvJs173M9ZmnZwdo6oVxuBuH4TJ9oQwCTXPtpq0Kag4zt97P5cu38s3xeGaZlNoQPHekaMdDxFwTaBtbRfTD5FCRrwSU28sqn4gLRP9QWZ9GE9qzCnF5glf+zd5vVWowDDOk2NwMp8YoQP7sqRC8cV80otRQg8O5rjQ4eowsqSB0hiXWdoAUaoGo0FdQStLJ2dzHKRYRychUHB4FwxvbRrOnzlDe2OPfnlMUUfTTo0K2DSIiPHw+7WFVqc0zioZTcIQV108fW1qBAmuMsTGa0Uxd7WWYT1QovmwWwizDXZLgZLeYrf4msvpFOhyMtOplOJx520DQXtf67y2ewM2s4FSk4lQtUzecbHxnsY3NFNjzSznnqFfkUpC+5WhdFMhdLYA2j2DxdJp5tKlC/TFIcVx6/otDg6O2Ht1n3vPX9jMFpCqAg0eiuDU1KU6fm9UaI87cLHgcootZpoMctNcNrx4ajyix2A3Jw40W40aD5OzxtQG3C3ddMJ00rE1mzKdBCbTjqZpbLdc32cMma6mg73Z4zuiuCPjjeBI2bPJVBSTZxcppLqKZjGaYvCBIGJdMB5xLc6bikuCsySddopvJ2QXcMGKeeNDnWQXu/D80raIztlQ1jVQk+LBrASyBpQEYkTHzEAGSoyUFG1HICZuEk3kynE1PLfYkIkasD3eEd4oTb5p6kWhrJYrcI7ZbItFgZKHWlTrhA9O3UEnHTqcFKxNcX6Dm3FcG07T1+7CTl9XHGXzMqPisJRCN20IM4/rbLErCCllQuhoJ1Mef8/7ePCex/mFf/HTNR5R6qAo8MCDD3Jnb4+joyPEZXwD8+UBQ1whboYPgb5fEOOKvh8YlpG8KCz2lkiFGPXU+z+BlO6eH/wmLrrKwDoFZ536SqTulrR27/Q20KxMh+PFoiYDNQgTikQ0tNYwxCU0BUkC2RtGmxtUDSbAGcY60g2LilHcanHfDp7We3LKhMagxRAs5CT2A/26p+9XNZ0LczA1Ira5oo4UulrM8QazjKHNI3MqlcwqZ8rCUoRSb2wYF8ykT1PNfiu5elSd2AnYibj7PBgM4153HY3aCLvGbNEZZxrjT4vYMLWUvDmbplA3v/VOGwRr8nzwzLa3cE6I0ZTKI0TRMmE2mZhOJitd6BjiitXimHOXHkDUszjsEW1Q9SCmOq+fAKm7quI8JSshCBQl6klT5quJ2V3IlCqqyfKQU4HssRTxghTzdh9FjE78pnM3AZa3LIfG4JfJpGU2a5lOW7a2OryHpvGbY+dcoGmUnF6zm3rN4zumuOMn4DwyGturkNVjmec2qS8YTodrSbngfcXpfWPeEs7bheMcxbeEZkITWjT4mshkFx9SBxpqplDiPPgODZlSg29bCqUIqQRyUlT6CgU4EAsapuKQBXBSt6GVdqjUPFbUIKQq3adU55BSTHzjHb7pDPpJifVySeMc0+7/296/BtuWXfd92G/MOdfae59z7rP79gMNNLoBAhBBgIRASnxYcWTTkvWyWKlSsahSVURZKbkcy+XYH2zSqkrKST5QSSoVpipFmeXYJaUUS4oiRwpLtuxQkmXHIUiApEiQQAMNAo1+d997z73nsR9rzTlHPowx197n3Hu7GyTIvt11Ztftc85+rjXXXGOO8R//8R+R1drU4ijNtu8a7XCfiXyQZ99CzvNVmnru53njKH6zChXvuI
Oyd2Wfbt4RxTbV2C2gK3zk49/BtUe+wKc+9WkOXz5muVyaV0QFCRxcusL1R66Rxw13j45QqSxPblF04+33qkdDG4ZhzWY5UNfK4etHlLGdx4Ogl7c37A1Smt51Ds45g0i1j5RKKYOVg6uZNgnC6WpJ6AOyMZgtRmu+XCVbFBcSBKttyBKoUdAIWk3yogQrOQ8kgnjiPxgUZ8dmmH6ISkzBcOb1ms1q7bBFdafYItoUOmKXSLPOGomDVfhWhawMQ0ZrNqejQSFBiHEgpt42HRXfIwJUTw5aOHbvGtnhz5+ZVz13Lc5floBFsSPUsUyOSgwmNzCWQuxmhv1jFGARoYorqm7yJGWw2D8gjT3r1Sk1Z0oeyRtrzD6bL6iWLKLWwEsvv8q1x55kcWWfIY/sH+wBBWQrV5DVNhst254M6jnqiRLhjBVjt3jCWYWi2V4RItIpOhQ0D4SSXVpY3WE05l9Uc1hDjKQYmHeJWT9nMVuwv7fHYjFjNuvoYyR2eCvO6LrB0IWOyHsgoQqCSgcRa05drCAiBBPIl9ghGBYmIVEp1pHeE1OBZCeOteALIRHF2RyhEOPM+cDNkzbGA2MleMLDcFH7zoyXNnszBtsQ+on2ZCqU0fi4A25zhUa9CNj3bDFqMAZDdS4/9qbWri0lYh+RGCjDyMlyyWIxJ6bOONjNZd0xbOex9jOQ+47hl3t+OT8eDPXY5xpOOvWbTcKVR64Tuzldt+Dg0iPceOJDfPaz38vV65fY3/sys5gYB+NLz/bmlowMwt6lfdabNanvICbGbBDWbDGzjVNAquuyLwdCCdx+/YjNsnLWvOyc7P2ijbcZZ/MT56KcCUu211U3Lmdsm8JqtbFcSIyMw2A69TFRNFC1A5mhQdGUrapRwwS6Gqe6syhfApI6ql8zVWEYMsthIKRElMqwXrNcrRg3I+M4QjWDjgghdfRpwXzWM5v3hCjUmlmtbRMojhVrrUg1yKU0i1xc9K2ujXRQIMbOdI2KQVBTwlnORnln5/veJ9rcqXofYE/SWuIQ1uuVVYL7u2OLNEqh6xIWWWWHPJTqTXhsczKtlhgj/WyGBBjWKzRXai4M44aQrHcAIsTYcevmLfb397h85QoEy3mlrkOi54Mw1oNKpFIZtZjEBhiG7vCdVYu6s9TuQSx6UoS+0ymZTe4JI0jJllz1PsgBa6jdaUAoRAkkAikKfRfpY2BvNrPrmWy+rJWygDeSoSpd/x6gQoqVDVJFqcFw6UKgqJCcFpilVYdWCIGhQg2RTjpqtc7oZuCtHynOexc36hPtKARq4wLH6PrwILGn5o2xC0SJXU8eNki0LHkteEKsgs6Q0DObzzk5OUXUFt5kVqZm1gYVGBNMmiNsIRxMdqWWTKhCP+tNO2ccOT1Z2uLzCOAePYG3ntFzftb2PedD5vtx5bfVrUy7k5VTZ+aXDrjxwWdJizmXL13jI89+jOuP3uAjz3yEnE9JMbA8vcsbb7yO1modktzjGceBvb05m/XGk1OJrpsxjKdG76qFk6O7nBydILXj7hvHLO9sMNrrTs3A9kjvOe8HjbdubrLzCV7TgCNh1kh8y91WDDtdLwdEOmqwGgszGAKaCGEGUqgyUANOhe2pdK4I2jtuq1YA5bmXipJr5e7piuXGColWqyXL5ZKxZEox+lxIxsbq5jPmewv2Fnt0KVLyyHq1ZLk8tYYweEOVrmPWz5glS8xJDAzDiGbr0JTLyOiqiOYEGTPKdIx2GUnTojl7BVpyZ/fxVrTT5r1tmJ7nGMdxez3Fzqk6tJhSRAiMTodGcHKBGUYtZbp3YozsLfa8+fcaHQvjOBK6DV0/JzhkcnJ8TB4G1usN+wczDm/fpe9nzkQz2LGq9UTNTa7bIdHQ7t/p1FuSVL3K13JjlpyvEASJVtUiWs3Z9hoKpbq9MLgHt0klYL2Ug5ETUhBShJiC69hbxBibPlPa0icfNB4K444IIfZGX8+FGgpFA1E6Y06IoGrwi7EoknlClla1fHYwrF7bYoiVTEfU5OShimpw3YuOVhNX3fBYI5BITFacVqoVUGkZLOERrTcrUUD2oNtjNt8jpSNKzp50Mo14EwKCFtuLmPchEpAOS462DaB5Jjkz298nzHBWhvXqDK2P2z0UtAfNJRiOY+enO5DFGbN434KUnecxA9+S0Orljo8/8SF+8A/9Kzz21JPs9XvkYcXtw5v0IXrhVaZkofGsU6OvAoe3bhE1c+fOiYsuVcpoRWOiwsnJMTfvKCEkVodrTm4uDfM1YeWdM3gQvv5OcPcmmNb2rS0bqEEuIYRtM3K5d2NQheXpmlJhrC4T6yJSQgIdp+utZM8ZuYCcyATBGPoRyVqZuXppJnN4fMRmGMljsaRpdegvRrrUM+tm9It9Ll09YDbvoGSO7h5xdHiHYb22DShZqb6qGZMU7bzzmE3ATALaBWapBxYUrRwd3kHEKnaN+qg7c/a203rPdTizxpxVJVjTjDxazkssaYDEyMbF0oJEo6TGZE3rddsnF3BNfKVtPBIC89kckcCwWlkR3JBJIXtDkMpqueTO4R02m5HD8YhFVPbne8y7nogVCVa1za7k0dkxdmxVIbl2equij9E0XgotMW8iYCZj0JQc7XgDJjSG93oIvmGo1Mm2dDESRei7ZPIYUb1C3UgMhGTEjymn1vjVDx4Ph3FHsEJ0x9eDKUFSFAmdM0tMdiBIomqAmM68W2sx1kZMlArJjWdTZdRq8psxWgGNtopRN2zB+edVTd3NlPiMlROj3cRiZEmIkRz3iF3iyrWrHN46pJRMSi6GVNxbb+yLxgoLWNI1WcK25ooWmcq/V6sVqTM8T4JAMQ9qew0N87+3OpBzf+9uBOcgCD3/um3J/T2f06inmBhaiIHv+/4/yJ/8Yz/M0fFdnv/GqyyXK5548mnSbMaszkkxce3KVZJ8E2tt1j67MgzKyy++5h2CjBmxyVY0lnNmWA90YY/NcuDO60eU8Swcs13QDzr/c0PxidcdI+1Njc+8rJ2vJ78nLxPLlbSEYJu7AMNo/S+LOrNDvbBEC4OaPnepCmpdxYpu3YmiI0NNBMQw9Sbxi62Z1WZj0rbZmirHLtJJIs7mXLp0hSsHl9i/cpmuTxwd3uSNN17j7p1jxpyJIdJ1HdEpheM4sljMyWOZMPfWj9g6DAX65Joqwwaw5itNVtcOunnvYYpetpO347HL1glRp6pGf9wgmQQSGDYbl8m1KCl6sn3YbLwGxLurRatPUY8ijFyQLOJxmmwplTFYxXk/s0LEYb2ZnhOxQiLVyunJMTeu3+D2rSOkjFy9dMD+fEEWa2dXSqYOIzVnsjcDIhjDp+ANxD2CrkWtGXkM3kLTrq42+EkERKkuU52kQjTtKC1GcQ7RE/pFCeaZ2h6gaprwqXMZFbxvgkU0Ifom8zvVc/+9GabLEBArDfebRTw5iprKGmIhcRcCWYs9roaWFay/4DgWb0JrmfYYhHFwbyBaklSwBKFSKXlAqxpuOmZKhnHYFiDgC6Pl8mIIFK2k1HFydMRjNx6li4lbt26iBG8yYDofIs2wizvf1
dkTAhLp+g6tVigyjhuGYbBd289/uonuSYI2fPZ+WOfOT1c9RM6/Tqbk0NbY6z0fMsmxqlEx0/6CJ59+nFlcsZDMya03+a5Pf5Yuzrl1+w0YT6i1MA5Lju/csbeKnX8UoRSdKGQ4zFa1sN4s2WxWphm/qRy9fsxwnO1F4kc3nevWO9+yf95ike9sXPdSReMWVgiBrg8sFj2nx6eW6zmD8vuB+Aa5GQfGnM0ZUOvQlRVGZ3gUdyrGorRG6ZOjgTFkqL5exKig7XDb+xHTd+njgsVsn4OrV7l85TIH+3O0FN549TVe/PrXWZ2eTGJ5TUlwHDObzdplOGDYjNTqPO0+kbrOONa1Mo4DJ3ePjC+veqbbl2+N51bP9kqcS/Y49Ci+IW7319bmsjrsuHOBDDo17MWSk6WCWFVnFxMZo83WypaqKd5fQKHmDRqqC7p1BIxCa6JzNuMlF155+WW+89N/kL3967z0wlfZ39tnsb/Pajil1MKYM+s8sBpHRodMRBUNVhWvWqdNpqpLEejuGmsrxrSwYtBGYkJ3k9PqVMpW6ART5bdWI1ZU/5lLZL7o3aVxKrdYk8BWo/Gg8VAYd1XLRpuIkIf0EzfaTsYqBC35IChJPZyTOgUp1T+raqCUyDCMUCt9iijZKkpj8EBcyLWSR9OFGFYDWjK1OL2wFtBMCEqs3mouWABXaoGaGZZrXn/1VR57/AnmiwW3bx8yDiNJoA7VWu1ppXGyBdfpGLY9LYU4eSnVi2QEdlgK3/pcToZIdgziZL/v95lv8T0CVQs5KI996Gke//DHuHtaeOnFm5wsN/zi53+BZ59+ltuHt9nvvWR/WLFcnvj7vS6yqs0LVvlrMsaZYbVkGFeUUsijsjzMnN7aeBGIVSa/HYT04GPXaU62o1U3O0wgAVIkzTo++NQT7M3MO/rNX/9NR7Z8I8GM/Ramap3BCrkWxlzJRbwrvWG3Y85TD0yd/gtUBxMrmG6R4Lxnp5XWyliL5yUii70Fly8dsH9pjrDh8PAOR7fv8tqrb7BebTxcF2KwfsO1qvW7DZHQdeYAlUpMHbP5nNR3rNdrhs2S9XI15RPaJmrU27dZFzvP6pnHLAHfGDQacM0lIUSx3gTZNu72FdF1oNSFsypKzaMZ6hC8MbtF2qqF6tWiDT6rxaSWRTq6bkbXzabro54PG4fKK998kU99zx+gFGWxOGCeCh944km+9o3nGYYN63FknTObWj1pbTUvMQVLzvrJlmqwa5Plhga7nnU6SrWiturpcpFIqaMZ/WoQTlUIEXKtFM3kMpBzIo+ZUgZi6lBvyxdjQLFGMVY/820QDhORq8B/AnzKr+W/DjwH/G3gGeAbwI+q6qHYGf401mpvCfy4qv7yW3+D64Vr9cSpLZxWkmt9TQXV5Ewqa6enakp2UbBehr5T56x2atWSTGN0CU1xaEYCaCQrrDejd2AZycNonlYIwEiQ7CSOkSAFlR7RjEoxOVPg7p07nJyecvXKda5ev+p68pXjo2NOj48YNhtT/UNpjbNTl4gpWPuwXEwTxA3ROAx0yfTB7R671+t+m6vVppRtA10P+XRbxPXA9+1ck1aGbnB75F/4oR/iD/2hf4kUhDHP+O8+/+tIrDz94WdRgmnei2GgJ8fHhGiQRNKGv1qYbEGDG8hcp783y5HTO5W8LpO3+K1vb+fH1tP38rXJJ7Wm3RGNwgc/+AGkjHz9t77Bn/yT/xpffe6rDMNEhfLEajBHJIh7ktVK4h0fL2OlZIMDjJ/dOoeZV2/aMBaVFazNG16Ji3i7NW0eamEcC6koNY2s13dZr46oubJaLTk9PWUcqofuxkYRjMteXACriz0kc4wiAekjaZa8WQTWfH3dIkbXUal1R9mwOVfvZI7ZCm/uwnxekhmiwWGbzcabTPh1Eby+pRCCCXZZIlu9IbytjSDRox9n2OjO+lTBZFayccCjy2i3SKSa13x05w7HR4es1z3zvQOSrhmzfe9m3JBz85oN569qa9kcPtv0GnRrucDQ6po86epzOEFwdi2l2s+gapCSZtvmXXOqqNGlR2/QnrvKej0SZx197MgesSAFEYODQL1F6IPHO/Xcfxr4r1T1z4hID+wB/yHw86r6UyLyE8BPAP8B8MeBj/m/7wd+xn8+eGl4KGuTVOjEwjHpnDrllVmWxTcjXyhILYSafdICnlu3Krdaka4jU6kpELOH2VpIQawSlshmvTI1w/URpViP8lIroRaCa6unYO8tdXSFyoxWky2IqWPYDLz5+uvEw1t0fc/BwSX29hYc7O9Rcmaz2bDejAzDhpwzKSX29vYpOVPGgdV6TfZQtWYYtRBjZ7zbSdJ2C5VsJQJ2JQneyiPfhV3aa3dfrzvPb9/X2nwhQpcS1y9d4tFLB9w6vMuly5f5c3/2x/iNL/4Gq5ORvcUBi86KtF599UWe/9pXqFMLLfXQEyY5WjcAVy5f4Y3bp6zXA2UonNxZ2/nd53Qmydozf59ZSefOo73OqgybUReHDFLf088WbPKaxx97nLJZ8uTjj/D6q6+Rh3ELa7WtpqqrAhtvf8wDNedtZaKa7nsVq66uE8ygIEbRbUr/gTpBNFmVmA1HLblYNFoyWirrcSTngdNTMW+/qPX7rE0x1Ci8JnlruYUgcZoPrYYlZy0Ejewv5ozjSAyBeb9H92jPzZtvWqSKQyJmUTm/qt5K+qItQTNY23UU1MgOEo3xMY7jzmrUCa7RWidMO4qzV2qmajT9e4cSGyxaa7GKT1/LgmHXlOIUSHPohpzJahHRcrXh8PYdZlefIueA5sLROLC4tM94+6Y1/8nm7RfNZrBLMFQS2/i0WCVqCNaBrFRzWKzuxgotp7nHlUCLNfjWYg20m7NV1aU0EGoRxqGyjgOS1tB31OPIogRm80DqxGSvu2StJvOGcdzce5PsjLc17iJyBfgXgR/3CzoAg4j8CPCH/WV/HfinmHH/EeBveC/VXxCRqyLypKq++sAvUSUPG6IEZgG6CLMohB5SJ1aAJFtowbwgQYsV+WiphjkGS6yKKAUFHREUzUKW5MtN2WhFpFA1slguzUsa1qhGVCKhZqRkTCfObscoEKKpRVIGhBHUZAeCWBu8cT0wrjesT5eklOj6nhijC//0zOdzizo8sRe7DonQu7dbBjMSpVgXmOYy3etrn73ttjbu/Ot233neYDYg+l6DOOGs4k5cKRxcvcrNO4e8efNNxmLNCrqu48Mf/jBHRyes16d0cWS1PuGXvvDfcfvwTSS6RolWhw6Mhopax/uui6zWS+4eHqGjcnq4pA5g6fR7q++Mb//bg6rOnx0oYx4omDzEm2+8yXd/8jv49Hd9J7/0i19waK56WmKH+aEWFc5nM8sLwRQmG/Wp0GiAhskqeMVypXrjEwWK86et9D174UzJtmZLHk0UT5VhLMiogEF3SvHEpjHFomwFqtQjA8OG1e6Raq0f2QiaK4vFgq6bsdjb48redcaauXvrFnksnrOW7bm+FWK3k1zdtgCUM38Hh1SDBErOZ3oSAN5XIUzMEwlNz8iKAatfA8TYQrXaXbmbXBIx
uV0Uyjh6pNCEJLZ5p1wyr7z2Ch955AO8eWvg0gz6OrC4dI1N+TrDmK2xvUeZtRrLbrq0no/QXEwUDmu/R7BcINXu34qYLAdO6sgZxmpRh2YXcLNr13JrWuwkNyGbDlFKnrNQy/GNga5L6Ga088wDOf8OjTvwLPAm8J+JyPcAXwD+HeDxHYP9GvC4//4U8OLO+1/yx97SuMc6ErATmCfooxI7IXWm7x47y5JXVcZsEI63ojb2yjDYzaKG5SqRUSvRw/CM46utT2q1hhyb9ZJSK+vN2pp4IBYNCFQsWx+lOl9WPKtdSP55jQVg8gK2K9ea7WatSvT+kXjT3ejaOCqW/FksDtC4ssKebOXl42ishlZevnuDnTVu93qpv71x7g6ecgTOjhDh0tWr3Lp7m6/+1vN8x8c/CUReeeVlAsq1a5f48pdf4MUXf5Xf9/hdQnzSMvwOF2hVJNmxS3GvXSur5SlHwxIZgUGpI1iF7/098PtHGOcod/fx5re0xx2wR0HLSIkQZcal/TmHh2/yMz/zz9hb7FuyS7bvaG8TzwUtFnMP9z2pr+pyAkoQ8+IKRqGrMVAKW+ZQcK/XNfJrLYidvHnl+GZQHK9v0U8z2C5pYcBPdqp105QB1WzUPPXcDYb3ikRqheVySYwbJCUe3Z9z+cpVju7cRd8Gwz3vsfuDZ6+UawZZVLn9hyrjZtgm6f3zGnbejLZOrzfaYKmKhGoqiuB6Ur4Z1Gr9k9sakMBYCnUY6WamjxOwinP1ROgrL7/EH/gf/UvsP/Zxfuu5r7HY3GV2yWR8R4+WcsnbQsQQkKLT/T21O6wB0Wi6ScVjsCBM7TvV6K+aM5JHXJMEvB9b8GhChG1qTQsj2YgGcmKMHc+/dN6FKohd01wyufzOjXsCPgv826r6ORH5aQyC2V5PVZXz/LK3GSLyl4C/BNDP99BhTYgdsRP6GJgFY85EEbogJJcDLZrpooUwXRKKQB4LUXRanFkrtVrnmlotZLX+pWEyulJN6GqzOkHLSB2W5jEEdwokEEUNMtTWlDr64q2kaIkeu2bF2Te7i97MUC7ZqE7RogINkVAjiHl9MQRSiBytjq2RrgRmM1f922y4V7J5WglsDd3vBJm+971mB2UyELPFgkcef4zlasVXnvsS3/3pz7DeFA5v3eLxG9fYnN7ma1/557xx8+umQb9ZM6xXhLhn5jeYwW50QbXyY8YhExFO7w6c3FmeO69dKGn3yN7uXO+/+TXhskmGFui6wP61SwQiiz3lGy+8wK1bd7lZD5mMEzpBDruff3CwT/HCImX72Y5iucyxdRETouU7tG1sHtF4hF6lGg6rtobmi7lpsJfqxrOa7K7i4IQFXIbhu4cp1RRR6+75Qgi2brvOBPSidw8q2WCkMhhMMlssGJebs3P+TqKkLWXpnmskQcz4ihnX4v1v8c22OQClFGJM7sy4M2AgLbXikK33EnU5Y3WcT4zIQoOSaLCIb6RBrMuS+cqBu7cOKatTfvTP/RF+9Uuf5p/94/83j6XI1ctXuX3zJuRqvPrqjatFiKWSgymVljGT00isEanV1Dadw69VCCImGC1WLVvyCDlTsomQBb/vLVLxqYuWP6xSoUBZKwk19lVRek8ud8n5/5Nx/52rQr4EvKSqn/O//y5m3F9vcIuIPAm84c+/DHxo5/0f9MfODFX9WeBnAfYvX9comU4CfaxIHUh9TwjQx44UK32ykC2XbGI7qVKj6Z2PtZKrNbLOjmuFMlprLnVuqahRHrMVaDS60Zg3aMkwLM14R4EUqWJa11HEtJu1UmUkRtNvjsnkOusmU8vI9q6yTL6kZNWv6n5oBY2NEWDrsOSRcbTmv/v7B5weHbEe1lbBJl401XaJ6T7b8rDbaLTH7QO7Rv9+e+472QxcYEqEpz70Ia5dv8bh0W2+9tyXoWS6GNmb9YzrE/7pP/6HvPjiV1BdUspoWh/FNl9cL2Vq2OBqeNVPSteVw9fuGi6NTqexewpvBcXcW2CkD7BJzQj5D4HLlw64dvkyr776Gm+8kbh9+y7Xrz/KrZs3JwN0j7ETE466fv0qVPO6A1DVWyBPXxNAjQWVcyCEDgnmrFihk4X+WV0+2mluSmXe93SpYywmSpfHESR4WzbzUGXKwzQQzZJ8pn3kS8bZHEYNLozjmsBsYpvM0sw6IRVlf++A5Z0jZ3fdD47ZhQG3v5+JoxxGsGjPr7eLXY3D4D1rd/IYLhlda0VCdX0bg1lFnE4YLVFfGkOkWq0IMFEip+SvO2W1mg5N14qhQpiKCsf1mn/+i5/nD/7g1/nBH/gB+r7yxf/+v+bG9Ud57ivPmaddPbK3LzF7ARObbRwGcjDRv6DR2/BZIVbWYp53LVPrTB0zYQdeamSC4HBUqFgkVo2QkLMyri1vOJbKOg/0fUfXJUziTRhLZr1Z32+hT+NtjbuqviYiL4rIJ1T1OeCHgd/0f38e+Cn/+ff9Lf8A+Msi8rewROrdt8Tb7UsImq3kVoVIh2gmuMhOQAgVuhBJjjWOUci1EKISYiVqpuQNUQtaMlGh1EweBkqtxjv2hFXOJlAVEbSOvo2OaBgt7C4mZxBTMt0Zv3GD04+0USQ7Y+NM1aZtMUeT79TqbfMcuulSb+FxUPeolFqto1NKkUuXLnN8dIdhGOxCG1jILk58L7e9Pc+Z1z3Ig92Z8gcbTYGJ2RLg6Wc+jNaRcVhx+/YbHN095ODSNWZd5ef/m5/j61//MpsyMOtGgyzmewRJ1v/Vi4KmI3NOuKpSR+Xw5l3Wyzx9sbTz+dYCQc7r1++cyPTY9In+2X3qefGFlxmq8o0XbiG10F0qpC4wOiX13JfYJwXlxo1HDDsNBtN5jSPqOKy6rK301l+ValTF0gTkBFodc6nqiTiDIPb6yHzWsRrHrVxFtVJ2MxCG7YQdmMQqsH2inXppDVKi4/lGFpZq0hqp7+n2FozVGDcH+4mjxYLV8QlbMOodRIYTq4ZpvZqjbNIfwaG5nLNvlmG6BpKSJdkxiCom+4iYAqUIMSpVizWoqJU6FmJqTS7sGA3hbs0wFMQqOWtWkxFpkse+rLQWXvzm13n55a/xkd/3Cb73O58lrL+Pb3z1VyiK9WKo1TsqYYqYTmqw3NzAGFwHqlYkR7oUTZ1WXaMmmyOZR4NwgvPxSzQbVB3+bcU7pVoEBq0ZjKLZNoeslVAS681Al0wYMRdj2Bmb68HjnbJl/m3gbzpT5reAv+BX6e+IyF8EXgB+1F/7DzEa5PMYFfIvvO2nq0IeIFarzqJCycTaQYbQe99ArdZBvlRCUIJWSqiEUFDWdGFEy0AnlXU1/uuYRwtbi5j2s7ZkVUE1eIXhNkmFq0Wagzmiali5CFOmWxgRsYYCeXDRfjdKMXb0yeSHs5juexDxRBH0nelH5DKS88g4rAkS6KLRnvb39527W7bG6i098bc3gjtIxI43+uAb1iBTV8+MAQ2FTd6AwHq94ZVXXiLFl/kv/6u/x9e/8RWiBFTN06i1sjy1puEhWCiMgBQlRKHUbMJKVTlZrjk6PHHIor7tcd3vvN7+OYcIZPr
NG0nA3TtHqAvGKUbJO12u7GabzFvbtCdXnhgDB1cuWXKzZMRVS4VADAoaIVT6rmPUbO0evTIUhNQnVDOxJILi82aJskBlNotcv36Jw5MjlwFwumiDDCcIwg17i1Zs9yQQ6WLy5HwxCq7Yyq2INemYz5AY6LvEYjaDUriz14z7O51/nX5OPQoagwbzrBExeGLcyvtOxr9LXqRoFOaoFtmAwRvqoloe5ND49+INP8A7Slm8QKgCFDP1pVJjpE4a6N6rVOHo7h2+8fxv8Ac/8ymuPXKD7/n9n2E5HLKYz1menho1WbfugOBGuVTKZmRQIXY9hIqkjGqkFBMuVDXRwVKVkCHSZBN0e14FI0xQTROrfYv6PVIqKoHNJjMqkNQ154tBONU07+vUWOb+4x0Zd1X9VeD77vPUD9/ntQr8W+/kc7ejgo5mWJtuhHaWQXY6nUlvlElGIAQhBiVrJkumBugSoFa92oVMYSTqSM4VzYajqUAtozUEQCasXHOZGi3Y8lKoAyEYDSuKEMW8rIJR3obNQCkOnYRA6mfM53Prgeo3vlbIminjSImZktbElBhrYbM2HQxR8RZzidRFFyor09yo7ni0LQw/76n6XWO22xbS1jHXc576WxtQu1lNYnS+N+e1N16FBHuLA0rNfPWrv8GXvvjLvP7mKxQdqGI+a+cFPtR2vNawXFycrRZjI5RsGPLy+JSazSi+k03qfuOMHvtk4e77yp2n7D2np6dITJZoVCsJzzvnv/X1dceAQdclLl2+ZNGfNBzdGBNVrLMPwcLnFDtTE+3ck0fo0sy6fBUzeFbQWT1gsl6bH3z0EW6+eZvDk6VFcXiltLdla/vgrnF398LE7yqmk+LURsEx4RToUsdiNielyN7eHn1KbDZrFnt7902ang/wthRIdua7FYaFLRzkyWeDZCqtzkBECLEjhc5hODd+zQmZNgr7VPUqb2kdnEoxcTGxloUWCzrP3+cipGTXM2fXj2ol+5bY/dpzz/HqN1+gj4nHP/pJPvGd382NJ5/ia6+/PiW71fswBGlagPZ5gxixQmJAqlDV2mBr3Tg/3qiyNBehXd+p0j64No1r5jQWmIDukBkUM+Lm/Njn1MEzPFV3uobdfzwUFaq2c29QZuZFa/HkpSfhyuDJcyFEk5WpdbSEZ8ykWCAX974zQUYQyzxHClIKpQxWpKBisEweIQRLfngW20S6BhBT+OtTdC1mu2AtlBKsc9JquZrkBWI/Y+/SJeazGSUP5gG0NmLVduk6ZkaE2CUW+3t0B5csSeYl6iFEch68SOLMBNGSdm85JuVIptfufs5ZksPbe8ghJR55/Am6vX0kWY/aUDZ8/vP/P7SsmM9nrAel6AbcS1StrNcDtXpPSXOwLIoaB2rNJhaWq3PJt8e6e9zvZOwal3vyejvneBbK2p0fQWu23IIKSEuOinviW/NuhTSWRD842GfvYI9W1KLVwnA8AV+bmQ0t6dZDHV19sBlgo/7ZZW0bnExZtivzGc88+TinX3+RYbAKaaFtul7xqjtiWuBQyJYuWpsu+Q5rJQaDEboUmM9mzGe9Yd4psre3bwnOM1z0s5Hffa7CmTxHM2ZBmt45U9V184WDWPSQRLzIC6RaUxRiU73cOggSttEBGDXZsgtx6xhIcAkc22yDWPSlzUnytaLOpnv1lZd4+dVXSLM5z37iO7nxyGM89cxH+dKv/jKx2CZgRroVJJmDMpZMFkFKISTDkdoGGoKQ2kbrtQeKFTgFZBIME6AUcyBbnsY2wjgl3HVH8LGUYorRrn8lYlBOeZtb5eEw7lQ0D2iJ5AypU2pxxbUSka5HciF1pvAWKUQpzpmtUAaSZGZSERlJ3Yhmew1s3CmwJVEqVDfuJcAglgwbSzZdDokkV2qzXr5brqvKSIhArqxWa8ZxgIg18b18lfligbph35wuadLtcgZrhDxkluWY2XyOePeZWkx6NaWeR65c5fTkhOXJKVobHt1Cu63ROuuJb70pPWfgt695J9WGTKSDqzce5Qf/xz9MToEQK7ffeJk6WFVfLoUxn5BzJkT1Dczog6VY7mLUSqxCHbz6rxTn547knD168uM84wXuHvNbjS1cdVaj/axRP5sAtHlskcxWNMw/Z2ee7/02M67Xrl6j71yhtFY3nnjuxTzk4lKvwe/04sdgjdut2bk1VzdHRaLRG1OEuSQ2w8BTj1zm5vFlli/e9KJRl3JQw+Zbi7rmIasIGpQqGSnqwnWTS+/9CcQa4ghuAO2Cx9gx39unm82NK74zG/cb5+fb5gYmXf4AEsSu+S7lSxSJINFkBghKbPoqHlnHbJ+fYkK1WCNxbUU/7g9XhVAJETfwTI0sdtM1RqZgUnNELBK49drrfP2b30Rne/xQWXHj0St84pPfwz/5h3+fOuxoC6k7DtF+UWHSwaJWpLj34myZImL7QS0mKtYiCWkVM0zUTfHHLMmuFvG6RLZ4nYtigVpRq9o2NKq6E/we8Nyt0exAGYQcO3KuxNBoXpE62kRLrEZ7ypUO08soTZGtZmIo9AnWQyWjzCJ0UinRoIKsxcuzTe6AytQYt6qFOdVV/ILjX1WL3QTtRsWSqcPGutykrufSlatcunKFzXrJyfFdlqdLJ8+Yxz5Vehp6D1hhwnqzhhy9is/uiKFk5qLMD/ZBhNOju3YzS/NOGzTjc7fjse6yO84b8e3f78S6g6SO7/r9n+F7vvcz3Dq8ieYVcThmHGbkUjldjQ5OObc6CrH35NloHlLOI2Op6DCQi1X31pwnHLglxd72YB4wzlZI7rJH2nycNU3npRzOeqTbjWIbDbRNZ7uZisBTH3ichFpDF9VJV0QVK+3XShfMU6vRtL8jHSn2VKn0vRDijGE9IKJWYU2HSKALCSmVLhmGe/1gnxfiLZoKV9Pa1xocCvLCnzZP1R0ZVdNe8jUXCERJVpNRA5tNIedThrFaN6DeGm/v7e2xPj0xQ3afuW7zsJ21nc3DZy4Eoy1LEPJmPHudJRBiZ58t7TsauaA1vg8k2c5/izpyLme/W3UqIJvmv2H0DbuPnrCsDSIxI71er/jGK9/k0lNP8drt23zgg1f59O/7JE8++UFe+sqXPKCyoyvNmPuiUWyjtF+c3RNcDKwtJTXHcPpOM812/BVjkaklmWuw9qARo3o6T802dGf6CMFUYttuI2/vqD0Uxh3UFOxcyTd7A+wwEw95TYwILdYZKwKixFAoZY1QWATMaHvDhExmrIGFBDRgFZUxWKGCh9ISgl9rhWpl2+L64xItRIre7zIEIQVlGCohCKfLU7q+59LlKzx64war5ZLTo7usTlceHqpfVJky/FbjpN5ARL0TjPGdLScb6GadFWoNAzEl+q5ncJ1rG2eN+xlDdM9zO6/S7evfFpIRYe/yZf7In/hTSOy4fesmp8dvcnx0SB6NfTQMa3K14LhWtU7zjXY3qCXQdIOO1bB2XLLVV75Vqr5zCObBY8sg2hr4b+39Oyd+dgPYZmGnEWPk2Q8/TVRjLIhfa8/R0qivIlanQbQGE53KpAEuRGdbdagqJpvlJfbRG9SUwK
zruL44YNEvGPPSnAv1Yig/3tb8ovmv5w0iYv1BW8FdjNGkLYrLaQDz+Zy+79iUzGw+nwz1g9z2+83z7l+tKrX1fG05IBWrMo0pUTG6IZM8bmMCBRf8KnYvTvr61UXbmJqAqLNNWmVuw9zVIzNrn4fTmacZQ9XW553Dm6RF4oWbr3D90cd55ukPcfWRJ3iRLxG0bDcJl6+Y3o+aroxTnQ269c9vAUL7vV0LnC7p91+luNZVJFTbLIyuXd3we47H16FiBZBNhZT7OC/nx0Nh3KsqWV00qVTKqGSUIK2B4UhfIVahDxEpVnLMuGGGVYQGLUSp1CAUqSxigCTUZBOfayCrdzgJQgkmhRq8CjO4cYrJmxjjxQkBcHVC6wozZzMODGXkyuXL3HjsccYxc3jzJqcnJ5YbQCAoISa6fo6o0aKEinpTBkVMkrglVEIi9TP25j1DNu2PTiK1N+P+oNEWm42tgd8tqAHObA73hSt2Hgsx8vSzH+HTn/leXnn9NT716c/yla/8OkF6Nuslq+UJEiDm5i2BRqXv1I27wxXFMvrGOBG0uPKn5nOG/T6L9IzNvf8ibhDUNmqxN55f82cTgDuvdQz3Hrz+nqhHaNWUs77nqSefgDISvNAlu8iboFAq1em7IoEokerQVUxCLQkJzv6qahFn56JkKEaBr9AVUs1c2YtcOeg5Xi2N9z1pt1T3iDsrllJL/InnkSakS6wRjZ2DOSahi8znM9tYoslAbDYrVqsNqUs8KL9zNom/e5FkZ+V5T9ngwnhe/zFJP8fkDq+rJXrUKq6KmV3PKYP16N2Ze1UmtUfEC8EaxKZGvYSWb26whXjyMbuB9zqAWnnjxW+Sx1OWLHn9zitc6m/w4Y9+nC9+/v9LXZ9OsMhudDcZWzeuTRVS3MBbtyj/GcVfa+JgXvYxMedaVGVtE8L0fW0Ex0etKMqVIRAIwZOx91yiM+OhMO6gDMMKo8RWihbmYpV7VSKpi+SSrUmyWmVpoZCoSC50ap3stZq8wCzaz9opowoqlbEGckmkUEndaNVn2vqDButzmazIKHa9KxoWb2Vl1EskEbqO5a0T5rN9ul7ZDCe88errnB6vzcxKIKSO+XxuCcf1KcNoDaC1WFmyVbSZSp7BTYHQJfYv7UMejcZWleXp0u21caAfCIBikM82MN7FQ++d67OwzpbGBm7wJfChp5/myqVLfPOVFymlsj874JhDZt0M5gVZF8RlWAmW8EtuZNeblYk3NR2UptPhEZOtVIOsPJnxFue1e9z3eYXYc/dz2JtHez45bSNM8yZy74YATCqHorY2CHBpf5/rV/eNGUGg1oFcjcoZ/eOTgIhSpJiXnprZE1MlTZFSGuzXvDib++oC4DFEOoT5fuXy5RnxllVaFlznvunelAHrHRyorSPQRH0EVKduXqqVKpXUBfq+Q8R0z9ena4ZhQFIgdI7fvzWce3Y+d6i6Rn+1TSB7K71pz0wR6YxR1JqYBO/wJTEZfq4mCaxYUwsJEY3WxlAqIK562dYrTNg9YppFxaNgqz/wqmip5EZZVtN9OTq8w2vf/DpXP3mNU97ks0/+EN/5iY/xTw4OONksmSbRk7OoTHea7UWmVok0R8OPF0ALGbv+oqaNpWw3hSCQCEjdLv+gVtmqYuvJHi5Yxyo7bnudrZMoO1nX+4yHwrirKqWMDIPrX/hJSO1JkshrGGqg64UxD8xitK4p1TzfoDrpvyQt1r2ISAZGSRBh6JRNUcY6oxRBU2X0jDUCqe+8+a1REUNKBLXkalNm3N874I07x5zmDBIZ1qecLk9YDiPMZkjfM08dVw4OSFq5c3ibzXrlksUgQUg0hUv3jqLdEH1K7M/m1GRaNEd371rSzT2D87Zt1xPfwsLNkO1COGe99K2XKmd+Tp8LSIwcXL1KdgbPZrNmHFacnNw15hKFsW6MDVCLL7pA9Q4x41AmDR9FXbiq0cnAuZPb8UAkRSd44HwEcq8X6XOxE620D26vPeuxbz9f/H1nIBnd+QwRWnHSE09cpw/G9MkF76ZleiHFG1dHrZ60NDqeNEMrDrska8KsDkFImpn0MZFITxcrox/9rAvcuLrPC/NDlkuXw3VDo2oCebUaGSBE9aKn9popjqMZ4XHYcHK6NBzZi3VqsURrigmNkZisUGZKxrYpuU/Ed2b2fZ4kRKQKZSzTPCpGUhCEoFsBsNZGM0hHxSIJNFjE51zxoIo4VGu9Ss9eG/Gq711mUK3VKaZMUJjhASYsqMC42fCN55/nkTce5zAXqIHZo5Ennvogz9++ZRsiOs1ls+xermDn5YnP4vUs1piFrWx5i6y99+mUx1HzyhFr2tHUNCWIM43iFHXFYDUzEiKNphkU7ieutzseGuOe80grOc4lOgumUHKgjiM5BZhHSlBKCsRanVdt+FwI7hVhAe7oic8QI4FEpNBLpJPCqMEy7VXpPcQLYrK2JlBmLa6CWtiMBmt6kHpu3j0iK4SY2KzXgHDj8Se4duMGi/1LiCpvvvwSr730srFB1AqZbB36DhwstHIFf6teDZEOKCFw93TJar1mMTO97beZvR1HXabNysaOV9x+3Pe+VH+7LcDYR67fuM56WNH3CbSwXB6xXN3FmiVUNuXUqgqDkfqCJNcyzwybjFQXU8Pxymalp+CiGSjzcnchpd3jnYyDCu9MvuitvRlxb8fPuG2ND5iTZhqrQ3iJ73jmScgb8mja/9lTB3ZTJ2LwSKDgVDp2cHgL13cVMpvmThetviJoJaFI1yF5JGR45GCfg3nPerW2oqspR9PmRq0JRvXiPm8KH/xc2xZfa2UcM3p6Sq2VWd/ZjHWRftZzsNhjnM2YLeZOUz0b5U0zcx5zVyZYJaTodErXRW9z7dz8bcWGTgquTUdexGtNo3rkp2gr4qKa+BxMXnt1rnlb4+Y91ykpCQ6J1CZ24VfbIbQ8ZF7+rRf5xJtHzC4nfuXlX2Z2ss/jH36Mr325ow6DCwaG6b5RXzVBxUTF3INvNkTZxeHPbYa7xXEKRUwyIiXfLSYP3k9JgkcJBl81QrTjUw/2iXw8FMYdXLehZEDRDaDKOGRSZx2VFl0grwOzoOz3kXmwPpXzoKQEpOC89QgaiTrQxUBHotAxo1BpCnzCUqxNXxxHggjJ6WQxRmKXiDFBMVhGA2gKnOaBPIwkN0jjaNIDCSWVgXxyyK2bNzm8ectuPvcyjN1QQUwASbyiMxC9CYEtjNbzcths2NvbI4bI8uRk6277eBDuuf1poe6DYZyzw8JJ/ywRDi7vM1903Lr1OuvVMSdHh7z55quUuqHkwjBm05wXBc2k1Li+2TR7SjHFvGYW1WhsrUxepq4OLeOvO8fiZ3HmcTcHZ/IGZ8/hLDVv99zOvGrKEbi5Q10qdvs1cuYzzHs1jvle3/HxDz9FWW+sVJ0RLRZ2m7yvhdClmoFF1USoAHM57PNVIkWstkLEYMbBaYAjyuheZoqJqJUrex03rl7lztGb5Or1mL6GzmyGasydogWiRQfiXPMQzYBohToOjFTqaP2JQwqUPBgLbT5jsb/P8ujELdnOXMjZ+
ZzmcnfpuXMx5gFx4h/RIha/JawYEKzITw24C8FaBIZg/q664bN+o4pIoQawWoIwOQfTrjnBP+aqi+PirTFI6x+rDq2oKBS4+8YRhy/c4aMf+ySn6Q00HXP1iX32Lx9wfOtwOh8T6BVETEpCouv7+LdajsUmw2BO3W54CqE0B88SwAFBgl1LcwvSNqJX9TaftkmkWiFYLkRxWQTv3PZW46Ex7gU1of1qEgNDFYKMjBuF3jQqtBO0j4QMEpUu9ZSEZdSlp1WxiQZEElkiPXEyqCFUskKhJ1OodcuECVEguS5MMIEe8eSnhMTJak0/myNaKWUgOcB8enrKcrnkjVdfxXRjhK7vuby/IOdiHXqKQ0VqYXUrlDEql4V+RStHy2NUK1euXabrZwybgc16jzIO5HHYsTm7Xm4zRhZ2v51Bn0qhRXbsmAef3tvx6vWr1Lzk17/wz3j9zdc5OrrD0fHtScuiUEzPx70TrcFkjUNx1ccGyDDVVbVmFkLrnlPvc6hnjfn9ht1ArRJx11if/4yzj02wFC1pZRDKLhjj9tF/bx6+OqQsXLm0z2PXLpPIpFopBWKrBPVkZS0FTQI5IhoNtgrmQVaFSqBUaw9nVY2RqiNU7xqkkVLE2Fq1oCGhqlzaXxAjMG6jAcAS981DdxZJqVZvICE4d7qdjSk0GrSR2Sw3Jkvr5fx30yHzvT0Wewvm+wvWp6dnQgT1+d91LlpFZQhGSEgxGSkim/OECCl1tqmpKR3itEEJ0ROkypAzSYzhIq7IqkHIYmqZuNPQ9YkQtxCGIR5TeOer25gzVj+oTrvFuYXt/rMNYrNe8epzL/LxH/oMB5evcLI+pLvW88gTNzi9ddQqkGyexD47xDidQ3CvPXq0VLTx/cN0WxYKSYLDc3VapqpKF4N7+LYRasMVJyVV77srQhVzHNTPt9G4HzQeEuPuvOia6WIybYU6mhaHwmZj+jEU6NRvjg5qiEBvGJVuE1kIRGaMVekADZA1EAn0SSgaULXwti+2IYTY0aWZhdVBJg9TJHD35Jj1ODKb76GYMmWthdm85+TEWwQCTQhoHAqzsTLfW9DvzQghAdYcwlp5mSdoTcCTeTK5IClwsHeJrutYrdYM64HFfEE82Of47iHDejgzZ83zvQd7dgbJW8dtytbxcy9IArPFgrRIfPHLv8JqZVHD3t4e62FDKUpKYapBECx5VWql6zu6zpUN1aoOq7oxrgYR1IApA05w0VvvRNrez33OkV0v8v5e++5j2ypK2XnmfNGSnHkW9+oV6ELg4888zTwoykgfhDIVsBR3BKI5FePgMIPXUwTrIZpLoQrknGlVhqq4YuS2K49iMIUWmy+VDDHSzXrCeu0FOWwzex6+W5OOujXA1SIyQbxzkdoxIvR9DzNhyMVokdlEsfJ4zP7+AdeuXuPNYSRvhu0sqhvOnXxNo+CqJ25TTNtWdeAcfGspWNyDxtVRZeo2ZmJdWhVvoWCRkLfnazURALUYy6b6uZt6kcM0fg2blEMguHGv1FInPNyCWnPOas68+dJrLI5n7F97kpdnX6NePeHaE4/y4pdfMIkIB+63hUfq32sbRExpm4QWm3+Z3BudIsJdhwQJdMGYMilaG9GK6VpJky5wPJ5ojJniTpkFnXXbRfMB4yEx7gbLNAGq4l6oiTJFslOjVI0F0yoLs7YbNBB9smIIVEkM1apZuz4wZk8OVYghMu8CSqEUJWp2CmRvm4njzgSMsbI65c03X2fv8hXXqYBhNOGxxd4ei/0Vm83GKH7FJr6Wwmq5ZLU6RYLh813qTfwnWIKkYYmlbGg4cd8nahSOlqcGc0Sjf+WcWeztkcfimjNvZczscT3Xnm/nKc54xu2uFYEI/cEMZpVbx28QBBbdnM2wRksluWStcZAt6pAYzaPQim4GrxpujQj8xotmrJr/pxW2uO3Zc2ih/4M4vOph9v0Tqt/i0PNlTW1u/DhbMlRglgKf+sgH6TSTrRciUq3oCAqqW4pfwOZjLAMQbf2ZrSXXYg2WFWNsASUr0rB7xULubFoFIQSIyiYXJHlbPbWWlOoenkUydcf42AiOCbciIqVQsrF1+tmcS5cOqM4hHzaZzXDKerVmrIU4m3Fw5TJ3bh16Cf8DNmNxDF2EFI1TPw7GklEPhUrJSDB2ThRPMlpoYT91W6rfIBVEjcKZ7J7HdVRqrWSyRck7UYOqVeS2I9QJmqzIznHruWMH5c7NW9x56U1+3zOf4rk3v8ZTj97giWdP+fLnf5PN3cGZMVvNHCs4lAlnb8fe7JLVzyhI9UjdiQXTLG4LngQrkqquCsmOcVd3uFqUYqJjvmm0TPFbjIfCuLebov1sDWh7x5VC8PBfoapQsqAxGu4pbeoMvw4xWKu8EEnSxJuEPgijQvaGG0GFPgl92+9byORdXhQLnV995TVqMLnWO7cP6fqO1bgx/nIV+n5B3y8Yx5EyFtsAqpeGV9BqkMyQHYYQNflWL1qQYAai1gpR6OY9sYvWRDsmqJDHDZv1ipQSwyQW5AHoZKx3wYX2/L2ue7v5zyr4RVSUNItce+wS3TxSa6bvZ+D9QKOziGp1xkGI1spMm2yxC7OZK0pQb3WmBltUXFffw+Ot13zvOO9hfzvHGRMl7ceOv74TLUxsBAlc2d/jO556jOjt70IVEpVSB4IXIHldoVtoi/qGDDUkq7+rChK8wQeTN1m0oGVEtTLWylAsSRiCNX7eZGOR9d3MOnvV6gql4rm1untWtEras4lPq6QctVDUoIUuCbHvqBroInQH+xzsH5hRCcJiscfp0SnDmY4/u5DMFhMT8WI9KrkMho2H4H1kYaxqtSRTk3ahdSVqUEquBYrS0WAPL1DKDmYUW2OmtOoxjngCtlWR+fWqbZ359zeBtXvWgsJ6ueLXPv85Pvrd38fx4R4v54GrH3iCR556gleOT2iFRxOwJ4J6BW6UQBXbrAJGBFOnX078fHdYBAz+Vbv3iztZoTZO/FTTyiRp0Jwkpw43BVqN8W3i3ofEuANTtddWpqciGl3fZdpkDaOOWFii0e6lalobSERiZ5KnWIJVKyQdmRFYR+OIRknMxDLe3ThutTnAOMcqaC28/sbrnC4HHnn0ESzBu2HcjIQQGTYjy9WKYRgJQehnPbWrdLUzQ58tcWsNN8yI15oNi3T2hSXAKmMZQdwjpNKlzqryGKwopRaWq7UtoJhQbyY+QTNnGCLnvVE9+5hsX78t2wcCLC7PObi6QEIlpbnBLmVweQfDQpGtyqNqca8sWNOHhqmb+IZpWteyI56Gm8DdDWp3vB2WtLNxqRvl38YeYDf1zjHsRjfuLOxK2KYY+MSzT3F1P1Jd7Exd3yMHpWpHFGukjjBhvEOpFA1+zZmkd2uxpGd13ZWcR2q29npZK4MrKAbPU1SxhGMXEyEmkOxJRrtPVNmSohpmyxYGQKyIpjWhHseRk+MTah7oZ3NS6kldj0i19o/9jHnXQ4B+PmfYDEzsjDNzNe2OBoWG4BLYeDTc+Rr36m8NENRVHA0rt0bhhpGj5gBkaZCHe/khGWtNLKI3XRdf10k9
f+O9azGnT8S0oTLq8Ne5aE+NtQSgpfLNrz3PC994jicf/TC//tUv8fijiSc/8h288lsvUMtI37jmtMSwTElqwTu2iUNBqDcEtwRuVUsoVzWnJjas3v0vS3rLdBG1WuRSsEQqRaZuTUbSsM+6X9Zqdzwcxl1xjXbb1aMEUggECtZ3NPruucUazet17rQEa0OWBPHuN4CJnwahx6hpi96q8zaqkC1DvZdnUwa7RQ6ilbuHd7h96xYxzUgpsdkYX304HulCx9HpXTbrNaqFzSZbEVayZKwlhYwDbUUOQq2WEEIbnQkmfNXDrDxWVqdL2NsjxEjxvo+qJjglIoxarYrvjFE/Sxo/exPq7lM0Y99ufoAalG4v8ehTN0jzjtib2l5Q62ErSc2R0GIhui8usxsyddhZzA+cJVLItRB1W10o6owNtW42zeDdsxT8cM+jSVvvWh+IENz7WQ+WI7AbvT7gdVssGREWM+F7P/UMSQdyxRhXZKIqkoUUijP9bG6z3ZsENSy8UMzA1oyKkutIKdVFtUz/W7wQL1fIFSBab86i5FopFVan1sw9BKGoc55FHZrRrXeqztbBNfWjKyYG49VX57efnqxYr0dr4D7rWcx6uvmCXAqr0yWzbsbe/j4nR0cPmGAmCNOK/mCzGad5QyA6ZCe+xqVWgy8DiKrhzV6nb52ZvD9qaZTGOnm94oawJWpbQnXM7kDhhj6IM7l0u17a4d4PzlPl6PiEr/zmr/P4d12hjDPu3FSe/dB38twjv87xzdem+zXssF3cyk9xiMX79ldWB4wbAcAWHU3et9UWGJnCNrfoXrqKRXka1Jtxe+5HLDKQgGnaP7D4z8ZDYdwnA4DniHw3NONhTTlkkliMlDpSaiBX70hSAjEExgK9mmZFiN7SrBg/WaXxRA3vcvPlGKAncFzXYblccfvWIarCbNYz5pFczNs4PTmFXCib0Qx4BcM8YVhtiDEbjzkEC9FEEW/rpxMcsePx7BhmEaGMmdVyyeJgn9liThTxkn28iYR1eLd7+DyvHbZ33LkQdHIU5Mz3AUgnPP7Bx3jsA48ym0diDFuWgSOKBW8S7lrhTdOkaW13vW1sDTOMIRBc2dMaZUffxKp7aOUdGejtWemZY95uVO/Mdb8X428/t9osLSLYzUaHIHzg2gEfffwyIY9EjQQtVDakAnM6lBUiCVJHVqUL3tsXTJ1RhFzMgy1SGXJh3JheUC7WTEYcthuGQgnJchTuCIxV2QyZk+MTtLV6rO1uwaKNHY+0reWmqR5Th+zg8ttNwL5TxepMNkCpwv7BJfb3rY/rwcEet1MgD+WeHVf8GgjWaaw4vm62PXg7v0YNtPk1KFpp2jH23jMXavqelhxulaG2hgx+CY5ZW48GF1Fzpw6sxsA8Zc83NEN/dlXYdRbLE7zwpS8zu/wEl+J1DhbX+L7v/W6e+9IX+fLNN1BpsM+2mntaVw2Hr7vOgqWuaXMw+f1t4zISSEW9raaQ1aNit0OtMtV0/ANTnYxgrUQf4Li08VAYdzAfNgbjjjYd6EZBw40l1TyBJpHZaFRjzsxTsgKaPND3c9vHRQkpWZnuCFTjIWs1lkOtmYan9dErWoeBo8NDVusN3XxBiNFhFuudOOs6Do+OwRtylNIYDxEJ1cq42/F6squ6NOy0e9MWPLYow/YiVVVKtQrIvuvpZj0xdIbXVmMd1FxYL9dMi/XMDq5t4u6Z4YbDNtzQDkdYHMx45mMfYu/qnpU2l8ymbiz7pcFVXAVbqeYlOUHLnSgzkOuVNx8Ra08YkyOqO15WxOhimfEeAMmnh/MGe+tpNfzBj0XOnfd9xv2qWNtrtzep+13aPmd706cY+K5nn+Z66i3zpSNBc0MISLRS+QhkupBQEiY2bawYqrgqYSWP1Sp+XeN8GE3zX7zfb1FTOlWvI6go6zxwdHrMMG4IoZL6RIzieHIz1nWbVHQnQrFkfEjRNVBs3UX/vcE0TbRqLJWqI3GzZu/SnNXaeuL2fb8jz7w74wa/hSB0Xcc4GHTS5tXuiUhyoT9Qq6aXBsO2a9lgMJ1ksnevneAeO2CVppVG+xUxYTYJyc5JbFPEbYMl6nyTaIYSZVJqmbjzwt2bt3jluV/lI5/6fkbtmO93/IHv/wG+8oVfQMbVdumB2yAb1ec8+PrZOhE6OXlFlYhMTCc16VmMedNm06MbEYdmPMIR15Z3mFq1OrvmrcfbGncR+QTwt3ce+gjwvwT+hj/+DPAN4EdV9VDszH4aa7W3BH5cVX/57b6naVJEv1hJzPNOYmJfAUghOrnf6FGlVOgAsbZ1pYj3Sy1YgyurV5UQiXFkFoQ6mFxBM+Z9F72IyT7n+GTF8Yk1eI7JuMbjZsOw3lhidBisdZ9OPZzs4pZC189AIA+jiUWFiHg5YWuioC6G1AxMJHplmzstwSETjB45SvZEnGF8Ic2QfaHmyrCxCtmzxtANn48JXWhe6W6MGgJh1vHEs08SFonleoRcTS9FR/cecBaRghTMH1VUI8X55jiNbXXnDmMekdBZAlai0VPBICSRqR/tZr0xDvh9x1u59DsGWVtyuJ3rjmE+89qzicXt10ibnDOfrtN74NKi47s/+izRi4yE7HMZzJCIRYoNp6+tiCgbZKMFzz1giedarEFIrWS1hu61KOI1HkWhlNZrE1QDq3Xm+OTIkvMNaWgUx+JGVtjhcfvpVWUcLFGbUj9R9syhCEyzr75ZxwgxEruO9Xrk6PCOtchzPfbzcVLzIpvHbOJ4/ko34OKQTYM7tYHTTjMWtSyMteRTI0FMp+MCYYBRk8W8dxGnDDIxgaxS3RyY5oxUrdRi96J91vSm6WJL3AoE1qq8+LWvMqzWfPcf+CFe/eZzPPPM01x6/DGOX37RvO+dQFl1myvcRuMORXr+MDTEQKQ59hh8ZNPkbHbb6ERoWu/tVp2YMWJN1aMYjVTrNmfwoPFOGmQ/B3zGjlsi8DLwXwA/Afy8qv6UiPyE//0fAH8c+Jj/+37gZ/znA4eIkMTKr6NAwpQbo1hnkyRCEkswBXHGjEbT5tBqOGWxxMmYCxKNeoVENCTHh62LiSUpYOJZqyXGqCOljpyeLMlZmc9mdFEow8jqdGkJqZw5PTmlSx3DZpiMdM0m5hRioO/ngDjX2RKfErY0qq0xcsMpcbtS2ClPrpBzAQYr7Oo6ky4NkU469g72UbUq2Xsv8Y6ImPjf7s0H4uS9hJS4/uQTPPnhZ7h+7TrjJpvGz7gm54GcB1toxYSXghfqoF6QU4SUEkk6Nqslx3eP0Kqk1JG6hMREVLtuUczTqA7tSAwTvW1ntXE/qKV5XfdsYu4pnfW4d9/THt8No/F53s7TGdXIKVo0uednnniEpx+9MvGzoyoJ24SLVue6W5QonjgumLRF0mh9Umu1PgJjtm/WjBDc6BRyVmK1GzoXpUSv+K3mEZ64sBduIGpRYgpUsQ3GLufWYDXcvV2nsVrnq5QiMfbUqMS4nS3BkqGirVNTx+pktWV/3RP9NOghEMQlfGu1oqs2ybWQy4BooIu9eZvZKk7xKvAGzxnjzzB3CE5
p3HLCiypBKjT+d209RWU6V5P7ztb4Y1oPdXs7eIVogztQY++ELlpEnJLlMmrg1VdfpfwP/y1aNjz+oWd49sMf5tdef9X9AOeft/WCGfApKnaPPghWLQ9T96V2VKOLB2qtVhmPRU7Bp64U09gX1CXJzZiHaFh9DNEkKsJbOUHfOizzw8DXVPUFEfkR4A/7438d+KeYcf8R4G+o3TG/ICJXReRJVX31rT/aNdwVYjLGRRJTFRAxhCOKkkTBTLTt9lrREshiVW4xRYaxYsUEFevjEqganYJok5lLtijbjapq4fjOMWUsRk+czSfPp2qh1MJmtUErjDmDWqgd1LAzbUY+JbrOFvOEs1fzAoN64w439qBTCbFqC8t8EZRCFfMTwaCa6AJdii3ufj7zgpG6Y8y28zm5ca1MdPJLzXuaHcx5+qMf4vLBo/RxDmlJjAmRREqmLWKtAjO5rNygG/YaNTKf7yFSWZ8O3Ll1l2Ftm1zX9cTOPJYINk+yPZ66E9KeHy3htL1B/Wzu84azsM7u+e2arQd8z87/z3yG+HyJUQU/9ZEPsd9VZlGoBIgWE6Km0jggDHgf3Kqm6giMZURLxar6Wwch/9ZSvEjJmoVbjUQ11KJkhhxcKVLIdSRXw6fb7lNyNe0jwRUW9Z5TMQfd6ZKen6rV21CmZO93AoDJbhj9skuRruvQMZP7RPay/bPmaWe6REghknP2yGFrycQNseZCiIFRjA0U1TuE1obRWxJx3nWgMJZCLtmNZiBnK8CazRJgLBjcu621kpozI6aN3xru7EZwIRps0zbvWkYWe3PSYsby+JgkYZJTrhVef+NNfuVz/wPfp5lnPvgkX/m1GRzZvmCyvhYBqFrSmmBFSOZ527yI+vbiG5DuvKet86ImNWDUaNvItB1zZWoUtOu/qHgjx3ovVLY7vlXj/mPAf+6/P75jsF8DHvffnwJe3HnPS/7YWxv3kIwu5cUuSYROMEgmWJLK5E9attqpXSLUAGNRUiwmjITh38JA7UBJVvCgxmApDi1QW7KycnJ6wt3jY0oN9F0PGIY8DhuqwunpypOogc0wmBnxMDhE44U33fauT/R9zyBCHkuj9JgH5IL8rfFArWeZGpVtMYOW5iUKoW5ZQOM4shk2xkIwrYYd7/O812t+qramjGIPdoue648/SpXCsFkzdkbZqzCVxhtDpzMjM3RURpDqinuWdF2dnnD31hHr0w3t0EO0pGzAO8ZbBtaSeyHZNQwJg3jOHO2UQLOx9eTvx3xpCb2zjRR2x7lIQMHcuLrz3Nk5MxkGC+Gv7s341DNP0AXjY0dfa70Yg2usigbDlkM1Lvvo3YSiBmIVklQGp8OqZnIx3ZVSrbipMWGsC5iSizIGk6SNCpv1QHHFSPFrV6u1oRQxD9mgjS2gtDuf0w9ta828+KqZTmYEsd6dqE2NBghJOLhyQFp0hOMTNuuBdTl1jjXTtQieOAdM3rcFDx6lWnOS7eajYMqrKdm2M11qnS5H8sbWEKlezRxCw5qtMpop2W8bfPFtjMbMEUtf1loIIRoLpeUZ2loKcOXaIzz9kaf5lc9/wTZPrxg1SYnKrTdu8qu/9It85/d8hsuXD+ANh6IcSpFgRVit826QYAl0h5iiiPWAALM/XtC0y9TycAJR+87g3mxtSVXstb6dTBsAarTPtxrv2LiLSA/8aeAnzz+nqirvTLJv9/P+EvCXwGh+BOtHQ2zhmYU6UaATiFqJIXoSyQo8aq3kYHSzGCO5VAYpIJWgGyvrr5USeiBQsnXMQavxa6t3CaqVwzt32QwDfbdgnjrrOl6sjHqzGRhHq9ITraQuMWwGW6AxIHbVTCZVLXyKKZpqpQzU0T2znZtvEv3RVonm92BwCEm9bDoUgrqnr5FSM+NoPUhjjHYzjPls0nE7y2fmPIjxjKXveOTxG/TznpOjI2IS+v6GwQDBIp5xMANUa51kIEJINOw9jwOr1ZLjOzcZVpkkHauhOuZoTSrQ4kyCMEm8ItF1++WcsW6G437+9q6BP7PuzpzrNlTeNXRnoRzh7MOqW2Olun2NBHjmg4/y1KOX6GT0YhS1Bu2Y0FyQ2UQDDTUx+NY8lAwENFekjuBVhU1SYDOYQFyuypANVpRiPO2xFAax9a2lsFmNnJ6saCenVC9sy7ZRQtvBaUwTw8LFNWe2m5g1hZFJTKyWSgnZ8zK27sY8MIwDYTZnGDK1KHt7+6CV9XKJ7jojYveuaST5+m6J+gk79s3Vcenouixat3IAto4NhiilTFW50SmApVqtgWqhVGMsbWMvy01UVS+QMg9dBLokk3iXlypO/WvXK2PUff0b3yB1ndvY1pmtEkOgqPLq67dYf/6XTFu+QS5u1NvU+/9MksANuzbIyK9Fi2rOrOHmiasy2WnPIbT8jSZLlk+Bb7B7COQsA/o+41vx3P848Muq+rr//XqDW0TkSeANf/xl4EM77/ugP3ZmqOrPAj8LMJvPtdF8oichVYQSoHqYQwjUBl6J0R6DV4iVWpGhQkpIAQmuWFi8CUc1PG6o3iPVKwNzyaxXp+YJr0dqtfeKgI6VccjWOGAciSpkFcBwyOIVY4a1eVmO33xjHgmpYzbrCCkyrAfKZuNNFGS6oJYg2rkRJgzNsLaITD0itaq3uMvbx8Q3l+ACVWc81eZh2Oe1NIOkwLUbV9g7mFuiajOyOl0xXi/MgoXmFWUc15CtP60qlFyp1aCWWgp5MPx0vR5MFbkUxsHmbr6olJYgCiampqqTVG2tyua0WNGPD1ef9aNl51x2fw9nXnHWcO8+puee4wGvOzsmn0wCiy7yyWc+gJSVNZKQiCboOitA6YPRC+eqlBxNL0itkXEQBclEMaxdWoSYTaJCc0VHZRytp0BGaT3GK3YDVyqaM6v1mvWQW9kPOBWw1kKSzpyiBvu0DXMyOFsPG/BS/60WixAsnxKKf6wlAlfHJ4gKlw8OuH75KpvNwHJ5yp27t7lz85b3DIUYrbNSo+rarNt3iNMTpSiZAsES81qrFS9VpZY8HYepYPv6qg6rGg+mqQXTWOTF77vQCl2AzkXI1DepGOxeFY/WxHBRUtzi4LWa0N2s6+nCDs0w7MA3Cjdv3d2uKfti8/LF7lFj5tk5B7sQHq2K50N8Uw5KMJzGkAePQEIIdr/gzXwEK/yKnS97szVG2fZeAPqg+u7t+FaM+59lC8kA/APgzwM/5T///s7jf1lE/haWSL37dni7CLTGtmbUnSWDYYpjAE2BKpnoaQpR02epUqgRNEYz2lIZxpGZRMbRK1rFWp9pUajJvHJgHC1ZOebMJmdrVNDPGEpmnTcMtTAOo7VR8wU85oGSC33XMQ7FcPUpTDIvvI6ZEkcW/ZzZ3oJltyHHyLhcGi1yOmeZuuYA5h37H4qJbzm8aIlT8avunPuIzVc/69ism6Turtlyj6lJHaTKtRvXuPbo5amZQC1CHjLr9UC3WBCDNSfuusRYNoxlpA7ZjIAmSlH6ruNgsU9V5fj4EifrFZor46jcvZv4q//rWxOmuPXyZBvWo2yWw05oCpcuwX/0v3kr48303K
7Dfn+NGZ/Dc1BVC+R3I4YG7exuhEjg6v4eH3/iBrGMth410EliFkziopNIbPUNAiOZSmUGjAU2Yo2yZwKjVNDs1F3IQ2EcK6VYgrTs3qaWMUXFIs3TU6MjWkXszhxMG96OZ7d75aVFMmFbAenvKu5RhrqjR6NGNQ6SKAWOj08pOXP58mUuXT1gsT9j/8qBGfq7RwadJtOvH4fBIbG63VhEmkthlZQu+aFqfXfVk6CNq90iVUQQNdkFEc81sBPRidWm0DaxEOzYGzPNKZcqWCVnBbznaBfTdO2tZ0NCCIy5kmsl7WyCgeK0xZZ8d6ch2PUK7ZhdhTYgiGajvXqkm6sVLVoAksnVNoNQ1SvXmXRxwBOzDT6LUFwpVEJCg8lExGoVsNYd6q3HOzLuIrIP/BHg39h5+KeAvyMifxF4AfhRf/wfYjTI5zEq5F94B9+AhGSGzWlSFnXYxIhgdMKUCNVwc42grrTXLryEjrEUksBQBJECIRvPVOy+KTlDhdW4oqg1N2jGedHNCVUMZ88jw7Dx8LlSaiaXjFYhxQ68clTVbogqblzdOx82G1YxcmUx4/rVy6z6jmVIbFYrxtHKuX1up5+mbeO7vYuGtR0eDAtHLeKIyVxdcYx0tpiRx5E8lnv9VgE6uHTjEo984Cpd50yDKoRiErFTYsvD5xR6YoxUCiUP1CFTNRJnHf2sN72PccPly49S8x2OD+9Qa+R/9b99jNkiEpx2F9q/lKjZCmvyOPDi88+T87g9RhVW6y2cct5Y+YvOndS5VSRwvloXzsM3Z0dKjhlny8NYclH46FOPcnUWqcU2fTQZewtT9o5hW3CHRAJiNFC19mlBhSSRjpFE8SYetghrLuRcyUWs4Qdq7CEaTGT5pZIrq9MVtRaX0FW3XTIV/kiDcJo077SRedTkEV51PZrm8Rkdz2CEUoqznoI1jnD5i5g6YrIOGbHr6EW4cv066+XKQLAQJw81Jss5NMqiiLjOk7Gldn1vPAl5pqCKBlV6QpLgEWolhG7KUQWHnaZqTdTzaTuFQLTEdqCKWotM198xSepqOLwaTTMGa3PZPObiYYj4vReQpvxrhA9laqhhLf8CUxAqQqmF7PMSUKs0dihZqs1RkEYc2Dobkz6/wz84RFsdpq4KWpTQWTitb4PLvCPjrqqnwCPnHruFsWfOv1aBf+udfO52CMGxMsXhCIGMtdGzLpOQS2UuybSepZoWS+xsUWQh68Csi9Ri+ssRkBTJdQSXDqVYpV7VSimVcSh8aL3hL998k9RZ+FVLppTMZhzJuRqlscmptvrZWj15g3lkXtWy1fiAsDmhPxyYrxcowiCZMXovR4VWIbhNV7hWCOCrwDG4YoUzpSVXINTqnH8rJRcEiaaUie5CGoB4kkwr3Ru3pow+/lIF5neOWCzmjjk6hto22Vom76Xh59U1Y7QoOWdOjo7IgzK/fWfSu7Z5aBivGyYJ5Dxyd5UnT76d+yQk9MBxHnrZfez87+3vxl5g+rn7OqHSdT1jmzYghsrHT455+nO/Rhegj5EUTUe8c2NAq7zEne2qXMt2U4+5sh5GSlVyKWyKsq5w4/CYX7j+CCWPBslkVzaNUEsmlcx3Hx+xSh0ihfXJmseOT4hdZKBYbCCtchs6bEPOUmnt3lpEaFGbV6A2yVw1I2PPOpRQTcemG2EmldkodBH62NMFKHfvoFrp+xnDMPKxMnK0mLHZrEmaYbRiNA04/9pmN3mDE2V7TUVd+tLhRjyxGBFCHqdoo10zw759i3bYCFtCU+xiEaHppVthkCX621k2MTcBZCikVE2uQJRL6xVFKmUYkRRJ1auI1Xv+inHtRQVR5UYt3iHSbxqv3VCqVwy7fowXn1ENfrFDCMal94ZE1avug2x7E7TCxiRWoWxdqSyiUq2uY69bJ/JtcBn5HcumfhuGiBwDz73bx/FtHo8CN9/tg/g2jovzebjH++184P13Tr8b5/NhVb1xvyceFvmB51T1+97tg/h2DhH5/PvpnC7O5+Ee77fzgfffOf1en89bBsEX42JcjItxMd6b48K4X4yLcTEuxvtwPCzG/Wff7QP4XRjvt3O6OJ+He7zfzgfef+f0e3o+D0VC9WJcjItxMS7Gt3c8LJ77xbgYF+NiXIxv43jXjbuI/DEReU5Ennfp4Id+iMiHROSfiMhvishviMi/449fF5H/RkS+6j+v+eMiIv9nP8dfE5HPvrtncP8hIlFEfkVEfs7/flZEPufH/bddXwgRmfnfz/vzz7yrB36f4Wqkf1dEviwiXxKRH3wfXJ9/19fbF0XkPxeR+XvpGonIfyoib4jIF3ce+5aviYj8eX/9V0Xkz78b57JzLPc7p/+9r7tfE5H/QkSu7jz3k35Oz4nIv7rz+LffDu5Wif1e/8Mq6L+GNQDpgX8OfPLdPKZ3eNxPAp/13y8BXwE+CfzvgJ/wx38C+Kv++58A/kusHuMHgM+92+fwgPP694D/O/Bz/vffAX7Mf/9rwL/pv//Pgb/mv/8Y8Lff7WO/z7n8deB/5r/3wNX38vXBlFW/Dix2rs2Pv5euEfAvAp8Fvrjz2Ld0TYDrwG/5z2v++7WH7Jz+KJD897+6c06fdBs3A5512xd/t+zgu32xfxD4Rzt//yTwk+/2IvxtnMffx+QZngOe9MeexPj7AP8x8Gd3Xj+97mH5hwm8/TzwLwM/5zfVzZ1FOl0r4B8BP+i/J3+dvNvnsHMuV9wQyrnH38vXp0lpX/c5/zngX32vXSOsc9uuIfyWrgmmcfUf7zx+5nUPwzmde+5/AvxN//2MfWvX6HfLDr7bsMyDtN/fM8PD3d8PfI5vXeP+YRr/J+DfZyvO8ghwR1Wb6PruMU/n48/f5Zw8xbs8ngXeBP4zh5n+EzF9pPfs9VHVl4H/A/BNrDfCXeALvHevURvf6jV56K/VufGvYxEI/B6f07tt3N/TQ0QOgP8n8L9Q1aPd59S24PcEFUlE/hTwhqp+4d0+lm/TSFio/DOq+vuBUyzkn8Z76foAOBb9I9jG9QFgH/hj7+pBfZvHe+2avN0Qkb+CdaT5m+/G97/bxv0dab8/jENEOsyw/01V/Xv+8Oti2vbIb0Pj/l0c/wLwp0XkG8DfwqCZnwauikiTqNg95ul8/PkrwK3fywN+m/ES8JKqfs7//ruYsX+vXh+AfwX4uqq+qaoj8Pew6/ZevUZtfKvX5L1wrRCRHwf+FPDnfNOC3+NzereN+y8BH/OMf48lfv7Bu3xMbztERID/K/AlVf0/7jzVNO7hXo37/6kzAH6Ad6Bx/3s5VPUnVfWDqvoMdg3+sar+OeCfAH/GX3b+fNp5/hl//UPjcanqa8CLIvIJf+iHgd/kPXp9fHwT+AER2fP1187pPXmNdsa3ek3+EfBHReSaRzN/1B97aIaI/DEM4vzTqrrceeofAD/mTKZngY8Bv8jvlh18NxMRvtb+BMY2+RrwV97t43mHx/yHsPDx14Bf9X9/AsM0fx74KvD/Aa776wX4v/g5/jrwfe/2ObzFuf1htmyZj/jiex74fwAzf3zufz/vz3/k3T7u+
5zHZ4DP+zX6f2HMivf09QH+I+DLwBeB/xvGunjPXCOs2c+rwIhFV3/xt3NNMBz7ef/3Fx7Cc3oew9CbbfhrO6//K35OzwF/fOfxb7sdvKhQvRgX42JcjPfheLdhmYtxMS7GxbgYvwvjwrhfjItxMS7G+3BcGPeLcTEuxsV4H44L434xLsbFuBjvw3Fh3C/GxbgYF+N9OC6M+8W4GBfjYrwPx4VxvxgX42JcjPfhuDDuF+NiXIyL8T4c/3/nt+ajlN+lLgAAAABJRU5ErkJggg==\n" + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "import torch\n", + "import cv2\n", + "%matplotlib inline\n", + "from matplotlib import pyplot as plt\n", + "from opendr.engine.data import Image\n", + "from opendr.perception.object_detection_2d import draw_bounding_boxes\n", + "\n", + "for f in 'zidane.jpg', 'bus.jpg':\n", + " torch.hub.download_url_to_file('https://ultralytics.com/images/' + f, f) # download 2 images\n", + "\n", + "\n", + "im1 = Image.open('zidane.jpg')\n", + "results = learner.infer(im1)\n", + "im1_dets = draw_bounding_boxes(im1.opencv(), results, learner.classes, show=False, line_thickness=3)\n", + "print(im1_dets.shape)\n", + "plt.imshow(cv2.cvtColor(im1_dets, cv2.COLOR_BGR2RGB))\n" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 7, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/administrator/opendr/venv/lib/python3.8/site-packages/numpy/core/fromnumeric.py:3474: RuntimeWarning: Mean of empty slice.\n", + " return _methods._mean(a, axis=axis, dtype=dtype,\n", + "/home/administrator/opendr/venv/lib/python3.8/site-packages/numpy/core/_methods.py:189: RuntimeWarning: invalid value encountered in double_scalars\n", + " ret = ret.dtype.type(ret / rcount)\n" + ] + }, + { + "data": { + "text/plain": "" + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": "
", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAANkAAAD8CAYAAAD63wHzAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9aYxtWZbfh/32dKY7xY3xxZtfjlWZNfbIZndTpIiWSEoyZRuyKAOSSJimYYgfBBuGKX8wDAEGZH8wTECwAVqSQQmyaQ20JFKk2GRTdJPdbFZ311xZOeebY46485n24A/73Buv2N00O9NdTBVyJyLfexFxp3P22mut//qv/xIhBD5bn63P1u/dkv+k38Bn67P1o74+M7LP1mfr93h9ZmSfrc/W7/H6zMg+W5+t3+P1mZF9tj5bv8frMyP7bH22fo/XD93IhBB/RAjxjhDifSHEn/thv/5n67P1w17ih1knE0Io4F3gF4CnwK8D/0oI4a0f2pv4bH22fsjrh+3Jfgp4P4TwYQihAf4S8Md/yO/hs/XZ+qEu/UN+vVvAkxf+/RT46Rd/QQjxZ4A/A5Cl5sfv3txDiPXP4p8BEAgQ3T/+4SUghO7HIf42QhB8IAS/eUgIAYHYPC9i8z+C9whx/bPo8MPm5/EHAu+vn18Iuf4MiO7nQgq89/H5pEII+cL7Cnjvcc7Rti1CCLxz1E1L1djNx9FaIYSgbW18vJR4T/ea4LvPpQhoIbDe4xEYJciUoHGe1oMSAinAAy50F8kLMtnDFIL+UKOk2HzEEAKE+GurqkXqgsVySt00+BCQUtK2HuvjdZRSEEQgEBCA8/F3gg9sj7aQQJokeG9pqhVCgDGaECBLNUpJlJK/5aaugy3vFcLknJyfUTU1vbwgMQlBCNq2YTKfE/CI7jURUCQp++MdjNEQPKFdIkTY3L/r2y42r1eVNQFI0ozgHUIITs4umM7mgo+xfthG9v9zhRD+AvAXAF69fzP8u/+7P02aSpRSaK3xIYAISGQ0JgLOOUAgpURKiRAKbz0IcLZFSo2XKU1d4WyND4LaOpwN+ODJk2SzuZwPuOApyxYpBEVmIASUEkhJNFRgPl8ilcIohTIKIQ0+CNrW44NBZz2kTGjalrzoE4JkMBqD0CQmRxmDsy3eNbRtiXeOpq1wtuXq8oLnz4+ZLlccH1/y5NkJZdXSuIDSmixLKJc1w/EY56FuHDrJSLMeUsBsscDXlt1U0KNm1rQsLOznirvjhHeOFkycIg2WO70Rxn6JZ/p7/M/+p7/A4cEAKeLB4GyLbR2rVctvfucpqzblve9+kyoEWt9y1goua8fFvEGjEEpD6iF4lIT5siRLU2zj+Zf+yL+Imy/53KsPmE9OaOZP6PcMCIkSklfu7bE1zhn0E2R3eIQQcN5hLTx9esqz88Do1pf5T3/xr/Hh8RN+8is/QagsbeMpbcWvvPVNWtGS5All3YAW/NTd1/mf//f/FW7sHyDskvbiN1GiRmqJVAmg4hkZAkGAlJonj05BKrZ291ktpmRpxp/9c//2x97TP2wjewbceeHft7vv/bYrhID3Ae9AynjqezxKyM5TAEKgdfQcPh68BG/jKQ8IIfE+ILVEK0NwDUIIlFQIA1VtsQGciydwd3jjkdRVi1Qy3vQQUAiMVnjvqZqWLJPUtiWTEo1HC4FJBG1bkSoBskF5j3KCxaoleEuSj/A2kPgMD3gvGIz2sW1FH49tG4zWjMcjprMJX3rzVeq6Zr5qOD294NnRBUen59hWcHLyHKMTiixlmBrK8pzKCbaKIZU2lMpQ06dWNaK1VApO5i2LypIZyU6u+dlXd/kHHx7TIlBa4p3HB09A4Gwg+EDTWtrlCnf6hC/tDmhQ9PKc49rxV77/IY4WYQTQ0pYtRZoSAqRp1t0iiQiO8VaBpMW1DfP5ikTnFMM+vmlpre3ueXcPQ0DKeI+dazFasrPT48adO+g0IUhJ7QO3btxguaxpL8+QLtBPUoITOCR4gfIS7z3WWrAtQsRD0geBEnITm4huv0BgvpjhvGRrZx9JjBw+yfphG9mvA68KIR4QjetPAP/jf9QDoneKJ6tUa0+1NjC6cMSDv/b4QsXwSgoJTUvbOpxzSKXQSYqzFoLF+RgWuuCRUkGI4V3wDqM1zloEIoYrXuClpPUB70GqBJPEcAIEzgeUVrgAVWMxaUAKS2oksGR7oJBqRtvMqasE1/Qpa0dA49oKISRplqLTlKHaYT69IDcJdVuTGsVgd8StvS3eeP0ew9GYxarhg48ecfTsjI8eHzGbTnHOY22g9JayrlFaUfRysAKVZKyEpAoZcrcfDzBR882n5xw1Ap+AknSHmgViCFaWDZcXU2azBa5pqerA0sGoDdRCspNpbmylPJ5UrKxn1VqcTnA2YH1LkqSAQ+Cj0QRLXa1QSlFWNcakVKuS6XzO9k5B8OCFR4iA96IL2wPDYY/5icf7QNtYghdYFw9hAggkidAkOqHxLdpLQhDUy4ZHj54w7I/IjdyE9TGkdAipN2F3AEQA5x2tjekCSiKUZBNDf4z1QzWyEIIVQvxZ4G8ACvgPQgjf+51+XwiBUsDae4WACKLLuUI0okBnXeE6T9s8HpSS+CBAKYJ3uNbig0cphQ2WQMBajxQBJeQ6vcJoCWmC0pLOJ27C0xivpxitEcJE4xTEG2cd2iRdVhJvpgCkABEcRjo0JbRzekrQWrDzKyazksYJTJox3t4jeE+a5iSJQQqQWhJcgOARrmaYaV65e4MvvP4SPsB8tuDk7JyTswuOTydcXi04u5gwsw0hSIypMFpTto7tnV0a55iJIdPWUaeeIlHRAzgPXY7XVC3lqubJ83OenF6xuJzSeEVjCjKW1EGwsoqRSdlJJVsppD7QMwmlrTlrAt7EK+bbCi80aWrY3h2zmiyoywXeOfKepl+kKCnw3iEkEAKCsMmbtdbUzZy6qmNuikAEOqNradsW7wK7gy0uVnNq28ZDVBrKsuVv/9Kv0NiSoZnx2t0+d29vo1P9wp6J0VDwnuChaSwCiUBBl5p83PVDz8lCCH8N+Gv/OL8rBGgt0VLEkA0RczK/eS7iMRYBCKVlvCkd8BA6YEEIgdQS27iYW4XoHQUaM8gxvW0IjqZasb17kyLPKZcTVvNL2qbCe0cIMXTyIeCcj6+9Dls7FyqERIiA1hKI4EQIPibz8RBFBEm05YBUAiVAmwDO40MghAVuUVM2LT5InIO2aRkMhxijURKMEjjf0stTmrZBa8nezpCD/R1a61guV7TWcX55xeXVlKOTS54fXbAqa/CWy7MTjNbkvYTt3V3OzhdoIWmaGtvYCBoEaNqW+WLO1775EaiEm7s9TmYty9ZyMCp4/9kV8waCVzS2JTGand4W/UTROsXcBoQP5Ebw/P3v4nfGzLd61FVDuVyRJIqin9LUNVLJGEXQRQrdpt6ARkGQdmGodRYEm0NRagXekijY294hyRLymWFS1Tx+3vLB3hHj4ZC0SJnMPN/43nPOziZY79gb93njjQcIGdGVINaHu0Ko
dmcL7E4CZ9+NdWPuEESoAptW9DL7/GQ6eOksQhxaQEEQAWm2U+VrasBKZhSJHnJKKAckJSDwnKOeRkTGkrwMCaymPD0Qgs3XCCkwVIx2RwwKUrl0mSCK0NO3u7bG3vsbC4xOOPP8Xiwjzd7hzLy8uk46Fny+iS169cQlufrOKcnZnLWOtnUdMdT06dvex9w5myyHxIvWAW4G6toSgL8rwkywpGk4x+b8JgMGY4nDAcp2TphMmk8CksVXrng3xPB7TrnvKktR9Yl6WuLAAtxjjyNCcIJKGVpFlOGIaYslKIW9+2LjabvPOZp7hx83a1MBqKQs80f85ZhIMsLwjDEKUkrnzz9/FbusgCJam3EsbjIZev3+LrOyM6Cwu8+73vZXVNcXvrDaywdLot4kgQCp/35ZNLApywBLjqDRH4Q5JvK0RVfMYKpAKtNft7u7xx8SJ5UaCCgCQKEQhubhywd9jjcPMe7dUT1OsxTSXoVR4USiq8ug0SV5DlD3Jw/GOiSCJ0SW+/7wP3Ll8mW1jB6RKmvoY6RQjDaDSi0Yx80EHgkx5lWaAqX8IoaSB1RW6tEl1sdY7QZYkSvu0tSk2pS776wtdAQBjHBIFia32He7fXufr6JWqVQuHhC+c4cfIYvd4BW7s77B3sVzsYM3gd519Bn1rpz6JiOnQWHsXNspLxaII2jsk4o9cbsn8wYP+gz2A0IZ3kFYHbUhb2m85tD5Klq4Fe9Qp6cEk45zmPzlXhgPcNdpwVjMc5eS6pN2IE3jy1LHOM8kwSiaCWJIyKgp/4qZ8nCfwCaZyP5tXa0/GkFBRFSVhlnRVGE367tosIwTsePcmTTctXvvEaX9zOuHJji2u3DvhzP/IDfOgda7z+mausdUvefcpzBX17IpBBAEUx/Uazl8jaKdrl39TpCBkEaZpz984dCu1X6Ga9Dkj2+zk3N/bYvneb448+Q7PeIFRV6mQY+HbC+sFspDN0kSJn3ooAljAIMNZxe3OXhbkutza2ObO0yoUzx/japU0QDqdznIDecMzSygJ57lfXNMtYECUYRzrJMDiMSCoFbxWHVLVqRZ5XwYi+DQ4CxdzCHFRopBCCMID+sE9RjNkpSrK04OBwl8I8zcb2JlIqrxKvejpnmdmeC9xMPpYXmtF4wmg44uBwxM7uIbt7PYbDCVmuKYoqUvaBM9y/UUtU4BGy+lj1zQU8SKAG3+2WZYlzlrIwVVtqMbpgfX2HLM8RuSTPPYNjShS3xnjychCQxJLzR46zsrDIq6+9QZGlVfStb/uV8pFLqop2ktKneFJ+m7aL4FMXW3XJaJIyykomGweMx0M6rYCjS02u2IxOzbEQ57jC05RUEFYtis8cttZDw646G4gKBLHWUZQapwJmQd5K4UpNEIW0Wo0qrVJw5dY2h3s75KND4rhLEgiCQBBKv1s2pCYUhtCMwOqKNuWfg0AwyQsub+zTUYK4Zbh6/Q6Howl/4oPP4tSrfP1q38+Pwpj+KPX52EZX+Wg5Jh/hDiboUR8TFxWQc5+WFQQS6wxOVKOJiv7knK7ceas5lLUkcY2N8bZvsay3FQ8jz1QJFAShRJemCrbwg9p8nDGepAwGYw77I/b2euzs9ej3R+RZgTX48L0Hdr1vvgQPCM78Xw8MeIWcfsmUdkZFPp7O0Pzj2+0m1hm0KX0Kq7U8cv4crVaLze1DAuF3wFotYnGuzdG1FY6urXDm9AmOH19jYa7D3FybS1fucPG1N/zZ3Xm79Vm7aB0oCMNwdqb8VuDFt3SR+ZR6Qa/X487mNrvDiMfPneZdzz3FnRub7B4OScKI/b19bt68xbNHFlFhBCKgLEu0LtGVCy3TM4RQCFXZLwu/SjsrqtmMD4AXFc2o3WoShhFpZrh+b5/+YMxwf4/W0TbPrQxZFgNiaagFjqbQHA17PCKuc0cXFWI5vRyFtvz0Z7/O2nybuSu3efzUUQZ5RjuAh46v8eLVAcb63TEtLHlezCBsaw3OjDlR75MQE9scMxzgrIfxBY5GLSaMA6SEOFDUwoAgVAjnjWaEVEzTSkajIUoFaKMBhwokC4urNJpd0jv3mByMOegNODjoc3DYZzTOmWQFWa5xVqC1IUv9oNwZM+M6T9XSQlRY3AM0t+mDZu5ds4G3L0ypxAyNnApAg8Ab3EzZJoFSNBsxcRgROT+YjuKQW7du8fBDpzn2fd/FsaOrrK4tsrK8RLOREIUhcRQhlSKKI4KKXnfjxl0C4Vv8IAgINJjCcxcRAUXhD2HWWuI4/PalVeEckzRlf9zjsYeOs9MXYEpef/0Nbt/ZYGHlGG976imGl97gPc8/i7x7B4d3ICrLHGvcjPDpQw+mZjZ4hEsIwtD7P0wDGVzVbgqJD8arfOZ3+obdUcHBQY/2muZss4/sbYFwhCogco5IlKhigHMaOSUszlysHINJxmCSI4CXbmzw3GMP8T0Pn+Pu/k0s051Wop1iMM5Q0t9ccRQyHqbU59cY9rbR6T7lcOx3IeuIQsVoMGYyFORZTqtZJ4xCag2vMnCY+zaqVWpmUEHzWngy7OXLV2m0u/zLX/oCozQnLwv/ehl/TtGFD9ijgszvb1TT3u/+LuUqfqYfNVU72HQjk/igdyVmu1QQKD+HY5oN7dvcMFSEQUCoAhpJQqvVIK6FFIWXyEzPizjHJz72QY6sLqFUiKyMbKfhh6LqUIIw9PSyMOB9738XfyP+z/n13/g8v/zrv+ubCKUw7r5Rq5SekB0G/uNv9npLF1mhLa/f3uGIGjMykmcuPIp1MBwOSJI2t2/e5rn3PM1enjMcTshLQ2g1uig8p1AKbGkq9rbzL75z3vZLVgUXKK/iNdPDAgjlJfZRFFXzK5iUkruHGZ/7vRf4+mtX2d7d97uIUBU1yKBMTjzYuM/Qn92KzoMDD+TJpaXh8y9f5c5ej71hDWf9XalU4HOqBxOWl1pko4z9/oS+rrGjBMdrbYrNbXTpFwLr8PFQ4xHWePg5KwrCOCLLcvrDCaNxyniScXDQZ2+vRyAdSVSdWZzBGEOe55Ras7OzT5qVOGP9uXVKzJ3yCaf1NH1qYvqMfLFJUdnIVYCI/+MD4FXgxZ3eJUoQqIAoDAgCrwCIAkUjSei2GqwuL3BkZZm1tWXWVpZYWpmn0+nwmc9+mU9/5rOAraB1UM5QTxIazaYftEuPygo828dvmNN0GY+CNuKY73z/u2l22vzyb/0emckxpW91pRAoqYhj6c990+f9Jq+3dJFpY3jp6h2eWBJ0jh7nRn/Czl6P6ze2efoJzQ/98PexkMTsJd6MRVtP5tw+GPL6tXusLM2xONcgCqKKHGtQgaHICpwBhPJnmSCYadCEUpRGcW9jn73DDG281Fc7xY2NXSaT8WxY61FFZurhSAmS0N0/LjFlRdyfHU0vUX38xr0DRHAEbOlFlaVGxA16wwIhJuzu9oi6y7zn+Y8xOhhQu/caphihS28C4x22SnRZAoJb69uoKGI4StndP2Rze5/RcEJeGKwRgOWpJ85QW/TmMcb4KCdd5gh8NKwp9QO/pb98+/fAjsT0nHd/mOwV0WqW0
BKGklBVu1EQEASCJFQ0agmdVoOlhTmOHVtjrtsiigJWlhZYWVlmYWGBei1BKTkLtVBhSBAEfOwjH8AJwW/94q9gtKYRB3zi4x/m2IkT1BuNyl4bEPa+EzFeGOpHCoJABiD9PPDatZveZtwptHM+GreKgSryqaWEIPm2RRcdxHFErRZy8eZdrjcyzh5bpZmEPPfOCyg0v/u7r3K60aJebyCE11xdunXAP/n5FzFG06iFNOsx83NNVlfazLcb1OsRWVZ6NoOoBqlCUpSaje1DrlxfZ3t3gHER1tVASRZWj7B67AyMt3B4zqJfGAW+0XGkLiA69gjvf/8c23e3aO5uEecKnZf3GRLCY50eDvGp6EIorCsRhLjSIpqS1ASkvRKRdHj4qXczf/w8W9tfYHjlRco8h6DpWy8pyNNshqq+cvEar75+w3ssTgGEabEjvcOjtVjtvd+rAdJ9+2o5rSIx05eKKY1KVpYFlTFooO7vSKFUxIEkUt7PMgpCWo2EbrvB/FyXteUFVleXOHpkhbXVRTpzXWq1BkmtThTFPp/MukpzBnmWEcUxKlBIoZDKo7jd+UXe9mSf3/nFX0UKaDcSPvyhD1JvNADP9vGvrkSFsjoy+HNXkafoQjPJx76TQfCFz32VvLAPRCTJipJnsa4KqBfqvoziTVxv6SKTUvDw8RVqso+1cHdzl53dQz749kfZvHuX8+dOcuzYGvV761y5eos7L73EojNsLRxFNJqY4YTDQcHh0HB3Z8Irl9bxMkNDHEe84x1n2d7c5PrN2zgXACGOAOcMznkARQhFXKvzvu/6XpqtlPFk27+JVc0YYyhtiShLT9mKAh4+f5rHzp3moa+/yJFRkxt3d7C2ChI3BbgCv5U6P4QWDq+g9gXrjIU4RDhV/fwawlmOL7XZLgcI4VBCEEo/4B6kGbOZkqvmV7MC85c/IvoCck5grLm/u4oqftY6pKoAh2o3UlKgAlHFFkki5eNfZaBIQkkSBzRqNRY6HZYXuhw/usKx46usrSyzsNil1W7TaDRJkhgVeXWyFBLMfVsErUtwHhUWUlYMnMS7OwiJKTXOGLQVHBwecvnylRntbK7bJlSAKTzaaDzh1wFWe+6mwKHLEoetzmreinv3cMjFSzfItfWeMJYq+MPvfkJYnPQ+JJF989Hsb+kis9Zxb3sfsyRYW2jzxEqLRlJjoV0nLzXb27s8NrfMzv4B3/fnfpjO4xfYef0SW41FPvf1dTKRg4qooC9cNYj2iJpCUHJ0dZHhcMjmziG4wKNw1XBYOAVKsnj0FHOrp4nENmPhdy1R+adHlUo4NCFRGNBsNEBK5DRKKKojavM4Y5FK4owGUyJMNc+TvrBEGeCsAhUSBCEGiYoCbCUglDi6qyfZER799K1pgC5yyll7N73uz/6+CeETAodiMsmwndrM7BQArdm8d48oDAlaIWHgd6MwVCRxQC0JaTfrLHW7LC8vsLqywJGVRVZWFunOdeh0u3Q6HZIkJogTv09rjTNVKCMOV3o5fxApTx3T/v1QofSpNFLOlAVCKU8swA+ji7xgPEn5u3/3H/LVr79EvUIXT504zWc/81kuPHKOVttrA8EShjFZnnsVdRB60kCgqqbPoELFZz7ze+wMUv+e3De/ry4HUqGCgCNLbQI996bv47d0kYFfpSU1Hj42z619w92tXay1jAvL7Tt7bKys8Im1BcpGjd/8td/m8Pp1fjvuIpQAoSqmQBUTXrVBOIUVir29Q5a6LR46eZL+0DKZFH4NC0KM9rqnucUFTj/2GFk6IV6Zn5mg2qq9mOVBVodj67xmbYq/GQeqHvteX0gkIbaQUJhK9BnSrJW4JGE8VhCFJLWYNNNIqTDOMBoc4ExJFNUJggBRCsIgAGcZDIcz5sX0csIPd0VliOpZ7P4mVkoSx1HFgvHcPGMsD50+xnPvfJpLl24gpWRxvsnxIyscO3qEY2urrK4sMLfQpdXuECYRAkGZ5URhgApDn37qHLY0GOuR0ek5NwoqvmI1DHPGzsYqVDxIJf1IIS01aVrQG4zZ2Nrhzp1NNje32N3Z4/Bgn739PRqNOvOhoGOHXMnG5FmODEOfCOMEspp7yqiO1RYNGGeg9PeCkIIsz/n1z3yJcaGpeHhUtBmmC5IMQhr1iL/4I9/Hz/7z//VN38Nv6SKTUmKtYDBOWVSSs8fmWN8f8bU3bnN+Ynj+mUeJ6jHNAG729jFCcu3GTV4VNYpoDREFFZoEGI0vMAFSUpqSS1c34cxx5lodmo02k3TkGRSEXgbvBE5nDDfXMecfZ5LC7v4hgfr/cfen0bZl6Vkm9sy5+rX705/b3xt9RmSfqZRSSjUpk8iSaARI9IgCSpQxzeAHo8rNAIaLYWODXS4PbFHYIAQukFQqIRpJqNSlJCSlspGyjcxob3/v6Xe/2tn4x7fOiTRVlqhIU8TIHT/ixInT7b3XXPOb3/e+zxuAl7KkNYamtYTWsF5XPHh0TGMsvjG852zOKtyAUML9tFJS++sQwq6JEAQEfkmSKWyUUzvBzMW9FNMljhw/usfxozsEtqJta8JAfFar9YrWODkzKC4uZFE3yG4Rde3sQAs1OQw0eR4TRQFKhUSRSKEePTrENBX/+V/9i4xHQ0bjEWEYyAwpDHFGhtPnLJMgCOknWTcOiOgmJF1TSHx6IaJ7NEZ2WuuMJKZ4jw5D6tZRlC0nZwvu3nvEg4eHHByeMlvMKaoKYy1REDAe9bhyeZd3vuMZxuMJg0HOZqTZ/uF/yrXv/m5QWmKPnFQ/2kNrGqRSCERwAG/oLrXix//Vz/OFVx5RtqJ5PO9WXdywFIRRyM3r23zbR76RH/4nX6WLTIEIaG3F6WJFuznkA++4xU/98he59/iE+z/xUaL3Pcd79nf4iX/9Szy9tY1xFheAiiICpXGNkbulkh0M5EW0ZoHCkiUpXnnyPERpcd2qbqZiKoe1otaoTMtsofhvfvRnqKoF8IY6wHlPiuLtgeWfv3wPC4QePhQpVtv76DiXM1zXOLDd7kIQEKcheZSyXHmasiEa9uT/dcHq1ljquuTx7c+wePAltr04rIumpG0sOohQoSYKI+IwJI1DkjAkjjRpGjPMUzbGQ3a3t7i0v8XW9iYvv3qbL73yOta6Tjcog3ljWp5+6gnaqqZtGmzjMDpg0B+SxilBGF6Uyecmy/P0S6zDXij1leC/Lzp98rxFdSMBD/PpjF//xOe4e/8RdetYrQviOCZNUp596gZb25tsbo7ZmAxJ4hhwEuSoApyzBMUK7z2z+ZzBcIR2wYWIuW3lZhBFEXUl6pgg0ASRdJGXq4Kf/OlfwhBibCUD9S+z24jZVBEHnj/+B343cRK9UVa/icdbepF5BXmagap5+cGUV48cYaDYmeQ8efMavV6Ptljza5/4HL9+3OLbNZEOSMMAk6YURXMhllPdQlM6RMeh0K90SJJmxFoxGaY8jgrqRmONJ4il+7Scryhe/DTz5Zzv+O4/yvbVW7z24qcuDsjn+ciiKOqU493gVylNlEQQxeINQ9T6uhOmqiAgSkKWJVRhH6sKklAz
2RixWNfi8A40aRQzvf8FDu+9zMgZjG1ROmM0GNHPIob9lMmwz9bmkL3dCfu7O1y6tMvuzjaTyYheLyeOAjmbaEWxLnjptTvQqeo9DmsNTdPS1K1oHlWIMS3WO07OTvAewiBEK0WSJkShNIWCc/yC6m5kaOFidOr4tm2pm4bVSobnQRhhu/Sd5194kre/42nCKKYsGiFtIezGQIdY64njWEyxzmBaccMDJF0yT97rA+KQt9YRx3E3a3Q0XQNE4GKepqrw3vHil17i0dEpugu5tqbrsnZiFN/lY3/Ne57im7/pa+TpfVk5/j/28ZZeZPgOtx2J7aBQhqe2NnnX257gYFbzic++wmhZ8MIgJ9aKdz/7BPXdV7maD3nYwKqTxvjzHkB3V03TiHbliLTc+ZumIQwUo1HG4XGFdxrtPEEgiLfrTz7FO7/+wzQeJrvXUS9+6t/q6Opu0Z2f+d6Y14Zh+Ea4eKfoOBclJ1lOFEW0bGLrlqifEicx66KS3C3vGPcztgYpj176Apu9hCtxxhPZBn/ghbdz9dION65fZmd7k42NMUkSkaRpV9J5VNcJDMJAiMlRQhCFbG1OCHG0XTkahcLJ99ZQrNcEYYC17YVNResuXshZwjBiuViAl0AQ0xryvEfTtlSNlM6uk1ZZ7wmjENOajsQoQuMsj1B4wijmnFcy6vVYFQWtaWlbQ+ut2HxaEXmL4iwiTAKMNbRFi+0YLeIlawnDkLrju3h/znPUGGewjSVLEpRSvPTF16gai/VKdrHzPO/zcYXWXNob8xf+7PeS5ylt2/xbvdr/cY+39CJz3qHCc/mM4vHJnGev77KqKu49nnLvYME1ZZm6Grs65dbu07w6GpEGOWZt8OcceN3FG1kg0CSxRoUBYWfft90AdjSIOZ1ZjJFznNYK2zTc+dJL6GzE13zrtzPevHSxgi6CI768qXKuclOy2AKUnBmCoBuOAijCKKbf7xNoTdnIGx2EER7FalmAhr3xgG989xPs9y17H/nDbG6MuLRYcPn+A77u93yHRMqGknIThqLJi+POxxXoDrIjQ1yZeynatuXD3/pN5L2c7/+7/xDTqTSeffZpPvzNHyKMApqmFSOkMZ1fLJJy0DlM0+KtRSuNazUaRblcYJ3FtFbmU1pL3nY3oE7zDGMcjZGfW1U1dVVf+PDE6Ok6BFtAoCOappFZXBiKt8xZ4iimqiuapibuEAfluhCTrlI4Zy92HN1hEjphlwy2vSeKRF1StZYvi4+jwz6DgjjWfNfv/jDPPnML7yxJknxF1/FbepHFYcjO1gb9qibH4ZYNn3/tMZ/8wj2MDfHaEWpL4h1Bu2K5WPDO557lh185oqzrN+QwSu5OvrMyaFcReU+kAwIlu5AXkxOBBqM8ziviNMEaCU64+/nPcPPpdxJmI0ItxFkAlGjxIhSBbknCDK8UidKEviLE4QJpBZtGhMNeKbIsw3tPWZaEWuE0JHFEua5RGi6Pc/7c93yI97/zCZEjaY2zlvTuPcLHj4ijiCgSn1QQyIwpiqJOUOzQ6jwMQqRBzlmCIKCualrTsn9pDx2GmKYl1IqdrU2SOMJZQ9CVvnVdoVGYqsYZUYYEYUAYhd1T153hFTSyoK2x4KyAR4OAti0kON446fR5SYepG0OSyqJq27YTdIu6Q4bBnjSLJae7brtu5blB04tUq7tpBD4Uc6a1WGuIohjVnTGtdRc6xta3GN/y6uu3ZVZoTfc83gir11rzjmdv8ie+57u6sUtwoWx5s4+39CLzHjYGA1J/SuI80HI0XZJlOVeuXSJLE57Vlv7925wcHvMLH/0lfufXfZCiPsRbwbWd19daR3gvFpY4soSDAUGgqLs5k2lasA2Dfko78xI2oDwqUqhW05Qln//YL/KeD76TjWFfWr6dPSYINHkY0i8XXJtsE4QReRCwMT1iZ5hz0CiCVqKE7Dm9Kow6Fbwi1gFBEgsWOtZs91L+5Hd+LV///qdpm4aqaEX90N2N6WZ9KClHgyAgjhPSrId1whSxxtA2NXW9EPewt8RRgveWddFw5/ZjiT7yln5vwPXrl9BKHOMauUAxRlwN3aK1zsncrmtoBEGAMa3MFi8WtzR05E80RIFIucJYE2tH2zZYrwgjhzU1pmMuvmE0kwYK3tE0csMJo6ALs+gaH94ROIjjWDqgUYRSsutFseza8neBDgVF19QNKoAkTKjKltZIl9Mae6FxVt4Thprv/PZvZTDIsM5iGyOVwFerdlFpRZJIuEEeQ57GjDe22RxvCkexqWl8QxwGaK14+PiYXpxw8/IlXj++I0ZD08hZLNQEOgZnSMOYMFLo8/ifc6mQUgwST9VPWS1rmnVNGMibrtCUyznl8WOuXdojDAMCHaJ1Z4zMMy4/vsfv+qZvYndvl8s7W7z3p3+a6pu+nb/1j3+WqpMmaYBAY72RVCPjCKOAJAppW8MgCvm93/xO3vf2Gyyms05664giRRSl9HspSZIQjMdEUYQOQ5mnWdsxJs+9c/J7FJ6QAO0067XM1H7u53+Vn/3orxElKUmS8b73vJvWBXzhS3fZGudMBvmFzMgY0xXAnTG0o0LJeUjRNE0XU9UpRJQmDEJR2YeBtPe7wbTyYs3RKOIolmgipTFG8Ak6DHHOU9YNKki6DqZUGU5JEKGzhiiJcXUpkBscQSDd2ta0RJHg8Yy1JFEs80QUCZLaUlYti3WFM9KJ9E5kbp1+jCt72/yOb/l6nDn3mOlOD/lVql1UCpZFwXxd4pMc7xx5luOslWA4YJxFbLk+79zfYevRAVES80e+8xv5xd94BYN6Y3bjJM8kiEI2t8akIURRgKnWeFfLvCdQGNPQTxNWC3lRwzTD0eBaSxxBnqY8+973sjkZsLW9wfb2Fvu7m2wNemz9wA/yzv/kT+K0ol4tyH7pF7i6P6GXxKyMoW4MCrHGKCUmySgWB65pPYMk4vd94wt824eeIY1Der2MNE1QShMniZRvShNGES6IcF6Jq9e2MgRXIkQ+D7FrWwks1zpgtV5z8PiYl156nd/4zc9SlGvA0ktTbr/yRXJdM4yeIN3OO/ahLBbbqS6sfQOlFwQBgRLKSZwkxKloIpWWcrRuGuI4QnvA2g5bl+ADmd2FUSRlLHR/s+n0iRpnHb0u3cV5j7EBrXWgNUVZdbAeTRDIDS7NUjl7aUUWpRcwnNFggA4DUZ10DgyP51c/9klefOkeoC6YH+dVgQ5DPvC+d5GlEY1pCLrrIdCye77Zx1t6kTnneXw64x1RxON1IXKlzrznRGXIZj/BzRsm/YjdyYAHd27zrm/9CM9cGfPpV486f+AbQQ+jjQmX93elm4bl8NGym694jD2f+Ft0EuNbg2ktYRzTeIOKMp5929v5U9/xHJ3IgaZpcCioSpq25fjkWHYX60itI4lCsjSAZfekzo+J1pFnGbY7B/Zize//8Hv4T/74t5FlclbRYdjNsgDvyfseTuagFKui7JBoooEMorhrUgRY6zqUwgG379zn/v1HnE3nVKVgvne3xnzgPW/jiRuX2b+0R56lJEksiv6ufFqvV2RZStM
YWRhBSByLtlB1rJFzYpXzyKA6CMj7uVB5lYwynLUEUYQ9H0p31pE4jromRfBG+etlQQchKOPQeNIkAB1iHaRhKiLnMGB5srpAC9R1RRBE9PIejRH6r4cLEnQYhGLpcfDRX/w4ZeM6hqMkyCglUU9RqPngB96JbWuCMKAupdMZBkGXKPrmHm/pRYb3xFHEulizM9khXq/o9/pEOiYINUkUEAQldd1w9/4jrvYH3HnlJdzd1/nwB9/NZ17+CVQnitVxxNZoyAfe/TTb20PiNKUtV5wuZ6i8h2os7XyBXRfsbm1x1s5pZw5nPFo7NrZ3uPrsC9S9bY5nCyJVkyQJrREOh1kuSI2Ri96qLhgvYDjI6aUJWomaxHQ4aOdDBmEPZ6AXx/yub3yBP/9nfj/9PMF0F48xDmsEGKPVG3feUIf0+8OL9vVyueTRo1OOT2acnsx4/PiI6XRGWZYoHONBxrueu8XVy5fY3p4wGQ9I05QgAO8ERorXeEQIrLQiSVOUUuQ9daEptE5KUmvai+aCQtrgvjtP6TCW2WGgOxt/1PFHwIT2AtZqu9aecW3nBKiJ80xKau+hO2sGgZamlHVkOhaOPp5sY0gvS2jSiLpVEITS7u92RmHYG6IwRocKbzQnJyd89vOvgjpPPf0yz59WXNrd4smbl6hWBWVdC8a9E0k7+1W6k0VhyCANGekeUS9jmDv2dvfpDwbSUfPA6X0cnpPpkjvTJU8M+9x57UWef+IZkkRRtQCOzUHC9373t/D8UzeYbA3Z29vh5OSU/8Pf/BRVY8T/pKHFYaoVwyzhbCmlRhRHzKZTXvn8Z3jy6Wdp3D5JKPaYNE4uOnhpkrKxuY0OQyIUcZqysbPFxrCH8seEgb4YpiqtScKQYeb4xvc9x5/6w78TZwyLeX0RluHVuRVf47ycj1zV4JZrXvvSbQ4OT7lz5xGnJ6csFgu0gl6esbU54mve84yYHTeHDPIeQRDQOiNIhsayXqxp2oZePyfPM4JA3ACBSN8vhsuuU7s7dBdHpXGua1ZoLc810NRNIxQupS90i3JTUHgt80FnjFBJwlAMs1255x20dSVzUWcxdS0Y8bzfqUt8hzjXwpt0TrASVuw6XsmZz6HRKiBJEwHfxlJ2etsS6IBf+bVPsqha+Zyz+ItFpggC+JYPvZsskfgnHQTkScJytYDwq7hc9N6xtzUmP5lBLM2NOI5J45hAS7KJSno0jSXeu0LsHd4WvP6ll9jUY5xTeK3RoeIb3v8C//H3/l5sU2G7POmTukR7OmxcjWkqUI4k8gx6KdOoEihma/C2xZiaVVFhwwH9QYrSgShGkoTQepK8RzDexHowRYVHEwQxaR5L1JBSJFF00RXcHWf8me/6Tt7+/FUCDWVVSMmHQns5B53fQe/cvs+vf/JzZPfus3PnNf7NUUk/y9gY97l5ZYMrl19gf2+TYT8jSWJpW2s5N5naSNiG1sRxSpZJxplDPFjG2G6BqDd0e0EgYRqd1Mh1tN8wCAnCUMydSuz5wtf1OHNO9LXoboFa71EGoo6zcd5kCCNxZpu2vZhlKqWJogCcw7QtVVF0czJDaVoUIVGcUFQlYdEQG8uiqAiTGFBdU0TTNgV1VcnIQWtaY6kaw89/9NeoW9sZXF1nUpBKZ3PU4zt/x4dIYvEaKq1YF2uiMLrIYnizj69okSml/jLwZ5DbweeQYL994IeATSSs/Y977xulVAL8I+C9wCnwB733d36bn09/tMHxK59he7KJAq7t77A5mbCzu8nmxpi9cs1rn/0Y73zvu/m6XsaDj/6sdLK0wnUXTBRF3Ly2S12ssNaQpjngqMo1zrRdsJy7CI+IQs3+5oR7D+ec927DJEZpzfHJCa3us7GTdpnCriNKFaimpViVgonrLOxZmjHMe4RRAFUtAtXunziK2d7okwYa19S01mKrWmz13TD5fJx6ZX+LjW/9WpK7++Rf6PHCd3w7WdqVVwEXd3U5VxmclY5dksREadQp8kFHIc4YglS6bt77C0eB7vIvdLcTOWPl7wiCztvpLnDkSkmZ2HRdRq3EqSciW5kzRnGMchJAYVt5T7zzBFFwIUhT3ffoMLgoBaM4pWwsoY5o0BilMF74matmTd3U9K0j0PIcgiCibRu8F0ezd55enoNSrNcljTG88voDbj84En2ldedd++55a77mPc+yvzckGw6p2zPmJ1OyXk7Wy7GLxX+YYHal1GXgLwJv896XSqkfAf4Qkqb5X3jvf0gp9XeBPw18f/fvqff+SaXUHwL+j8Af/K1+h0fx0p2HvDvQ3H10xO7uVb7vz/xBelkKOOqmJnj4mAdxRFOumTc1i1VBnGSsig4wGmjySPP+dz9PksQ4F4oiIQwErhJ0XS4URhkIPArHpc0BQSS1vPeeKNIUsynL6RmPTwqKPQmxUF0LX6UJYRhK+YPC2RrnLHVZcvXSRDKWtUA5JLRBM9m+ws7+VaKwpqkKQqVJshhrHaab0YDv5jQyzogjUaoMBjmgJMSdiChOCRHjZRh0YlwnLe+madBedxKjbimYNxJZQHVINmEqai2dPusFvKo7RYa3lrptCCIJDmzqmiCKcNYRhwFRFEremrNdIyZAOYfWlqi7abTW0jQNgW8JIsESGOM4OZ11ek9FfzDERylV10G23VB41OtRNy2j0YhgteoWK7RNQ7FeE8cJddNKHlxHII7ihCjN+NjHP01xzi5xXy71UPQSzR//I7+PvJfx4M4dvLzseOOoS3Fo/4fULoZAppRqgRx4DHwY+CPd//9B4K8ji+z3dB+DhLj/HaWU8r9NTuhyvcZajwlga2uD6ekJi0CBd4RRRFqWoODy9eusXvwSe3s7fGmx5uSlezgvtKKrOxu8421PEceJHHqlEc3NJ57maz/wAT72iU+yLksxXwYhvSDk3c88wS9/8jWO6hUOj+6gl4vpESeziiTdRXlJn7TnahEri8o4C3VD0LY0RcFT1/dJtFyotpMnDYdjdq7e4uB0yuTGhCBYc+H9cp44jHBakNGmbXgDAmNFAGvpvGGJtLo7O0nYQVSdcwRakaQx1jhMa6mqCqeQEYLutIRaEycROggucNfGyBlG6+5nWVGLuFY4/b4bE0Rh5xZWQpkMgi5r2TpU4MHbi93FWEcchAShpi0LvIqZzldY61mvKzyeJM1o6hrHGuOkKxjFMcbKWc+cG2ERyKmzjvFkJGV4J8kytpXxiBaRcBRHFFXFZ77wCsZJSXvRzUShQ80zT11le9zDGM+Va9epG8N6uRRZWaBFWMybrxffNE7Oe/8Q+NvAPWRxzZHycOa9P7fqPgAudx9fBu5332u6r9/8rX6H7QL59rdG2Lbl2rUr5P0evf6QwWjSpbfAU8+/na/58DdRr+YsViVfvH/Gz/zaS6Km94pnnrxBkiU4pUEnoGNUGLOxs8tf+Et/kSdu3qAo1hjToDVcvrTNd3z7t3BjbwsdKYkxWpfQtlTFguPpguWqpFqvKZcL6vWKarHANg3NaoUtK3zbSOyrtzxxeZf98eCiIRDogNFkh+Vszqd//VN4ItAhpqkwTS
PtcWdlkKw0eZp2bfYErbRIo5qGsiwpy6pDkwN4oU51F2DT1BSlgFJ1GDAYDhkMh6JxjGLC7nxYFQVlsca2Dd56CU9s2o7b3+BaIymfWnfRvxFhHBOnafezAryzOGu+LI1TEjV1ENIYS9EY5kXNo+MzGiJmRUNjPVGUdAs9udAwWmcJtKKqSxEDty1BKL4wGbobzjvq3oriP4wCyRILAgl5RIBG1jl+5ud+mbv3jzCtvSiNz8+eaRTwB//A72ZrawvvNU1d4UxDr9djsjFhOBow3hh1Z88393jT36mUmiC7003gEtADvu1N/yVv/NzvU0p9Uin1ydI5enFMP8tQeJ66eYMkCMA6AqUZDofkWcaVJ5+kl2aouuL2wymvnlmKRg7ggfK8/bknSdKMMJRQde09rm2wTYWxTTeIlMFXEGje8a53sbk55vmnbhB2c6EwDglDTbFY8OjhYxazknI5p1rMMMUa7WXQrJWTC627CJRW9POYa5cmJJEmjUJ6vRFt3bKen/LFF1/kl37+F1hUDUmaSrgehjjUHcNDSYfOSecvjmOyPOvkVML4qKuS9XrFcjHH2haUqD6UUqRRTByFHdq6lllUIGJaKYNDIUN12khrWryRhBhraqJQkSTdTKvbTcPzgI7zc2AoRCpnLUqHOBVStpbpsmRe1qwbg9UBLtCyOCPBG+RxQhLHRLEMp5M4ot/vEycpKE2SpB33MBQClneUZQGdpt97Kf28c4SBGHTxTiRlppUdr2n5iX/9UcquKaO786NSnjAKeOc7nuRbv/mDxGlGfzhkON5gNB7T6/dlQTcGfe4wf7PX9JtNdVdKfTfwbd77P939958Avg74bmDPe2+UUl8H/HXv/e9USv109/GvKaVC4ADY/q3KxUtR6PWgz596ZpsXZvd494c+xP7+3kVAQRiFBEWBf+02pztbPPrlTzKd1nwxDjnHtERxyLd88D1cv7bHBZPdS/lXVRXr1ZrPf+GLnE5neO/Y2pjw3LNPs7W1yauvPeRXfvNLGNNJd7zHE3Dp6g2+4d1PkEeGoijo5T1c2xB97vM073yHnLk8hJ/+NO5978M6x6c++zKvPDzFAkkyYP/KDWiW+NUBW5Mx73jvuygXU5wRi0nTNKSJKBiMkZDwqq7Iqxp1dMR8f59eL2e5Ei1nFIlyXbqBXaKNEimUdSKSBToBbUTbyEwvyzKqqiRNU6kcmoYszXDWUlallKPWorRYZpqmIUlSAJqmvhAI2y6OylpHmmcXXrI872GdlKpJLLO1uq7Fl5akXWCgvbDEgHSQneukWkgjR2aSbafWj/BVQ/j5z7F+/gV6vZymaSnKkuFwCN5TlhVJGjObr/i5j36MsrHdPK+zASGayA+89x08+/RNyqIgzTK01lRVBdBhzkWo8JF/8FP8xtH8Ta20r+RMdg/4WqVUDpTAtwKfBH4B+ANIh/F7gX/eff2/6P7717r///O/3XnMWMuol/Fv2oT/9+OE7//Q76f/rnd1Z1ZNFMaER0fwr3+S02/6Fn7us3+Ll26/zo9Z0b2hFLu7I973+/8c1ZNXRQtnRKntnOWf/dg/56c+9nPUpkfrcp68eZ2PfOs388powOsBFNcN3/+pf8LD4wUaiAJFawNubd7ir7zzj/KeKwrvLFNjiZ2lf/8fsvjInwCtCb2nd7dm/a1/GJ2k/LP7P8JPvPwStXEk+Qbv3/4IH3znLSbLz3FnPuN2vscHPvxuquUpeGjapmuqhJyHkmutqR88Iv7cZ5l/5HeyDjRt21DFiTDdrXT+LkLOnZP5Vye4ld1Ps14XRJEMe9dIdrYQe2PatmXpHWkcY51j1YqJU6ForaNuWsq6oTUWhxblvxYehmkbrLOkaUaSJKxXK8JIPF7WiChaaWRheskWwznWxZokji+sLlEcdxkGcv4EsQHVdUUvF9mXWiwZHv1Tjr/9e4njpLtp1pwYQ9jZo9ABf+8HfogfP9qkLBuJo1IC7Qm05ubVHT74p/43HA6yTrli0SoQR3Vdy+9y3TzuR379TS+UN73IvPe/rpT6UeA3kFjJ3wT+HvATwA8ppf5G97m/333L3wf+sVLqVeAM6UT+1r8DsE7zxduPqVpHoyMKJ93AJI5QUYwOI7SOuPy2d5Du7FO52yyNEKW01gw2xmxeucK8shweHvLg3kMODh7z8PFjPv7x3+Dx4Vk3twn55Oe/xL0Hj7l5/RLf/OFvYG9/n+u3rvLK4ZcATy9KiMcj6ijnztTw1I4myxN0EmDqGqdCfJKjg1Ayo3VErQK0Unzgg+/lFz//Ou28gHbF7M6nefI7381+9g6ODh6yWqyZr5Zk+QDvPWm3qM7b8aKn8BRGSph4OACvSJy4DFQQYpsGawxxmnQ7Q0sYRKigCx43DT4IiMdxh0PQMr+yhrZpCOIEFUY0ZQUqgDDE6ohVXeNVyGK9Js9zWhUTRgLjUWHYRQ01pKN+d0MAG4TongatyXsjmfl1Fci5r857iMKAUIfoKELHMUVREhMQpjFR+EbgQxCLoNhECXVdE0cpPohwSY6PE4g0ed4XzaMTd/XBwQm/8KkvMbeK2iickxmF0opJL+NPfd8fY/PyHk3TEgaBiKGVovWOMI6xQUjd1CRZjzc/iv4Ku4ve+78G/LV/69OvA1/zP/C1FVJK/js/FIrlek2/N6RtWnQYMdnclG5a21JXFdViQVysqJcz1usGkbbLBulRrAvD3/2v/hGz6SlnZ1OaqsGZFjCMhwOeeeoJnrx5navXr5DlKZNRnyxJ0FpR1Q0feMcT/MpvvEZR1tgug3k02WS6dhBkElIQRKjuIrJWEj29aWlNKzMcY3n+qSs8fSnnQdjwxK1bPPfkE+ymjizrcePGNbnovRz8wzC4KLfCUELonDV4FEkSEycxx6uV+MQ6/1SaipgYEpyV5kqUBN3ZyxLHEbYj8oadBahtWmkahCEmgLI2xElMMhhxejYliuJul0ukxNaCJG9bgwqCi05kmmbCoO8U+pI5JqbKKIrO33/JHTDmwsC6LtbI+FTmfEVR4ByEWrNcLBgOh5LR1gmVxTMmFK/WWA5PzvjcqxI2MY5krOOsZGW3zvPa7fscHM8xzkv3wSpQniyJ+KPf8x185//8m/HOEicpbVNfNKZAoXVI2wpNeL1acp4q+mYeb2nFRxgE3Lx+haPTOTvbmzz33NsuFNwmjET1HUbMZms+8cuf4u7DEzmgOxDfvHTObr/2OmkU8sytG2xMBty8dY293R0GowFxEKA6rnpZVqRJSmsMYRCSZ/C173qBzfEvU9ctTVXjipbf/NVfYXt7j/V730+WcBFocM67sNZdMECqusQHASoMOD055fRkyu7WJg/uehrzfuImkE5kICZVOo+TVkowAE4ilAItHEJjWrQx9Hs9nHc0jXQSi2JFVcrOlKUZrj2nQinpGjp3Ea7RdtYU4xSrdQVKZFFKB7TLtSyaJCMINFGcCDTUO+IoQikJQ5dBd9otYncRDHiOxLbGUFU1VV2TJskF9QolHEljDI2xRAkkSYIkZZ5j6lqZ8XWRV2E3YG9bEf/WTc3D2
/doXnmdf/Wzv8hPf/RXuXV5l/e+8wUiDdvbm/zgP/5RPvabL7Na1x1d4A3EQJ5H/O7v/DB1WVGsV6ggIstSkjjFubZzbQiwKNCCB9RfQXfxLb3I+v0eddUwGfQYbGyyWBXce/gZHj064sHDAw4Pj4kODnjmpRf5F4cVer4kQvxBvvv3e56/yV/5i38SrRxxJIFucRqjgxC87hZYKLX4IMVbS9QNUtum5cqlbW5d2uJkWlCXlt2dDRaVRUUR9+eajR0RAzdNg+k8XV4rmrImawz3Hxzy6PiI+/cPeOnV+7ggwDrLcDjhk7/+m3z4d3w9y8UJzsvcKQhDsiy9KF2sdcSJXOg6kDNaHCeUxhDFMWnWlVQdT9A7Seo8D1pIsoQwySiqhsI0BEHIuqrwiKv4PFXFaUFsn+sEvXN4KwzDcxVImqV4DzpIAPF/KR91MB5FXVdEoTRgojgmSVOqusJYR12vsVYaIGEYi1LfK5yV8jIIQ+koxiKvEjNpznQ2o7Gl4PG6i7+tGvp5wpGxfOGLr6CVYrFa8/HPfpHDx4fYpkWhee3OKdbKUP6ceyKiTEeepyRpIjrHTsbVGrlhnQ/Vh8M+TdMSRPqrF6QThiHOK7I04fa9R/xf/28/gO2ItHJn1dwMQy7tbfPudz7Lb37qs3J+kbwcdOB55uYe2+M+67IgTzOsaZhPZyRJRpKkJGnc9ZpEBmVcp7BALqwsC3jhmat8/OX7RCrhnV/7AVA5SZTyaOpIlg85PT5kdnTEe16+zX/9//iHGOsx6yXf89oX+fHCsbW/Q2ssddPSesNyueLWE7e4ur8nZ5ZBD2zLuqgItGMxX3aG1YS4MyHqLtziXGcYRpGEywdivW/bVnaXtEdtZfdcLpcExmNcKaNU1XXuvHQZg64sVVqhnajw4yQWhkaS0LYtUSDlXxanGN9irKeuatIsJQ4j4ihhuVxIeWssQRCRZsJjlDhYQ5ZlFIWMILI0I8t7FMUa3SqyLMHYluViQZqmeCWZY3XtKdZrmrqh1+/TmhK0xKOHYcDO9hZVPwdjaKzltdduUxtJ9myaBm8scQKtUYKn8+cGXcULb3uS8Xgow+ooxntDkvcoi0Lwc0qG+G1rOD094/D4lNVq/T94jf47Xcdf2TL49/tw1rK/f4mTs1NOp2cMhzlP3LjO9sYIjeOpp26xVRfkv/ALqPe/g1//O/8voq4T51H04pBv+eZvEJt6x9zTOmFne1ekS6btENKiI3ROGBaoc3+UzEfe88Iz/MC//CUq0/BvPvpz9PMRO5efZKMf8Usf/3GsrVFNzeWTM84mc7Y3R3zgG97HM2HD933fn2RZ1Xz607/JB973HJ/6zCucnZ5xdHbIuJ9xdtZjf3+bZn7EYNC/OCNYayiLAhNFJKmoOpIoIerSTSpA6UDOip30arUuCUqB30QadBSBCtBeXkuthWuRxDG2kzlprS86gnVdE+rwwhTZWsd6XZIkMa01eAu2dSRpLqi36Zw0SWRX6mJvTVfqxmmMUp40FURAHCdEsZy/WtMSxxFZmoJWWGMZj0es15Wc5ZzDirtFdjgvrutev09d1sRpiraWW9cu8fWDLX7l0y/K/EyF7G1vYdqak7MFg16CqWdU5hxt4OnnEf/RH/8DXce1i+3tmJAoRdHUPHj4mAcPH/HSy69z+85DDo+Pmc0Xb/o6fksvsiiKeOfb38FnvvB5sofHxKHj9/+eb8Fbz2q5FAZ8F+QQhTGbGyPC2ZShilDeszdJ+NKLL/G2J6/IrtgaQVh3gXNplGG9CHybupK3wbvuzmkkGEF5nn3iGjvDnDtlg21r0qDm6Wt93v3kiNkXehydFgStTOb2d3Z523O3aMo1t+/c5/v/1t/hZLqgaSomG0P+8Hd9m+wqdYUKRU0ynGxSBZZ6ucJ2jRs54wVIN0yDcyyLAlOWlAvJZ7be473Io+Kum5hmAW3T0DYi0JVdLxYFSaeoaNsWOqZ/GIYdgKZF4wgUZFnOar0mCCNJnFSKummJgpAoCqjbRr6v42QI6TcijlPRJ7YN9WJFVTdMp3O0gs2tLaJIuodt29I2NVmek+UZxloCApkD5jlhGBFHnYnUhoRRSNsGYijVijRNcVXFcDDgd33km7hx7RI/+ysf5+7jU9qqYjjI8K7l7GzK5kbKo0dngLg4drYmPP/Mk+A8jTGUTUVVtTx8eMDtOw+4e/8hDx4ecHR8QlkWtFVD3s87jMGbe7ylF1lrDFd3tvi23/F9/OiP/SS3nwUQegAASrFJREFUX73Lr/7yx/m6D76f0WQkCObXliigWM6YDDIyv+Sbn9yjn8UkoebJG3t4j9j0z4eyXafoXJMXhPqiM+aco60baTEHCm88Oxsjvu3r382//LmPsb014aknn+Xp61t84EbNpd/3QX7ov/05Dh+f4IEXX3qNL77yMrYq+HOzBdHWJS5f3idPYh4fPubua6/y7LNP423Jq1/8DC999td58VMTfue3fSuB9iL9QULC66YmSXucns6kA2YMtI5MabwOsW3dqSVClIc4ibGmEVRBVYsK3mvatu3ITtJAkeGvnFHOZV5lVaOBuilwQF039HtyeVRVzbqomC1WzGdLvLVsbo7I0oS9vR2ZZzmhTs3nS1CK0+mUg8MjnHXs72wBmsVszsOHh9x/8Igkjbh6dZ/Ll/Y5x2Ob1hAOI0GCBxq07IDee+JElCKFMdR1TR5HRGnKM88/T6+fc21vmy/dvsenv/Qq81VBqGDYzynXFVeujLFO6GPjSY/Zcsnxac3R0SkPHx/x8qt3OT2bMp+tBayqYDIY8twTt3j2mZu87YWn+Ut//i++6ev4Lb3IkiTmGz/8AZI45K/+1b/M3/7b/0/+/j/4JwQq4Os/9H5pX4fSbXzttRfRvuWp69vMNxOMszRNS1lLEwAQC5ESj1nQOZetsRfxp6YThGqlcF7KGrQHpfhdH/kQr9x7QF2VPLzzBczpF9jXt9ncHPK7PvwcP/vznyM4gHc9/xTvff/bGaYR7/ipn+A9f/kv4bro1dOzGQ/uP+Tll17h+OiA4WRCrODGjRvMZ2uhV4Uh0+mcKE7wzlG5RpJeopDWGlE8hMIISdOMsqzI84SmbWTAHImoN0pTmraVHQHpnB6dHNM0MoZIk4g0zViu1swXS5q6IU0zwkhjLdRNg1ZyHlutKw6Ozlis1+ztbjMe9Oj3+8znJxhju8G2zKCWyzVN23J0fMpqvWJ3d4eNjQlZltO2DZtbky4y1tK2lrKsiQLY3d+lWK27HdmQZTnGSjVhupB6aoc1DVGWyywPUFHCjWfexvaVq+xcusTzTz/BvYeP+NTnXuTR0RmB1hgLp7M55WrFztYWv/BLH+P0dM7J6ZTVukCrgCwJedszN3nqievsbm8xGvUZ9DPiKGC8sXExingzj7f0Ilst1/z4j/93xElMmiVYBxubmyRZCkoU31Egb0JbzHn61iZJVZIkEfPjguPTGf/6p36Gfj7kmWefIVBBx/twXR0usJZAS1yPcYY4jnAeyqpmejbj4OiET3/mCxwfn2LLJaopeerSgBtX
Niiakqe2b7F/9RJve+4W/F+mvOvP/iHiJEFbQ/qLP886kqEuwJUru+zubfGe972LsmqwVvR3i8WClZHgg9A50rxPFMeUxZqiWJNlGfP5gqBTNVhnKco1WZIRhFFng0k550s6Z8Qz5QGtqKuG9bqkbTyL+ZLxZIhWmqpuWS7XDPoDgrEMhaumZrFc0MsSJpMRg+GI1WrF296mOZnOCAJNuVwznc1I05j5YsHly5cYj4YMhn3KsiCKEo6Ojlkt18wXCx4/PiBOEtI0YTAY8OzWhDxPWS0WJHFCVbd4J1pFHQRgxNriTCsmTuWlAulkcVEU4rXGti3VekXezwl1yM2nn+bS9es8+baCJ25cZb5Y8NJrd/nk51/GNA3rouHq3j5f+uJtjPf0s4Rn3vV2xoOMG9cuk8QR+/s72NaQ93PWxZogCLhz7x7r9Vdr48N7Hj98zM2bN3CN4wPvewff83s/wtVrVzBtQxyGBEpTlELcDX0jYtIgIEkjNre2sCg++anfIO/l3Lx1k7atEcptRNvpBL1zFGVFUda8evsun//8l7j/4ICz6Yy6Fa7gzjhiFFRsXR2xv9tHK0NbWx49fEyaZ+xNNtjcHlG6Fa6U7lZoHWq5xqIoqwodxhcpMEEgZydjDIMso10siIOQNI5QgaJazpnogMYDRUloWpSzpN5TrEu0CimblrKoqaoTCb1IU/I8x1kRyRpjCCJNr9cTt7b3LIsSqxR5npEmKaPJhMVqTTFfYLs5lCAXFNPFmsW6Yr1aUVUNbduytTlhe2dTnNbeEwWKzY2R0KMAjcO5lqZpyPsZUSac+8PHR5TlIXt7u/QHPZwdECcpWZJIaJ+TYX4Sp+LaNpZiVdBELVEckqcpzhjq1lNWLUEtiO6mbYlaS1GvUbViejZjta64f7Tg7PiA55+6xde86+0cnc149PiQD33jt3A6XYmp1xnqqmRjY8z2zgZ1UTGdzjg5OqKoWl565Ta3b9/n9GTGclm86ev4TQuE/6d4PN/L/Y99+BsZT8ZYa+n1cmHteQl2q6oStS6wr7zEa0lA2DS8+NR17l/aozaW2axksVjTz1P+6B/7Y+zs7NMasZLMZgtOT2c8eHjI63fucXY6ZbWuKJuOvY6/EBQr4PJ2wuWdVHKuvCeJQ5qqot8bEMURTz5xja956QF6WdC0lqYoGb30Kvb9X8Oy6i74ICAMA6pSmixZlgIimA06JbvpFr5SWuRDcSRIwE5pUa7XfG73Mq9s7FwQoKIgJM9zgkAEvDqIJJ63qfDO0R/0BRngHOtCIDHrdYlHUHlxHDEZDRiPx2gtu/iqqMRj1tQSBdsaer3eBSLt9HSK1ordrQ12tjcYDHskUSQ0rJ0drIOqrplOpzx8eEBZNURxyGQ8Iu1ias9OZ8xmc9JewrXLl0miGAeslkuOT6fcu/+QXp6ztTXh0qVLHB0e8uDRAUdHU9R6xX/82udRt25RNzVVVQp/0jkOD484nc7Be8aDnO3NMVopqqpiNNlgvLFFURQEYcjR4XEnJpAyta4q1us1zkHTCJLhytVL/KVPfYrPnk3f1LDsLb3InnvqSf9//xt/g+nZlNYIlWmyMSZQiqquePqZJ3H37jP7ge/nFy6PGO7vY7WoGpq2pa4tq6IizzIGgxH3Hsw5nUq+clWJuVKHoi2MgvOgOi5a265DlAFc2cm4vBOzXKw7TqAijEL6gwHOefIsp9frczY9w7eGQRTxXXceEvyv/wp35p44yVguFlKOOk8YRlRVQ5ImaCW7trAfRUIlWDNF2xjqtmWxXFOWNYvlivl8QdsYenlKr9/DOEfdNExnS1ZroXc1Vc3GZMTu7lan0HBESUwYhswWC1arQlJRkphhv0cSxQxGA4r1ElAslisGoxHFcoUKAubzOWmWUVUVcRIzX8jZKwoDxsM+xhg2J2MmkxFx3CEOtDAWnTPd7EzJjbJuqFspVYuiYTY7Y7Va453j6rWrnYIk4PjkjHv3H3N0dMxyucIArTXioXOekW0IOnXOeffUdsGDVVV2si+LqUuUt+zs7fN1X/e1hFHAKy+/znS+pG4Nymu2tzZ527PPcPnKHhuTEXEUM9kYU9cleM/v+V/8L/ncy6/+T67C//f+ODw+5b/6R/9tN6d36K4UNK0hANTP/gob6wVfdzZj3oN5VTHY3sHrCKVC4VvEEcZ45os5l6+MMark9fsP8Z0NXxtLFAU4pQi7YadGEWkps3Z3Nnn2uafZ38r5zCc/Sp7FlIVBdHKWqihw3jM9mxJphVmv8E3DwsPxaYG5/5h48ypYmUU504L31HVDGGp0xyCp6wrvIeiif8qyAhRV07JYLqmrhsVizXKxRON5+ukbDIcDwjBgPl9IRK2xBArSrQmX9ncZDQdEaUyxKjk+nWKs3KkVggyfzabMFZTlgDyVnDbnxFbTy1OyJCQJh8RxzMZogFeKxXpNU7dcuTzA4VktVxyfnnF6OufewyO8s+xsjdmeTNjanLC1vUm/1wfo+PSG1gr8tJenXdSR5/qNq0SdKXS5WglGIYwYb4zp9eUmuSoL5oslr756h3VRsAoDSMTn7qxEJwmZ2eKCWFgpqeaJd76LNI5ZrQt+8jc+Ty/L2Lu8y/bVq+RpinWa8WTA9rUrjHa3iROJhvK9DEwfrQL8V2sLv9PzCHciEOOhdS0q8LStpakr0mrNonY8mMOkrWnrRySDHnGeQyy8jbpqqeuW0Fa8550v8NKrdwgDTRgoAhWQBqIauXJljxvXrtDr9RhvjLh0aV8AOFpx8PrLHL3+OqZtaNpW3shO9BrFMWGHTMv7PTav3OLqE88w/sWPcdLfoSpL2tbQGwwJdCIgmL54l1Q3Vzo/C3nvKauSNEmx1jJK+mKtVwHL1Up0fd6RZlkH8DSMRkO2t7e4efM6YSCu5bZp0FHI0dEJ8+WaxWpFGsdsjEdsbGxwejbl0v4up7MZzomfyzsnyIZu8BsEmrjrqmVpwrJYo3BEocK0LWVZce/BQ6qmJetlDPt9kjiSwIkg6GJ1xZ1sWgd0Dm6vMa2naVqyXLxw48kI27RYJ2DWV1+7y+dffInecCjd49sPmc4F0Cq7coTqUA7WGryTKFvvIOiEzApBIty7/1DGAFHMC88/y/bmBju72xyfHgsnf12jjOFXf/XjlHXFqN/nbc89zd7+Hts7W4Qd/u/NPt7ii0y6b0or2qYUu10nj2nbhratRbenI9pwwEnd0nOOqJwR6jNUoNBKMM+mNTz13E2e3R/xP/uG93DriZsMBgP29neJk5BelktaiVbiOXOGII7RocZYw3y+oC4qQZUZyb5Ks4yd3V1GownjzQ12r1xn/8YTxIMhkY+JPvZpsl6OL2vy3pDlYsbp8RHD8Sa9fp80SaiqkiiORRjcGkzbMuj3aTrLPYiGs6lr+v2c1bpgNl9y++5DykrOS3mW4nAEYYSzMBwOiEPBf1e1NFe2NsZsbUxI0pikC5FojSUKOuRaVVI2LaGCOOzR6/fpTF9Sfpqa09MzUIIqF3Y/DIc9stZd+L6Ugrq1vHr7Pv7VOwRK0ctT9vZ22Nw
YMRwNqeuGXr/HYNhHByHz2YLDo2OWqxWz2ZKj41MePj6mRXN8NhM9YRwxGo+5dWPEdL7g6PgUEBG5RaNCoVQFaUZdFuIoBxyOPO9x89Z1kkgCKT77hRcJXgwYDge87W3PsL9zia2tDfYv73NwfMKoP2Dv8mXW64LV7Xso5Wna9k1fxW/pRSaePotSgYSoKFlgTVV3gtoI1dQyRA403ifMjQWnCHxLpD1VXbMuK3q9AX50na2rT/In3/0hyqpGKZntNI2YDbXX+NazWpcURcn9+y8yW8w5PDjkSy++yNGjkrppWJcN48mYb3rHe7n21JNcv3aNfp6R9/voSLSEvjUd+MYzGA4pypLReEKWZZTrNfdv3ybNcja2NkiiiKIzUioCeb62krSSIMQ4w7ooWK5KptMZRSGH/CRNRApU1x0KYE0UxazWa5IsI4kjwi5Mo5cl1EWBs4ZZM5PzS+sYDHpY07K1syGvCZJqeTY9oyxKvJdSLAwDsiwlCKNOciWogcFgyHK5pm7kHFh2i7aX5/R6GdubEsm0tbVJUSxZruQsh9YslyuWq4KzsxnHJ1NO5lPKosJZjw4iwjACNAZBh1dVzePDYxbL9f8XB1FMquLl9daR5SlbWxtMuh1+f2+HMI44PT3jC1/4Eh5Nmqc899yTPPHENfqDAXFnKXr8+JBPfPw3+ZVf+QS9Xs721gahVizmy//e9fnv+nhLLzKgO8wa4VZ03D+68HHrxcwYBCFxmAo6LFRd91EzHE3YiGKW6xWBguHONeaFoTmdsZwvCZOYVVFwdHjKo0ePSOKERw8e8ejomOWiEIu+8mgvZsN4sAdtQzQwKA+//Cuf4Jd/6deJkxAdRDz//LN813d9J5evXKYx7YUXYFWI/i9LUvIsYTAcsr2/R9u0TI+PebhcsL2zQxJHpFlCWZREcUKWZ4Di5PSMpu3YiCqg18sYDPrEcYJ1jvlyxbooWBcVVdMyn61wzIiiQErEyYg0iYnCLtIIRZQk4CuyLKFYe2xrZTQQaALlCYMY01ryfo+DgwWtcaI5TBKapuX09IzVct0JdgX34L3nxvV9rl7Z4/L+Jdq6IElSlsslzraiolea5WrBa7fvs1isKMpG8HXe0x/0uX71Kr1ehnNQVg1HJ6ccPD6mLKsLh7hSGqXl4zDsssmcI40Dnrh5jTgM6Pczrl29QpYmNE3Nq6+9hg4j3vu+dzEZj+hlGePRiLJYs5wtut0y5tq1q+zu7rEuCjSa0ahPv5/TH/Tf9DX8ll9kkriIhPO14mDNBzmR1ly+doUnw4DLn/gYH/7aD9C0hjiJuHzlEoNBznK54Pbt+9y7+wgUfOJXPsXP/3e/hMPT1hJ/WnVUKd8FIQhzUJE6R4i/OGNIA8XgYk1V14SB5BuLXcMSAA++9Cpf+PVPsTcakQKhEra8jkVJj9KURdHpEoWGu723x9beLnXTcnh0RJ7nRHGKbQ12uaJYlaT9Pt7JuSjQsLm13XmyRJ8XBRFRGBOHhuV8SRJFjMcD0iQmCIRj6JwVum8XdNFaSxjGlFUjRKmmuniezlgshsVyzdHJGbfvPmS1LknTiKtXr1DXNePRkMlwyGg8YndvG4fj4OCYs5MzXvzCq9y/f8DGZES/n7NcypxtNpuzWhc0rUGhRSqVnoueA3r9Hq01nJxOCcKQqmqo6oo4iYhiUeRY4zvTp/BEzrEM436PF952i2eevsWtmzcJolAqiPWK5WJJEIacnJxQrpfMveP1l1/vwgE9e7s77OzuMBqNSZKUw8MjHj14KM7z+Brb2xvE8Vep4sM7Rx7HbGyMGI/6vP2dz7O9u02SZWRJyp27dzl57Tb1yQkv/PxPX4hfq7pi2WHNLnvPZYSwNBgO2N3bk1DxPOlYD30UdMkg4tc6TzfxcIEXkzw0odTWtWDMJADcd+p+sWAM7t0m+IEfkLPCzg7q3IrStJRVhVei7K+qijRNCLrFW1QVUZrz6OAEDfT6PcJQMx5PxH3tHd5ZLu3vCyC0rrDWMZlMcM6TFQluMuT6tasM+jnGWoaDIU3b4Jzl5PiULEvRSs5qWRox2dggQJj3p0fHXLq0z9nZlPl8wenZjKoxTGeiPt/b3eLatUtsDIcsF3Oi7sYRpyl3bt/FOs9iteLkdEagNWeLJfceHmA7VY4OQrI0YTQcsLkxIc8y8l5O3hfe/WKxZFUUHJ+eUdYttqgxrcE7Os6JGENbbdCBwhiN1gat4F3veBvf/A1fx9b2BO8dw9FQDJ5aobWiP+gzHPRZrlbUdYv3kA/7pFnM5njClSuXmGxOsI0F5dnZ2eHqlSsEgSbNkotd880+3tKLbHtni9/9e78NrRWvvn6HX/21T9A0luPTqSRkGoN14CdXUXSxrlpf3J0u7e8TxwHPP/cM25sb9AY5Fk82GqNDcegGKiCOI3LvMR0NKUki6VR5odNGcSTQl0ZgLP00w7RtR2eS32tMi9UKH0bg5ILwQJTmrFZLsdeXxYUWz3vN8emMqqk5PZsznc4IlGL/0i7j4ZBLly8RhTJcPT4+En6Fkzt3sVoxmYw6ErGiLCquXrlE0xkPTSN4OWGuNixXa8nn8h7jjARBKMVquaCuKvJeD4/g1c6OT0jynCxJSLYSelHIN37o68FblALXNFy6tI01nsPDY1onmszlYkXThfkZZUjTmCDQjEdj9rY3GW+MGQ/67O1sMxj0iNMUnOPo6IiHh0ccnRxLIqgVLalCYZpWzkrd/Ktu6o4CLTfgUCtuXb/MEzeuMpueAi1JKvzGqhIe5cHhAcfHZ7KbNi11WfL663fxCCcme/45Dh8fUCxXtE1Lb9iTII48Js0E7nMeDv9mH2/pRXZ8cso/+MEfxnoFWoi5EkqnCLUIZIfDAVeuXGI86nHr1jVu3bqFRpP1uuifPMVaifqROFbPdDoHbxj0BzgnqgnTitM4DIXe1DSGNEskm6ypcd6RJplQdb3DOE+cpJiyZLVas1iuWS6X1HVD3dREYUieZxcw0TCMMAZOT0VU65WmaWrKdUlZ19LGHg0IwxClA2aLOc4a4jghTjLqtmG9WqFQxFlGax3eWAIdUtWGF7/0Cov5XKhTsQROTM+mnY9O3FTT+RxnxfNVloUMaPd32N7YojWWBwfHvH7nAev1mqZpGfQH1HXNg8MTkjhme3cbbw1n0xlHx2ecTudYJxSsOJJGzM7WBoPBQDp5Uch4NGQ86DGejNE4lssFDw8OOJsuKKuKphG5VtMYHJ4oTtDK4wMvWALnqDuGZNjrSQfWNDzxxHWeuHmDNImpStGZnpycsLW9iTW+M7E6VkXFpz7zee4/OMAay5NP3iDJe9RNQ5rnDLpd7+RsShgo0iynl+R46ygvRi/5V3Qdv6UXmbUO4yHPE6IwYjIec2lni929LZ57/hm2tzeJ4oAwUCRJhkLC6+IkEegLsF6tpa1cW4IwpCzLLs0+lgRHHJqAOE2p6xrvDc4pjHWcnZ1grSNJUgaDvnAwlEIHEXk/ZTabSWJI3VK3hrI2nJ7NKMqSLEsZtAZPQJrJHbqq666lbnHekkYJe3vbZH
lO09b0+32cdd2bPqNpWqqypm4bTCMD8DDQWO+wrWNVlqKqbxqapqVpRfOX5xlKC6u+bhrCIJCzT5wQBCGHBwcATCYjitrym597kcVizcn0DEVA20qw4P2HR938ThoTvPxa57FTXeh7QBJFjEZ9elkGeDYnQ5IOmLM1mbC7v01TlQxHA+jwDBbFcdfMsZ0fTmnJXTu3GwVBgIpBq4gojrrzstyYXnj+3bznXc/jrKEsKvqjgdhgypKmMZTrUgIaTcNsvpBg9yCkbR2f+ezLpFnCoN/j6PCER4+OJT0mCnni1g1uXDdcurRL7GNUoLnzpde4f/8Bh4fHb/o6fkvLqq5evuz/5l/96zz93FP08pR+v4dXAhr11nW5xWIjMcawWC6wTrjrpm1YrQuiKJLUFuNEEuUB5SXSRymaqhWbe9PiOohLaxzT+RKtAyaTEeNhThJHoiTPEpyTO6VpTTc0NjSNZTpfMFssuuAE+flxEItq3DsB1bSGOA7p91KyTHLBrHGs1ktBk4XiTD4P0VNdoqW1BmMsTdtQ1S1N1XRjDUXT1F2pKM2NpmkwrfDjrTGEWpOmKVGWUndnwbxbFGVVc3JySlE2Fw7mpmnEMd2Vbs4Kw0NrwQXoQBOHIdeuXGJzPODy5T2GwxFploG35Ll042ZnZ2S9lNl0weHJKQ/uP8C0jrKqaZqW0bgP6DdypwOhUHnvBKrqBDturWC7A60Z9fu8813Pk0QBD+8/xFsIk5BHjw+5c/cBTWvYnGzw5FM3aE3Lw4ePWa5LWuN4+OiAxWJJGGqSKCIKJNQ9SRJ5rp2sLdABs9kM1zFFojDiJ3/mX3E2Pfvq0y6+44W3+Z/6Zz/CermQlPtQfFXOIwiyQFNVJbOzqagwnKcoC0n2CGTqv14VNG3LclGAEqR0UTRM54tOjtSwXhci4A0DsjSVlrQTNYdS4HEkcUKWpaLcbmoUIcNB1uUKG9rWyXmky9tKswwdhCgEVCrE4wi866RUQQf5fCPlxTrJefZe4DlagbNyUYJQzXSg0Tqgrluxr7SieDfOfhkDv0VpjTUSJphE8UXIXxgodna2L+hQ09mMprXUddPRojzWdDtJl/oSBJD3UvJcoKXDwYDNyYgnb1wFBTt7e5TrglVR4Kzj9ut3qTvEgHWe5WJF2VGDm9aQxCFZJqOA9aoEZB7qgTCO8XjCIOyAOkLIbk2Ld5YQ6Pf7pGnCfDZntSrw3tG0HWMkjFivluDPF7ORfLFuwXgkZD0INKPhgDCM6OU5VVkRaMViuSRJBWSkgoD1egUofuEXf4bZ7M0JhN/S5aL3nnK9ZjSZ4KxhvV4TZylZHHN4cCjotiiGIMAby9HRIXVjumSTklVRslyXzGYLzk6nFGXNbL6SVJfuMO0c5HnKxtZmd+EJ6HQ8GWCdxdk3BMOz2Rzd2URaYzidLtCBJs8zvNIEoWJjc4LyjnVRsi5KrHPEHR1XzIeWPMsYjwfkaQyIUXu5WlPUFdaeC14dddtQ1Yaykk5bEATkWQq+uSixkiyhrGvaRuJv6W5A4DEY4lAU7+ezpTAKWa7W0un0HtMx4s8d4VoHjIZ90VUCl/f3mIz7JHlKFMeMxmOpElYrgiTCNIbDoxOOj0+YLeacns7wXtAGTbdw+71ckmC8k7xv5zk9maMC6dAGWlE3ouapyoowkAG05KDJ64+CMIgwdc3J2ZzGNF1QoutuMo6m7rAIzpImGct1SRCF6DCSc7WGOIxpWnkfUDJor8uCpqoZDIekSdLJ2yANI/Zu3CQIFB//xJs/l72lF5nWAUmWMV8sCIOI3mBAW9csFgviLMUt19x7/TbWK3qDIWmeU7dzjk7OuHvvIWezOfNVwWJR4J1Y2JMkxltHGAX0s57EKXWdtX6/h/eOum45OZnR62UkaULbGJqmpNfrXVhP4iSVpEhryLIUBaRJKlDUsqCuGwaD3oVFJdAQZakwD5Xi5GxOliY422Jay7qqO2oWFKZ5I661g5xqrRgO++RpQpZlxEnMdLbgbDoHJDzBA2EQY62VRFJrCXWADgOatqGua9arovt5Hf7NG9H46YAb16/Qy1Nu3LzKjauXBAOXxtRlzd7lS5ydnrJel8yqitl0yenp50hSGVobZ2iNWHJM2+KcsCG9l9eorivhQTpLVdUkaUqaZ7RGAD8qFJ5JXVdoD0EU0LaNdBnbRnSiOqCqG6xzgmJw7kI2poOA1pg3IKQdLQukixxFEVXdUNUNzooJtKlqrl3Z59q1y/QHfc5OZxwdnXQlfUKgFcPxkLPTM5q6ftPX8Vt6kZ2XQhrFfHrGdOoIo4Q4CqmKkjhNeeLZZ5ieTTk9neKVMNmVBh1qwjih15NYnkDLm3geXhDHEruktCIKE0AYGGGoGY9GNG1L07QsFyt6/YxhOrgQ8EpZpWhbGYiaVpTrZVkThpJC0uv1kXgKSHJx/JZliW0NhAHGGeZHa8GrpXFXKlqMaTuUmhLmX+AZ9vvkeUq/l9HLM/Dw2uu3OTtbYJ0Xu4uxZGlCHCcX0bTGWNZViUZRrGUg67rnEAQBbVtLBlgYEQWK0bDHeGOINZbHDw+Is5jJZMx6uWaxXlOVJToI0WHA9s42y9WC6WxJVVb0+j2sbeWs2lF/29bQtA1xEjMaDSUzWgek4xEoCQjEWayh89BJdprAhCCJEkxrsW1DmiSUZcViuex0klCWJb7bwbI8k3BHLY0ZYwyBFuGAaaVxo7zkkLe0qC6VJo5D4kiTZwn5lT3Z6ecrgjBgvSy4f+8hZV1/Rakub+lFVhQlP/VTP4/zlv6gx8Z4RJ7lJHnMeDSmbeRNzfKc/STl/oP7HB8fMZ2taWrDxmQIIB3AqgG6BEoPy+VaIJZWYhWTJKHXy9E6YL6YEccxeS8VJQgiSNaBJk1SirLEWmldp6m+aIScH9aXy4K6rsmylOGgBziMlcN8mmRM53PuP3wkgeStlcRQHRAGnT0nColjzeaG6O+Ut2xubaKUZrlccnx6CipgPJlQ1w3OQ101aO1ZrYpOfmZFvaIVxhmyXu+N0IrurpwkMVkuDZgkji4QcE1tWIcB8ztzLu3tsLG5QT9J6Pdk5z88PmJ6dsZytaZuHGmSUKzWNMbS72VYY4mUzPAmk7GInbUmjCOMkdeiKGXxJ0mIV77LZfboMMAhwRa22/VBsVwJHDXs4KmmFYpW3IUgKi3lbRCGhGGAt2JGraqaQCtMK+Ll0XhAECjKdUVZFMwXKw6Pzigqw2q9YrUsWC3WwuRP44tmkrVfpZjuxXLFxz75OeloKYF9DgYZg2GPLE0YDkfEsagpirKmrBuKUqhLg85ICIosTYlC0bedlxeDfk5ZNTS1JU4EP11VNVkSs72xgQ4C6rYljCNQcr4oipL1qiSMInp5RtO01LV4o8Iw7FrpYlsZjPrEYUSWJYxHQ7z1PHr0mDBOLjDUp6dz4kjyxtIkRuHJ+xmXLu0y6KVEkQxWF/MFr712j5PTU7QOyfs9dnZ2qetSzlJG6LqrVXlBpvJeKFTOWQb9AVGkWSwXtK1F65BBP6M3yC4SUKIwotfLybNYzqCh5
kr/Cv1+wmDYZ3NjwuNHh8xmc159/Q6rVU1/0ENb8cGFUUCaZ6CUqCQkVoembnFehsemw+8FgSYKpKnR1Ia6bdBaUi9NK/G7rgulqKu6m6VJReO6pArpGr8RK6UDLYJqupjhSEsoRmsuBMR10zCfzQBpHukoZrZYo1TI3fuPhFBmLYPBAOMdtqopy0qeZ/BVanXJ85xrN6/hvOLsbMrZcs3pckVweCqawG5WA3SxO5L40tYS95NlwvQrigqUot/vddIre/Fmp1mCd7Lr9fJMqEjOU6wLgjCg7azt3nOxU+GFoqsD1cmsWqrOdqKUplitCbWmlyQs5wtcB/zM8pyz6YLjYxlI7+1ts7u7I/aWwYCmranrmuOjEx4bA0p1u7XBOToYqORnHR4eipbPShaXqCVkR46TkDBUPHnzOlsbE+I4plivCeOIO/ce8drrD9ChWD3SOCSOO4qyd1RlI1RlpalsRdKGnJ7OePjwMffuPcTj2dgYE4USpFEWNVkuxlPnPN6ZbkcRPWLbyPM4j4N1zosELRA0XdWRj1GKYiWu9aZpukRNYZUoLaXaBRLCOSkFq0rCMowR/IISIYHWEs2kgCSOOzxBJfxJxQWrPwiCjtlfkkQJ4/GYw4MDcR84S384JIwiZmczYT6+ycdbepE57yjrmsViddFit0ZeRI/GqwDrPWEou04QiMogTmMCHVJUFXEcM5oMMa1hvV4RBiFZnnXteUUcRYyGfYJAsVwuJOwuCtga5J2oV2FMRlO3lLXBKysNjkCzXpcURSWBClqwAZcu7cubbQxNK82M0+mcsihxKFbLNRAQaOnoHR8fs729Rd1UwrtIU8JQWu5VXYPSjMYjtNasFivmi0WXQ+a7oXvIYDjCe8tiscIaS9NU7O1eYm93i14cs7O/i1Ka6WzG48OTLtfZiuo/iTsngyIOU3ynsl8uVxJT60WiFGjIej3STIjARbkiCiNGwx4ORVU1WOvoZZkMhpuKsq4JtRbLitLUdYVDoTlfSBIo0ZqWsjQyq3T+y+KixE8Y6rBT+kjHuW1E1H2uwPfeS+xuh5CIopBIKwbDHpvXL3M2m3J0fMZ0vgDn0CjiTrxgTSvlpW8JtHjxUAodBcxnc+gyAL6SONu39CKrq4bHj466NEfPeeqh89J2P4fqVFVFknQ7WRdtqrQgoquqxnRo6u3tTaqqRmuB14RhIEr26YwwDEiSqAufc2RZShxFGGtYLFY4LyqHspQSIksTsjwlDDTDYZ8syxn0JbCiqVusguVizWK5ZLZYYq2UcE3TCOcvS8l7IgJerwpaI6XnclmwXK3ACUMkzxMWiyVtbTHWEUUB/X4PHaiOdxjivaXu2v9RFNGa5uKm0s93qKuKk9Mp8+WS5WLBxsaQ0WQoO6MXqRndDKuqKrGKZAlxmlDXFW3TEvUy8licx7PpWmjLrQzIFYosSfAdILYoCrxWxEmKxst7YBxJloBzFEUlmO8uGfR8cVnrsG170a0MI9mRrJVGiNYBKhCbk9Ya29G+ojDEK8XGeMRkMmJnd4vxoM/Wzibj8ZjjkxMePT7g3p0HLOZL7j98jFKOKIzYHAx517ueJ8sSyrLk8PiE2XTFYDAEpHHylXQW4S2+yIJQ2szrDvqitMxSoijqjIuqayYkKC2w0jAMOrOhADSjKO4GxhaQu5tCsV6Lbg5gMOh1bzTEccK6KDk5XWAamad47ynLUsqhrjtX1w3roiQINKens+5O60jCkK2tTaJYmhg76Taj8YTFcsnxyRTrNK0zeF+yWK5ASQ5bEIYdiNN2Nwlw1lMWghtQQSBZ1EpRFmWHNOvu9GEog/QslRAFItrWcHY6oyobrDXdIjaEQcCtm1fxSpQhcRSig3NBrRWIp9aUZU1dt8RpzGDYwzSG6Xwt4eeRzPxWRYmzjn4vQym5wXnliRMpm8umcxMrjdKwWq27C1aKx9aIJMzU5mIoH8YhYRyCj3F4rPFdBWMko05rAq3oDfr06UJB0pTRcMDezhbrcs1yucA1FZev7JIkIrtrypLj42PG4zFf8/73kCYxWRJz+fI+cRKxLkqatmU4HDOdriXHrns9zpNL3+zjt11kSql/AHwncOS9f6H73Abww8AN4A7wPd77qZJW0H8JfDtQAH/Se/8b3fd8L/C/7X7s3/De/+C/yx/ousOubY3oFLtkx6CDmoInjiPWqxVtoFCVJst6GNOCouvWRQyHYrozxlJWBdbIIXy1WrNelcJkN0YISmUlRY2znUSrJYojoiikqaTFboxDKRnkhkHAzs4W21sTRoMek/GQ4XiE846Dx0ccn8wwbUuepWilJeGl21GTJJVzIEKsCoMArbxQl1AkUUKSxsIkiSN0IOAc01qMqQX3FgSYjpmYpSnD4YAg0CxWK+pGAijiOGY86RMnMbP5XMrqSLSCTdsQBbJ4mralWBcyT/SOsqixxuARH1ecxFIaGstoKHPAphF8QRRHKBXg8GB9J8eSVntd1xfnpLYV1YwTXRgS+qlonUFZSOJE7EetQWt14YKO4kj8fUAcRTS1ZFZnWcZyteTg6IDVYsV4PObm9Su8/PKrbG1v0TaiNhkNR8xnS1bLkjxPuHx5n3VV8YWXXuH0dMbR0bGIsauaXi+Xm816LeMF3nwL/7eVVSmlvhFYAf/oyxbZ/wk4897/TaXUfwZMvPf/qVLq24G/gCyyDwD/pff+A92i/CTwPkQQ/ingvd776W/1u0ejif+GD35Y2ul5TpLKRbGzs4PWiqIoCLshpu7yuFojiY1RFJKmGUDn9wq6N7eVxkNrwUlXzHlHnuWYDismWWOGOEo6hFsXStE23Y6qUd0BvNfL2doaszHps7u9zcZkQhQFNE3LyemUg8NjZstVhzGTxkLT2osdUty+mjSLibRi2O8xHg/Y29tlYzwiDjSjyQbWGeazOcvlmtYYxpsTDg5PePjogNlsTtu29PJcWuVRQBBERKEmSyOSOKVtWpzypN2McL5Y46wslCRJBKuwLsnSBNUhy6uqwhgrdpMwoDGWIFDd8F0JcqBpusy0iKaR0tpaSxJJY6RupaQzXYBHU0u57JXsDgqNsW1nUdK0TQNesgDSLMU6j3cysE7iWNIwjQgAxpOxZFwvV6xWazF+5hlFVcjroAMmkwmL5QrvDUHYBV40hryXMhwNaNuWu/ceolVAv9cjimLqqhbKdCi6TVD82L/8UU5PT/79yKq897+klLrxb3369wDf3H38g8BHgf+0+/w/6gLXP6aUGiul9ruv/Rnv/RmAUupngG8D/ulv+ceFIXkvRyhHAmtLEmE1lGXZpZT4Tg4l1CalQmTobEXk6TxV1WCMo9/v0eulPP/ck/ILlMiNjBGa1eODQ4Ig6CKGQoyRs47WmjTJpOvWnR0Uqgu1q7l//4D5bMDZ2VI6V92idF52p9V6LWcXFRCFEWGoLgamzjqSLGRjNODpJ29w9fIeaRKyv79Hv9cnCgOqpmJ+NuWJm1cxxnD//iMeHxyxnM/R5zFHrfwtgVJkxIRKMR5tEEcxxrT0Rz2sdSwWS4q1J817xFGINQ2nZzP6
wx79fkZrPfPpDKWkM5dmEerLnnMUx1Rd9y8KQ7Iso64qzk6WNEbmWIGSG+CqqC48b03Tdug2B4G6SNCJ4xCPLGpvDUproiRGWynvQy30rPMI3kF/wGQyZjDoURQlBweLC4HBaDTotKbC+qiqioODI+KuudNaGcMoNNPpgsWqkIAMIA6hKis2Nja4dGmP1WpFVVWUZSUhFP8BGh+73vvH3ccHwG738WXg/pd93YPuc/+/Pv/feyilvg/4PoA8y6mriqJsCHTRDd1lDhN2dXocxwRRQBqlrIsSvLog9YqyI+L69SuMhgOyTLKPBZwjB/KzsynL5brrjhnaRhTiSRLT72f0exmBUmxsTNjc2sJ5cfE+ePCIuhIAT1GUHJ+c8fjg8MI0KB27QBiOHVbNed8d9umigTQulFGCdYbFfMp6nLG/9wSTyZhyXaB1LAbK3R2m0xm/+ZkvcPfBIcfHpxgrwtk0Tdnb2SZNYwbDnCgKKFalNBsSGdCaRnaV/qBPoKFYl7RNgwoUcZaAh9WqBA3DYb+zuCAA1jAkjWKqusIaQ1kKyKepW8GxGYt3oLynrkqWy/IiksnUYoRVGtFk4rHegRPdofdtB0yS91ZrxWjQI0ljdnZ3aeqGO6/fvtBaLtclVdVwdCSyKa3FzV5VFcvlsnNRe0lTDQPSJMUBD+8/kKaQtRgr0bhJG9HPexdRtZcu71OWBWVV4J1jMtmQ1J+uyfRmH19x48N775VS/3+T8nvv/x7w90DKRWMcvTRme3uD4WAAyLxq99IuSmvOTs84PTtjuVoxyPvdXESytEbjAXkvI0tjbNuybCUsvKprlovVBbxFKU2gC8bDAYPRkH6es1wsCMKA69eusLExkoN8WeKVIgg1s+mc5WIthCetsN4QRGF3vtCdLtG9MVztpEweIVitiwLnbCf4lcikfn/AZDhGo1ivVgzHQ4JAc3Z0QtMYzmZzGbAaMTl6pdjcmLAxGZDnKc5KkyiOE/qDHmGgmU2XGOvp91OyKMU7JxnISpP1M9HzVQ0qgcEgp+52xHN1S7+fo5SnXJc0dUuaZ2RJxqpYi6TMWnCeqqxk5ICmrhtU24BHunRKsVotRVtoHVkuwmilHVmWdENn0TtaI0LwJI44PjzsOoueJI5x3tM0NTpO2NraZm9vm6qquXvvAauVVAt5HlAWBVXdksYJhSlI0oQoSfB4hgNR3ud51mkshTKc5T3iJMY5SxKnrNcF9+4+wHtPr5//B1F8HCql9r33j7ty8Kj7/EPg6pd93ZXucw95o7w8//xHf7tfkqUx3/SNH2DU79HvJYwnE6xzHBwccHx0wumZoKOfuHmVJIoZj0fCNHfyRq1Xa+rG8PDhIVVZUZQlVdkShRGDPGU8HhCHiueefo6rV69QlivapsEZT5rnNE3DarWgLFa0xrFYrDg9nfH48RGrUnxOYSDItSTKSZKcpjXIjdETBqC1JEU2bUPTOabTVDKXL0pdIzFCjx4dMJ9O2d3d4YkbVyiWS1QoODyPMOIXqyXWe4bjIZtbI/IsoViWLOZF197PMM4xm63QSl7DLNA0RkYLaSysDecci+UKpQLyTJQf4t3yZEmM0gFNK/TiLEs654JmdiaO7TCO0GFIURSCObBWSr0o4urVS9x64iYKuHP7Lg8fiUk0jiM523a7UpTEnYBaXhulpVE1HAxIE0nhbKpKbDeuc4KrAIVmMV+QZTFKwZO3rtF/19t5/bU73Lt3n8YYhnmOjgLK9UpUO97T7/cEo+fltVyvC6wVUOxyucJ76OU5njl53iNNM9IsYTqdYVrzJpfKm19k/wL4XuBvdv/+51/2+T+vlPohpPEx7xbiTwP/e6XUpPu6jwD/q9/ul/SyjA+85x0CMq0rHj94eMEU3ByP2drYIE5DJpsbZElGVZWsi4JiXdMYR1k3PHj4mNPTOcbIjCnLUkKt2N7Z4OmnbjEZDQi1ZzU7JR8IjmC1POPRwwdYr6hbw+HRMSdHJ9SNoXWCIRgNe3jn2N7e6sStXiwUrSPr9VitV8xmM1bLFWEQsLu7Ido8pQmjsON8QFM3rIuKsqi5VxzRy5Jul1xw9fpVxsPeRbv89TuPaa1hd2eTpm4oK8Pxek6vn8hiW5UYY0h7CaNxznpdcTaVAXeepUS9DOc8s+UarWE46FPXLW1TYVotYuk4oqlb2qbAaSmTyqqiWJXdEDwAZ1lOS5rW4LztAhTpKgKJNDp4+IiyKDk5m3YyKUtVyZA5SROSKKU1LfP5HGssSSoOhrKqePD4kUTiJjHKS+azlHRKYnWVZnG0pChLrl291L1nC8qyYDAcykC7bSWSWAf0BgOa1nB8ckLSmWyDzr9XVC3zZUUvS4mTkKIqmYwFGTGbTimrtVhu/n2eyZRS/xTZhbaUUg+Av4Ysrh9RSv1p4C7wPd2X/yTSWXwVaeH/RwDe+zOl1H8OfKL7uv/deRPkt3pEccTBw3v0+n0GoxHPvfAczhrm8znbu7uUdcPx0SGP7j9kPl+xrpsOHmMp65rlckVRVERxjLNtpxZoCdOUxXLNr37sE4RBSBgo8jynNYbFfN2dqaS8q2sRBm9u79A2DYN+j1s3rzOeDEX5sC5YzJb0Bz2qWvxqjw+PODw4wlpLngtrJInECIoWQXGxKinKqmPiizl0c2vM1SuX2NoYkWWimTw+nbNaLYniiP4gpSgbZrMVSikGg4ymCTCtY7pe0MtSsizGO8ditkQHAeONITjFaiXfk+apcE9ay3pVEGhFv9ejrmvq1jKfzdFaWuTrLhhdqy6F1HvqpqRt7BtSJyflr7Eyw2rqmtWqRKkj6ZxqBb5DFcSh3DCriqYUjWeap5BIgIh38juSJEUhBlaNwltL1MnmBPstQSFl3fL67XvEcYTWAUUh5ts0l87jZGODxWzO9PSUpm7IUxmhOK8pq1aOFWHIaJSilWK5KkjTmLPpjF4tZ+c0STHtl1lo3sTjLe2MfvaZp/2P/OMfwNQ1h8cn7O7ti+Lae05PT7j38DFFWXF0dMx8OqduLV4FrMvyjRfFS8s9CMWfVBZytoiSWFDOSuG8wVuZhORZQhhotrY2uXbtMmmWkCYRw17O1vY2/Z4E3U3PzhgOBxfRr9PTU3mTspzjk1OOT6cs12vKUmhReZowmQzpD/rShGgaiqpitZbcqziOuLS/zfWr+xei4ywT/xVOUVQ1p9NpF9Ag6PD1ukJ56PfSjssBprXESUwvzzpFvoTmBaG09ufzdTcyiMR/hma1XneJnSllx8momhYPpIm0vcuyujinhYHAerz3HS4AylIucPl9mjhOscYhXrdQFCndmSwMA4nnbc4d4wFxkuC9ZrVawTl5LJCZaFPVb3QmO8G3tS0EGuXpynD5e7I8Jc1SFsslWkvneGOywcbGhFdefZW6ammtFYVQIKMW2zpM04gjIReEgvwOy7yz1vyrn/xxTs/eXAv/Lb3InnnqSf/9/8X/GR2GJGnM8cERcZJeqL2X8yWvvfoaea8HgWSOnZyeUTUC5pzPl91QWZ5j3A2UUYJ8bhpLHAfkWUIShVy+vM/Vq5c
Y9HO8szRVQ3/QY39/l36WXNB3wzgmjhPKVcFyuSDv9S5Qb2dnMl4IE2FbvPr6HQ4OTzibzVFK08tTBp1Q2XpPWVdsToZUZS1niaFY+/u9nlz4vVxQ5YFmvVozPVtgrGMwyEkSUWesliXeOjY3hlgnAQ9ae/p5nziNWS1XVFUt873OzLhelsyXK/pZShwHrNbVhYrfOqFplVXV4e2izuEdsrO9dSEEWCyWzBdLkjRjMBBPmyw0g4xbYuIo6RQyFVEUX5xNk1T0gOPRiLIsOT05ZT5b4ODCm+a9wjnRVQ56wmds6nM1vqhLoiCmbVpGoyFN27IuzsP6HDu7O4RhKMp7Bet1QRjFlGWDx2FbMXsmsUT+1k2DcxJJJddLLKHzccyP/Og/5eT0+KtvkT395BP+r/1nf4UoSsn6PYbDAUkY8PjRY3YvXboIh1jMFzx6/BiH4v6DA067VEWPvrjDKaWwpiXNEjY2JqRxTFOXWO8JdEASBxLosFrJnTaIuHr1Mnu7WwRaszGZMBgNL0CccRSR5X0CpVivFzjnRbUdBFRVyWKxYDZbYjw8fnQgoXlhyGA8YjQY0FQlDx8f8fjwGO8d/SxjPOmjtGYyHjMeSmSRClXHIikpi1JAN21DWVZUdctw2KeXZ7SNY7mcMxwOGA365P2MLM2kTQ40TcViIX4p7x15nrJcFRwcnLJer6VsjiO5UNelqNmVEvaGbCBCN44iimJ9YZAMu/jfuq5lQWgt/BIdoPD08pwkiQnCkLpumc7nQgCLo45z6Wk6JJ4k3Bic97RNLe7uJJMd0LkOlCqWmta2mMZ04mFDEGqx1CgkS7tY0zQ1oEjjhCvXrjBbzDk5PsUYf6GLVEoRaJnHRXGMt544lvlsawzT2QzbtvzET/+rr86d7Mqly/5P/OE/wmQ8IIwiWWRpKshk51guV9StYTZbsliuOTw8YTpf0RhDECh6WYZWmkG/x+7uNhuTIWkUMBoNSLKcOEkoioLXXnuNfn8ocyDrKKuCtq7p9frEUcT+ziYb4xE6CNjd36ff77NaLGga0RX2eiLYtdZ0YwGJ+1kvVxweH1M3hrJuZOjdGIqi4PDgVLKdOwGtd5CmEUmWsDEei9I8jIiSCGckA+DkdEZZNaRJxKX9TUCLM7lpSaJQ2ByB7pgd0OuLrnO9Egdx1dbdWauQ2WBZE6dpR1Cu8QipqW0bqkbGHbt7O4wGA+aLOYdH8lzCKGIyHuGsQeuAuMv+qht5jt5Jl9AaQ113KAUnyS++a/d777pzZh+saEPpBslRFFJXFdbJDRAvLJMwCMRZ3jnabWsYjQaiztBinVmtVujORzccDMjynOnZlNW6BCXSMIe8Pm1jUArSRJgo3jqUdzJQjyLyfk5TNygF/82P/TDHJ0dffSCdpm2pGsPpTAbRd+89oihrGiOqbNMaZnNRGjjnGA56pHFEL0u4fu0Ku7tbTIZDUey3LZvbG4zGI5IgYL5Y0B/2CMMdbl6/TLkqeXx0SJr3mM1mFOuVCIg97Oxu0+tl4Dzr5ZK7d+91ZwUZYCdpSpbnpElMmkasV2ssEg4RRQmrdcnR4RH3Hx4QRTGTyZDRZIC1jtWywJiSIJDzUnEw5VFywrDfo9fLBTm+quSmMZC5n7WW124/RivY3hUxsrOew8NTQYUPezhnObp9IoTgLCdJJHQ9DEM2NzbZ2d6hLkvossiWyzVHRyc0VcXG7jZPPvUkw0Gf+WKKMRZ9qEnzjHu377MqS2yvx3DQIwxD1mV5YT9JU5nFVVXdKW0kHxugLApQ0BtIeV136HLlJF0mSVPqDnbqIynhdBTSGHHAW2MIlZwLnfeEWjqiSZyxWq7xTnKzz32D89m8I2hZRpMhWivms+UFLi+Oxe5UN3JW9N4SBwFKezQKaz1Ky9ztK3m8pReZtZbNrTHrQliDy6JlNl0ynS8xnX5t0M+ZJBFRpLh16zrbm5tMRkPSJKauay5fvSLKACWu16apIYwZDgcYU/P46BBrkVZ22+AWktF1+coVwLGYLfj8577I/YePWS4WJKGmqBu2d/ew1pH3cm5c2SdZLuj1+vSHQ9I4pjYNJydnLFaF2Gc8XL925QLfXRQVYaCIIkkmOZsuQXnyXkrSmSjXRUXezwg7j1dTy+4yHg8YTwa0jeHkeAbes7U1Zjjq0TSGg4NjIWSFAZWHxXLJOOhz49pVuQn1cvb393HGcvvefe7eu8/tOw/Y2trkmWeeIElDwsDx8NEDoijm7HTKweMjZvMFy/maKE04Pj7h+OiQXt5jsjFhZ2uLMAo4PZuxqiQ3u+2ozHQ7jTDqTUcKs6RpJCAj5yUYo5cRRQHGJdjGslosybKc2BrajrPR1jVNK4Jf56EsBQ4URzFJGkus73SGdVbK/PG4axKtO9G14BJQirppu/fDEYUBqKATNWvQirIoyPO8y0N484+39CJrWsPB0ZTpmXihrBFH7eW9bUzTsLMz4crly2xtT9icDGjqmiTN2NnbJdCepqopVgvu3nuAd57hsNfNQDJKL/rBprYcHBxyNpsxGgzZ3t1GK83Z8RGv333A3buPmc+W5P0creBsWWKdpXFHjEZ9BsM+tTH4QBPZltPTY5pa1BhFUYuWzkpDIOpKOmNanDUcHJ5QlDXeW3q58CTyLGUyHjCeDKmrluFoTNZLMdYxn8+ZTRc8fHhIqBWj0YjReIA3ltOTGUEYkKYxo+GAxXLNYrkiDgLG4z5NU1OVBVE44PDohC9+8RUOHh8RpilpmrG5sUEWJ7z88mtSSkeaxXzFbLGkWK278jomzRKWqzVpErG1vc1g2KepG87OTsTev1yj9XlqTdhpDiWMQ3B1AUEgyDjTtjS1QeEZDQZUVc26KCmrWmwtSvgk5gJ+qlGBOOLp2F6BFhXNfDEXkbGW7LHN4QbLxYrjk2MUiuFwwGA45MHjQzGLGtsZdzUxunNLi0A8DAMx4SowphVx9VdrC38wGPmv/7pvYmMyZjIZk2Uxu9ubTMYjhoM+40GP05MTrt28waCfCxjHeB7cu8d8tebg4JijoyPCICBJYq5ev8Z4MmI07GObFq0Eee0Q9XfT1MzmS+q6Fct6XTGdrZjO1rSmpZfGxB1tdrUuiYKAjc2hnD8uGP1ShhhnaBvH5mRMnMYoLd2605MpZVmxs7MpFv5Gws1Ho6EQsKxhc2uDjY2RcEPqhuV8gUFmXc5IV3K9XFG3VlADgaauapartfDpjWUyHhJocX4P+jlxmnB6dMqjwyPyXo/d7a2ujR1yfHIimLtOCwh0HcBMXOAK6sYwGo8EnTcYcnJ8zHQ6ZTyZ4JWX0my5IlAhqgtCVApmU2nanANcRVYGrjUkcUSUSrZzW4vRtLGOJEsJuqwAay3Ke3TXmGiNWHqSJMa2RlDf1hBG4UWpao3Mv5TWbIwnGCvzP+MsRVljnQBcrfMyc1NKaMKh7kYyLc5ZellCnKSEUcgP/+h/zclX45ksyxJu3rjCeNgHBVubE2mvekOSxKDh+o0r0NY8uHvKcGND5C8qYDFfcHB4JDq+fo/d7U28sxwfHDE9OaXf77ExGV
NXFQ7NfLbg5PQUYy2z2YLjszlBGLAxGXLt8qYQe4sK61p6mZy/WmNZLEvUqmTYS0WjV8sZpN9LSdKE6WLO7N6iaxBIJtnm1gZ5lrGzu83GZISzliwTgfPR0THFaolpKpH1pAmTzYkMdZ0o0/NBj0Ap8VAdHnFwcEbTNGRpwubmGNtadrY32dvbRStYzucMxyM2JxOu3LiGs4bp2Rmz+RxrJNZWBwHDromwv7/P5mTMYr5ga2uTpqrpDUZYb5mennE6m3aCWUmgNNYKXcqLL0w7WC9WrIryQigdKEmo8SjSNCUZhDhrIVDEccTG5gaB1kxnU9ZrSTRVKPIsQ3lPUZfCrETy4Lx1sgMO5VxYrgtwlkGec/nyrU51s+b46ITVao1zjo3JhDQtmc4XeBSmbkhTgf5YaygrS9O05HnMYDAkTiO0UrQO+ApMm2/pnezypUv+z33fn2V6Nrug8Aq+LWB+NuXK5Utsb2+zsTFi0Oszn88JuvOMaQ1VXVB2VnelJTghjkKSOKbXk1zmsiipmoqDg2NOz5YordBarO6LdUFVVIRByGiUo7SmqhvqoiVNI/J+JkJdlByYlSju10XBYrkiDEOGg4EAQJsWrxS9PGNza0wvy/4/7Z1PjBtXHcc/P9szHnvsXf/ZP3UIajYorZQTjXpIBAfEH4Fy4NRDERLiwAUu/DmgRpw4ghACJERBIBShAoVSAYqEKig9B6ggoTTZJukGlmw2sb22d9f22uOZx+E9p6uglm3XYw/R+0jWvnkzWn/3Z/923rx58/2xUK0wCgOG/T2CUcit23f0ekzXYemhJZaXF+nt7OBmdaG8IBgxCiOaW03a7R1cL8cwGBCNIv28lZOhXCnj57Ls7OxSqVZxHYcwjGhvb3Nz7Z9EkeAaG+pmo8lgoG8HRKSYm/N510OLVMolur0eQRjqyRxHn4kLcwXa7W1ub9bZarVJiXD06BF8P49Iit1uj7t3m6D0sF4RgTGuGQXGm0VMYUdJ4WbHZ5+QXr+Pl8uZVTYDUEKn3db39sB44eu6BpLScU6JHjIWigWyjsPS4iLVxSqtraZeZ9rY0p+N6xmT2CH9/kDfCysUTAGLoXahFuh0ttnbC/CcDJ7vMT83h4pGdHt9zj9z/sGcXfQ8j5Xjx6gdGbL2+k3W1ze0z7zncOI9K8yX5sg4aXJ+ESfnspRbYhgMqd9tmlXVRcqlEoNBn/5eQN4vGAfgiNbWFp3OLs1mi+EooFD0ObFQpdFs0e/36Zsazumiz3Znl816m6zrUK2W8FyXlPFJrCxXcJwMjUaTdNrB9z0qCyWyjq5YmS8U8Yt5BP1c2ebtOpcuXWUYDFmqliiVfLq7fbbaegp8eXmRfN4jnc7Q6w/IOFmiULF64zUGQUQq7eB5LrmcflJgFEbUjtTMEHhA3qy+V5Km2dqm3WzT2+uT83MMAu0utVdvMhiOGAQBYRjiedpyr7PbhbspNjabBMMhQRgSBgGFQh7H0SskQrP2cOXYw9RqCxQLPuEoIpfP42Qy9Po99swQN18oks3qcrKv31ij3mgRhRF+XtskDIZDet2udvLK5QgGQ1q7uxBFKKVtxR0nQ2Qe+nTzHkTKFGnUT3ynBMJIrwIa9HZpbA65tbGBP1/CK/is31wnk05TrpR55NFHmJ8v4XkOBT9HPp9HRSPu3KlzZfUaGxubtDp98jmXk7UlHCdNtzcw7xPjk9GzRER2gNVZ6/gfLACNWYt4C6y+wzHW97BSavGd/IJEn8mAVaXU47MW8VaIyF+SrNHqOxyT0PfO1+9bLJYDYZPMYomZpCfZD2Yt4AAkXaPVdzgOrS/REx8Wy4NA0s9kFsv/PTbJLJaYSWySicjHRGRVRK4bl+JZaHi3iLwkIq+KyD9E5POmvyIivxeRa+Zn2fSLiHzHaL4sIqempDMtIn8VkQtme0VELhodz4qIa/qzZvu62X9sSvpKIvKciFwVkSsiciZJMRSRL5rP9xUR+ZmIeBON4dgqOkkvIA3cAI4DLnAJODkDHTXglGkXgdeAk8DXgadM/1PA10z7LPA7tOX7aeDilHR+CfgpcMFs/wJ40rSfBj5r2p8DnjbtJ4Fnp6TvPPAZ03aBUlJiiDbZXQNy+2L36UnGcKpf2rfxh58BXti3fQ44lwBdvwE+gl6FUjN9NfRNc4DvA5/Yd/y942LUdBR4EfggcMF8ORtA5v5YAi8AZ0w7Y46TmPXNmy+x3NefiBjyhrt1xcTkAvDRScYwqcPFA9t6TwszLHgMuMjbtymPk28BXwbGDzxVgbZSauzGuV/DPX1mf8ccHycrQB34sRnS/lBEfBISQ6XULeAbwL+A2+iYvMwEY5jUJEsUIlIAfgV8QSm1vX+f0v/SZnIfRETGJa1ensX7H5AMcAr4nlLqMaCLHh7eY8YxLKMLpawARwAfXQxlYiQ1yd7M7nvqiIiDTrBnlFLPm+47xp6cA9qUx8X7gI+LyE3g5+gh47eBkujyNvdruKfP7J8HmjHqA30W+LdS6qLZfg6ddEmJ4YeBNaVUXSkVAM+j4zqxGCY1yf4MnDAzPC76AvO30xYhul7Tj4ArSqlv7ts1timH/7Yp/5SZITuNsSmPS59S6pxS6qhS6hg6Rn9USn0SeAl44k30jXU/YY6P9QyilNoE1kXkUdP1IeBVEhJD9DDxtIjkzec91je5GMZ50XvIC9Kz6Nm8G8BXZqTh/ehhzGXgb+Z1Fj0GfxG4BvwBqJjjBfiu0fx34PEpav0Ab8wuHgf+hLZL/yWQNf2e2b5u9h+fkrb3ootAXgZ+DZSTFEPgq8BV4BXgJ0B2kjG0y6oslphJ6nDRYnlgsElmscSMTTKLJWZsklksMWOTzGKJGZtkFkvM2CSzWGLmP94s1oPr/nCLAAAAAElFTkSuQmCC\n" + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "im2 = cv2.imread('bus.jpg')\n", + "results = learner.infer(im2)\n", + "im2_dets = draw_bounding_boxes(im2, results, learner.classes, show=False, line_thickness=3)\n", + "plt.imshow(cv2.cvtColor(im2_dets, cv2.COLOR_BGR2RGB))\n" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "markdown", + "source": [ + "## Loading a custom model\n", + "\n", + "To load a custom model, the `path` parameter of the learner must be set to point to the weights file of your model:" + ], + 
"metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } + } + }, + { + "cell_type": "code", + "execution_count": null, + "outputs": [], + "source": [ + "learner = YOLOv5DetectorLearner(model_name='yolov5s', path='yolov5s.pt')" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "markdown", + "source": [ + "Then the detector can be used in the same manner as the pretrained ones." + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } + } + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/projects/python/perception/object_detection_2d/yolov5/webcam_demo.py b/projects/python/perception/object_detection_2d/yolov5/webcam_demo.py new file mode 100644 index 0000000000..f9a8759d91 --- /dev/null +++ b/projects/python/perception/object_detection_2d/yolov5/webcam_demo.py @@ -0,0 +1,77 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse + +import cv2 +import time + +from opendr.engine.data import Image +from opendr.perception.object_detection_2d import YOLOv5DetectorLearner +from opendr.perception.object_detection_2d import draw_bounding_boxes + + +class VideoReader(object): + def __init__(self, file_name): + self.file_name = file_name + try: # OpenCV needs int to read from webcam + self.file_name = int(file_name) + except ValueError: + pass + + def __iter__(self): + self.cap = cv2.VideoCapture(self.file_name) + if not self.cap.isOpened(): + raise IOError('Video {} cannot be opened'.format(self.file_name)) + return self + + def __next__(self): + was_read, img = self.cap.read() + if not was_read: + raise StopIteration + return img + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda") + parser.add_argument("--model", help="Model to use", type=str, default="yolov5s", + choices=['yolov5s', 'yolov5n', 'yolov5m', 'yolov5l', 'yolov5x', + 'yolov5n6', 'yolov5s6', 'yolov5m6', 'yolov5l6', 'custom']) + args = parser.parse_args() + + yolo_detector = YOLOv5DetectorLearner(model_name=args.model, device=args.device) + + # Use the first camera available on the system + image_provider = VideoReader(0) + fps = -1.0 + try: + for img in image_provider: + + img = Image(img) + start_time = time.perf_counter() + detections = yolo_detector.infer(img) + end_time = time.perf_counter() + fps = 1.0 / (end_time - start_time) + img = img.opencv() + if detections: + draw_bounding_boxes(img, detections, yolo_detector.classes, line_thickness=3) + + img = cv2.putText(img, "FPS: %.2f" % (fps,), (50, 50), cv2.FONT_HERSHEY_SIMPLEX, + 1, (255, 0, 0), 2, cv2.LINE_AA) + + cv2.imshow('Result', img) + cv2.waitKey(1) + except: + print("Inference fps: ", round(fps)) diff --git a/projects/perception/object_detection_3d/benchmark/.gitignore b/projects/python/perception/object_detection_3d/benchmark/.gitignore similarity index 100% rename from projects/perception/object_detection_3d/benchmark/.gitignore rename to projects/python/perception/object_detection_3d/benchmark/.gitignore diff --git a/projects/perception/object_detection_3d/benchmark/benchmark_voxel.py b/projects/python/perception/object_detection_3d/benchmark/benchmark_voxel.py similarity index 97% rename from projects/perception/object_detection_3d/benchmark/benchmark_voxel.py rename to projects/python/perception/object_detection_3d/benchmark/benchmark_voxel.py index 05690550c4..eae1a6d6e3 100644 --- a/projects/perception/object_detection_3d/benchmark/benchmark_voxel.py +++ b/projects/python/perception/object_detection_3d/benchmark/benchmark_voxel.py @@ -26,7 +26,7 @@ def benchmark_voxel(): - root_dir = "./projects/perception/object_detection_3d/benchmark" + root_dir = "./projects/python/perception/object_detection_3d/benchmark" temp_dir = root_dir + "/tmp" configs_dir = root_dir + "/configs" models_dir = root_dir + "/models" diff --git a/projects/perception/object_detection_3d/benchmark/configs/pointpillars_car_xyres_16.proto b/projects/python/perception/object_detection_3d/benchmark/configs/pointpillars_car_xyres_16.proto similarity index 100% rename from projects/perception/object_detection_3d/benchmark/configs/pointpillars_car_xyres_16.proto rename to projects/python/perception/object_detection_3d/benchmark/configs/pointpillars_car_xyres_16.proto diff --git a/projects/perception/object_detection_3d/benchmark/configs/pointpillars_ped_cycle_xyres_16.proto 
b/projects/python/perception/object_detection_3d/benchmark/configs/pointpillars_ped_cycle_xyres_16.proto similarity index 100% rename from projects/perception/object_detection_3d/benchmark/configs/pointpillars_ped_cycle_xyres_16.proto rename to projects/python/perception/object_detection_3d/benchmark/configs/pointpillars_ped_cycle_xyres_16.proto diff --git a/projects/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16.proto b/projects/python/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16.proto similarity index 100% rename from projects/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16.proto rename to projects/python/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16.proto diff --git a/projects/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.24.proto b/projects/python/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.24.proto similarity index 100% rename from projects/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.24.proto rename to projects/python/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.24.proto diff --git a/projects/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.24_2.proto b/projects/python/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.24_2.proto similarity index 100% rename from projects/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.24_2.proto rename to projects/python/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.24_2.proto diff --git a/projects/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.33.proto b/projects/python/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.33.proto similarity index 100% rename from projects/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.33.proto rename to projects/python/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.33.proto diff --git a/projects/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.5.proto b/projects/python/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.5.proto similarity index 100% rename from projects/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.5.proto rename to projects/python/perception/object_detection_3d/benchmark/configs/tanet_car_xyres_16_near_0.5.proto diff --git a/projects/perception/object_detection_3d/benchmark/configs/tanet_ped_cycle_xyres_16.proto b/projects/python/perception/object_detection_3d/benchmark/configs/tanet_ped_cycle_xyres_16.proto similarity index 100% rename from projects/perception/object_detection_3d/benchmark/configs/tanet_ped_cycle_xyres_16.proto rename to projects/python/perception/object_detection_3d/benchmark/configs/tanet_ped_cycle_xyres_16.proto diff --git a/projects/perception/object_detection_3d/benchmark/media/000000.bin b/projects/python/perception/object_detection_3d/benchmark/media/000000.bin similarity index 100% rename from projects/perception/object_detection_3d/benchmark/media/000000.bin rename to projects/python/perception/object_detection_3d/benchmark/media/000000.bin diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/README.md b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/README.md similarity index 93% rename from 
projects/perception/object_detection_3d/demos/voxel_object_detection_3d/README.md rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/README.md index 0a8303a1be..dd2f04280c 100644 --- a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/README.md +++ b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/README.md @@ -19,12 +19,12 @@ pip install -e . ## Running the example Car 3D Object Detection using [TANet](https://arxiv.org/abs/1912.05163) from [KITTI](http://www.cvlibs.net/datasets/kitti/eval_object.php?obj_benchmark=3d)-like dataset ```bash -python3 demo.py --ip=0.0.0.0 --port=2605 --algorithm=voxel --model_name=tanet_car_xyres_16 --source=disk --data_path=/data/sets/kitti_second/training/velodyne --model_config=configs/tanet_car_xyres_16.proto +python3 demo.py --ip=0.0.0.0 --port=2605 --algorithm=voxel --model_name=tanet_car_xyres_16 --source=disk --data_path=/data/sets/kitti_tracking/training/velodyne/0000 --model_config=configs/tanet_car_xyres_16.proto ``` Car 3D Object Detection using [PointPillars](https://arxiv.org/abs/1812.05784) from [KITTI](http://www.cvlibs.net/datasets/kitti/eval_object.php?obj_benchmark=3d)-like dataset ```bash -python3 demo.py --ip=0.0.0.0 --port=2605 --algorithm=voxel --model_name=pointpillars_car_xyres_16 --source=disk --data_path=/data/sets/kitti_second/training/velodyne --model_config=configs/tanet_car_xyres_16.proto +python3 demo.py --ip=0.0.0.0 --port=2605 --algorithm=voxel --model_name=pointpillars_car_xyres_16 --source=disk --data_path=/data/sets/kitti_tracking/training/velodyne/0000 --model_config=configs/pointpillars_car_xyres_16.proto ``` 3D Object Detection using a specially trained model X for O3M Lidar diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/__init__.py b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/__init__.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/__init__.py rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/__init__.py diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/pointpillars_car_xyres_16.proto b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/pointpillars_car_xyres_16.proto similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/pointpillars_car_xyres_16.proto rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/pointpillars_car_xyres_16.proto diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/pointpillars_ped_cycle_xyres_16.proto b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/pointpillars_ped_cycle_xyres_16.proto similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/pointpillars_ped_cycle_xyres_16.proto rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/pointpillars_ped_cycle_xyres_16.proto diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16.proto b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16.proto similarity index 100% rename from 
projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16.proto rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16.proto diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.24.proto b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.24.proto similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.24.proto rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.24.proto diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.24_2.proto b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.24_2.proto similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.24_2.proto rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.24_2.proto diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.33.proto b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.33.proto similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.33.proto rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.33.proto diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.5.proto b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.5.proto similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.5.proto rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_car_xyres_16_near_0.5.proto diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_ped_cycle_xyres_16.proto b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_ped_cycle_xyres_16.proto similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_ped_cycle_xyres_16.proto rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/configs/tanet_ped_cycle_xyres_16.proto diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/data_generators.py b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/data_generators.py similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/data_generators.py rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/data_generators.py diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/demo.py b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/demo.py similarity index 97% rename from 
projects/perception/object_detection_3d/demos/voxel_object_detection_3d/demo.py rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/demo.py index d113b26a05..2ce25d7733 100644 --- a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/demo.py +++ b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/demo.py @@ -23,6 +23,7 @@ # OpenDR imports from opendr.perception.object_detection_3d import VoxelObjectDetection3DLearner +from opendr.perception.object_tracking_3d import ObjectTracking3DAb3dmotLearner from data_generators import ( lidar_point_cloud_generator, disk_point_cloud_generator, @@ -162,6 +163,7 @@ def voxel_object_detection_3d(config_path, model_name=None): # Init model detection_learner = VoxelObjectDetection3DLearner(config_path) + tracking_learner = ObjectTracking3DAb3dmotLearner() if model_name is not None and not os.path.exists( "./models/" + model_name @@ -172,6 +174,7 @@ def voxel_object_detection_3d(config_path, model_name=None): else: detection_learner = None + tracking_learner = None def process_key(key): @@ -284,8 +287,10 @@ def process_key(key): if predict: predictions = detection_learner.infer(point_cloud) + tracking_predictions = tracking_learner.infer(predictions) else: predictions = [] + tracking_predictions = [] if len(predictions) > 0: print( @@ -296,7 +301,7 @@ def process_key(key): t = time.time() frame_bev_2 = draw_point_cloud_bev( - point_cloud.data, predictions, scale, xs, ys + point_cloud.data, tracking_predictions, scale, xs, ys ) frame_proj_2 = draw_point_cloud_projected_numpy( point_cloud.data, diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/draw_point_clouds.py b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/draw_point_clouds.py similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/draw_point_clouds.py rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/draw_point_clouds.py diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/fonts/.gitignore b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/fonts/.gitignore similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/fonts/.gitignore rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/fonts/.gitignore diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/media/demo.png b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/media/demo.png similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/media/demo.png rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/media/demo.png diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/metrics.py b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/metrics.py similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/metrics.py rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/metrics.py diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/models/.gitignore b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/models/.gitignore similarity 
index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/models/.gitignore rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/models/.gitignore diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/channel.py b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/channel.py similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/channel.py rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/channel.py diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/main.py b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/main.py similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/main.py rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/main.py diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/o3m_lidar.py b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/o3m_lidar.py similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/o3m_lidar.py rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/o3m_lidar.py diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/structures.py b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/structures.py similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/structures.py rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/o3m_lidar/structures.py diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/requirements.txt b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/requirements.txt similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/requirements.txt rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/requirements.txt diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/rplidar_processor.py b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/rplidar_processor.py similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/rplidar_processor.py rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/rplidar_processor.py diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/setup.py b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/setup.py similarity index 100% rename from projects/perception/object_detection_3d/demos/voxel_object_detection_3d/setup.py rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/setup.py diff --git a/projects/perception/object_detection_3d/demos/voxel_object_detection_3d/templates/index.html b/projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/templates/index.html similarity index 100% rename from 
projects/perception/object_detection_3d/demos/voxel_object_detection_3d/templates/index.html rename to projects/python/perception/object_detection_3d/demos/voxel_object_detection_3d/templates/index.html diff --git a/projects/perception/object_tracking_2d/benchmark/.gitignore b/projects/python/perception/object_tracking_2d/benchmark/.gitignore similarity index 100% rename from projects/perception/object_tracking_2d/benchmark/.gitignore rename to projects/python/perception/object_tracking_2d/benchmark/.gitignore diff --git a/projects/perception/object_tracking_2d/benchmark/benchmark_deep_sort.py b/projects/python/perception/object_tracking_2d/benchmark/benchmark_deep_sort.py similarity index 98% rename from projects/perception/object_tracking_2d/benchmark/benchmark_deep_sort.py rename to projects/python/perception/object_tracking_2d/benchmark/benchmark_deep_sort.py index 5cd6ce8e83..01e0643f3f 100644 --- a/projects/perception/object_tracking_2d/benchmark/benchmark_deep_sort.py +++ b/projects/python/perception/object_tracking_2d/benchmark/benchmark_deep_sort.py @@ -29,7 +29,7 @@ def benchmark_fair_mot(): - root_dir = "./projects/perception/object_tracking_2d/benchmark" + root_dir = "./projects/python/perception/object_tracking_2d/benchmark" temp_dir = root_dir + "/tmp" models_dir = root_dir + "/models" num_runs = 100 diff --git a/projects/perception/object_tracking_2d/benchmark/benchmark_fair_mot.py b/projects/python/perception/object_tracking_2d/benchmark/benchmark_fair_mot.py similarity index 97% rename from projects/perception/object_tracking_2d/benchmark/benchmark_fair_mot.py rename to projects/python/perception/object_tracking_2d/benchmark/benchmark_fair_mot.py index 23f205fe79..c94bd23fec 100644 --- a/projects/perception/object_tracking_2d/benchmark/benchmark_fair_mot.py +++ b/projects/python/perception/object_tracking_2d/benchmark/benchmark_fair_mot.py @@ -26,7 +26,7 @@ def benchmark_fair_mot(): - root_dir = "./projects/perception/object_tracking_2d/benchmark" + root_dir = "./projects/python/perception/object_tracking_2d/benchmark" temp_dir = root_dir + "/tmp" models_dir = root_dir + "/models" media_dir = root_dir + "/media" diff --git a/projects/perception/object_tracking_2d/benchmark/media/000001.jpg b/projects/python/perception/object_tracking_2d/benchmark/media/000001.jpg similarity index 100% rename from projects/perception/object_tracking_2d/benchmark/media/000001.jpg rename to projects/python/perception/object_tracking_2d/benchmark/media/000001.jpg diff --git a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/.gitignore b/projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/.gitignore similarity index 100% rename from projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/.gitignore rename to projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/.gitignore diff --git a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/README.md b/projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/README.md similarity index 100% rename from projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/README.md rename to projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/README.md diff --git a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/data_generators.py b/projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/data_generators.py similarity index 100% rename from projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/data_generators.py 
rename to projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/data_generators.py diff --git a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/demo.py b/projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/demo.py similarity index 99% rename from projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/demo.py rename to projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/demo.py index c6a0819a69..dd23d97019 100644 --- a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/demo.py +++ b/projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/demo.py @@ -199,7 +199,7 @@ def fair_mot_tracking(model_name, device): if predict: # Init model - learner = ObjectTracking2DFairMotLearner(device=device) + learner = ObjectTracking2DFairMotLearner(device=device, use_pretrained_backbone=False) if not os.path.exists("./models/" + model_name): learner.download(model_name, "./models") learner.load("./models/" + model_name, verbose=True) diff --git a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/media/video.gif b/projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/media/video.gif similarity index 100% rename from projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/media/video.gif rename to projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/media/video.gif diff --git a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/requirements.txt b/projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/requirements.txt similarity index 100% rename from projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/requirements.txt rename to projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/requirements.txt diff --git a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/setup.py b/projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/setup.py similarity index 100% rename from projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/setup.py rename to projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/setup.py diff --git a/projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/templates/index.html b/projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/templates/index.html similarity index 100% rename from projects/perception/object_tracking_2d/demos/fair_mot_deep_sort/templates/index.html rename to projects/python/perception/object_tracking_2d/demos/fair_mot_deep_sort/templates/index.html diff --git a/projects/python/perception/object_tracking_2d/demos/siamrpn/README.md b/projects/python/perception/object_tracking_2d/demos/siamrpn/README.md new file mode 100644 index 0000000000..23c95a1b3c --- /dev/null +++ b/projects/python/perception/object_tracking_2d/demos/siamrpn/README.md @@ -0,0 +1,22 @@ +# SiamRPNLearner Demos + +This folder contains minimal code usage examples that showcase the basic functionality of the SiamRPNLearner +provided by OpenDR. Specifically the following examples are provided: +1. inference_demo.py: Perform inference on a video. Setting `--device cpu` performs inference on CPU. + +2. eval_demo.py: Perform evaluation on the OTB dataset, supported as `ExternalDataset` type. The toolkit + provides the option to download the dataset at the location set by `--data-root /path/to/otb`. + Setting `--device cpu` performs evaluation on CPU. + +3. train_demo.py: Fit learner to dataset. 
COCO, ILSVRC-VID, ILSVRC-DET and Youtube-BB datasets are + supported via `ExternalDataset` class. + The user must set the dataset type using the `--datasets` argument and provide the datasets + root path with the `--data-root` argument. + See [here](/src/opendr/perception/object_tracking_2d/siamrpn/README.md) for the appropriate + data folder structure. + Setting `--device cpu` performs training on CPU. Additional command line arguments can be set + to change various training hyperparameters, and running `python3 train_demo.py -h` prints + information about them on stdout. + + Example usage: + `python3 train_demo.py --dataset coco --data-root /path/to/coco2017` \ No newline at end of file diff --git a/projects/python/perception/object_tracking_2d/demos/siamrpn/eval_demo.py b/projects/python/perception/object_tracking_2d/demos/siamrpn/eval_demo.py new file mode 100644 index 0000000000..dee2014eff --- /dev/null +++ b/projects/python/perception/object_tracking_2d/demos/siamrpn/eval_demo.py @@ -0,0 +1,36 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse + +from opendr.engine.datasets import ExternalDataset +from opendr.perception.object_tracking_2d import SiamRPNLearner + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--data-root", help="Dataset root folder", type=str, default=".") + + args = parser.parse_args() + + learner = SiamRPNLearner(device=args.device) + learner.download(".", mode="pretrained") + learner.load("siamrpn_opendr") + + # download otb2015 dataset and run + # alternatively you can download the "test_data" and use "OTBtest" dataset_type to only run on one small video + learner.download(args.data_root, "otb2015", verbose=True, overwrite=False) + dataset = ExternalDataset(args.data_root, dataset_type="OTB2015") + learner.eval(dataset) diff --git a/projects/python/perception/object_tracking_2d/demos/siamrpn/inference_demo.py b/projects/python/perception/object_tracking_2d/demos/siamrpn/inference_demo.py new file mode 100644 index 0000000000..da29c08949 --- /dev/null +++ b/projects/python/perception/object_tracking_2d/demos/siamrpn/inference_demo.py @@ -0,0 +1,58 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import argparse + +import cv2 +from opendr.engine.target import TrackingAnnotation +from opendr.perception.object_tracking_2d import SiamRPNLearner + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) + + args = parser.parse_args() + + learner = SiamRPNLearner(device=args.device) + learner.download(".", mode="pretrained") + learner.load("siamrpn_opendr") + + learner.download(".", mode="video") + cap = cv2.VideoCapture("tc_Skiing_ce.mp4") + + init_bbox = TrackingAnnotation(left=598, top=312, width=75, height=200, name=0, id=0) + + frame_no = 0 + while cap.isOpened(): + ok, frame = cap.read() + if not ok: + break + + if frame_no == 0: + # first frame, pass init_bbox to infer function to initialize the tracker + pred_bbox = learner.infer(frame, init_bbox) + else: + # after the first frame only pass the image to infer + pred_bbox = learner.infer(frame) + + frame_no += 1 + + cv2.rectangle(frame, (pred_bbox.left, pred_bbox.top), + (pred_bbox.left + pred_bbox.width, pred_bbox.top + pred_bbox.height), + (0, 255, 255), 3) + cv2.imshow('Tracking Result', frame) + cv2.waitKey(1) + + cv2.destroyAllWindows() diff --git a/projects/python/perception/object_tracking_2d/demos/siamrpn/train_demo.py b/projects/python/perception/object_tracking_2d/demos/siamrpn/train_demo.py new file mode 100644 index 0000000000..95aa92c14f --- /dev/null +++ b/projects/python/perception/object_tracking_2d/demos/siamrpn/train_demo.py @@ -0,0 +1,42 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse + +from opendr.engine.datasets import ExternalDataset +from opendr.perception.object_tracking_2d import SiamRPNLearner + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("--datasets", help="Dataset to train on. 
To train with multiple, separate with ','", + type=str, default="coco") + parser.add_argument("--data-root", help="Dataset root folder", type=str) + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda", choices=["cuda", "cpu"]) + parser.add_argument("--batch-size", help="Batch size to use for training", type=int, default=6) + parser.add_argument("--lr", help="Learning rate to use for training", type=float, default=5e-4) + parser.add_argument("--n-epochs", help="Number of total epochs", type=int, default=25) + + args = parser.parse_args() + + if ',' in args.datasets: + dataset = [ExternalDataset(args.data_root, dataset_type) for dataset_type in args.datasets.split(',')] + else: + dataset = ExternalDataset(args.data_root, args.datasets) + + learner = SiamRPNLearner(device=args.device, n_epochs=args.n_epochs, batch_size=args.batch_size, + lr=args.lr) + + learner.fit(dataset) + learner.save("siamrpn_custom") diff --git a/projects/perception/object_tracking_3d/benchmark/.gitignore b/projects/python/perception/object_tracking_3d/benchmark/.gitignore similarity index 100% rename from projects/perception/object_tracking_3d/benchmark/.gitignore rename to projects/python/perception/object_tracking_3d/benchmark/.gitignore diff --git a/projects/perception/object_tracking_3d/benchmark/benchmark_ab3dmot.py b/projects/python/perception/object_tracking_3d/benchmark/benchmark_ab3dmot.py similarity index 97% rename from projects/perception/object_tracking_3d/benchmark/benchmark_ab3dmot.py rename to projects/python/perception/object_tracking_3d/benchmark/benchmark_ab3dmot.py index f86caa334c..ec4d6e12c8 100644 --- a/projects/perception/object_tracking_3d/benchmark/benchmark_ab3dmot.py +++ b/projects/python/perception/object_tracking_3d/benchmark/benchmark_ab3dmot.py @@ -25,7 +25,7 @@ def benchmark_ab3dmot(): - root_dir = "./projects/perception/object_tracking_3d/benchmark" + root_dir = "./projects/python/perception/object_tracking_3d/benchmark" media_dir = root_dir + "/media" num_runs = 100 diff --git a/projects/perception/object_tracking_3d/benchmark/media/0000.txt b/projects/python/perception/object_tracking_3d/benchmark/media/0000.txt similarity index 100% rename from projects/perception/object_tracking_3d/benchmark/media/0000.txt rename to projects/python/perception/object_tracking_3d/benchmark/media/0000.txt diff --git a/projects/perception/panoptic_segmentation/efficient_ps/README.md b/projects/python/perception/panoptic_segmentation/efficient_ps/README.md similarity index 100% rename from projects/perception/panoptic_segmentation/efficient_ps/README.md rename to projects/python/perception/panoptic_segmentation/efficient_ps/README.md diff --git a/projects/perception/panoptic_segmentation/efficient_ps/example_usage.py b/projects/python/perception/panoptic_segmentation/efficient_ps/example_usage.py similarity index 100% rename from projects/perception/panoptic_segmentation/efficient_ps/example_usage.py rename to projects/python/perception/panoptic_segmentation/efficient_ps/example_usage.py diff --git a/projects/python/perception/pose_estimation/high_resolution_pose_estimation/README.md b/projects/python/perception/pose_estimation/high_resolution_pose_estimation/README.md new file mode 100644 index 0000000000..da48ec4088 --- /dev/null +++ b/projects/python/perception/pose_estimation/high_resolution_pose_estimation/README.md @@ -0,0 +1,11 @@ +# High Resolution Pose Estimation + +This folder contains sample applications that demonstrate various parts of the 
functionality provided by the High Resolution Pose Estimation algorithm provided by OpenDR. + +More specifically, the applications provided are: + +1. demos/inference_demo.py: A tool that demonstrates how to perform inference on a single high resolution image and then draw the detected poses. +2. demos/eval_demo.py: A tool that demonstrates how to perform evaluation using the High Resolution Pose Estimation algorithm on 720p, 1080p and 1440p datasets. +3. demos/benchmarking_demo.py: A simple benchmarking tool for measuring the performance of High Resolution Pose Estimation in various platforms. + + diff --git a/projects/python/perception/pose_estimation/high_resolution_pose_estimation/demos/benchmarking_demo.py b/projects/python/perception/pose_estimation/high_resolution_pose_estimation/demos/benchmarking_demo.py new file mode 100644 index 0000000000..33142e7c99 --- /dev/null +++ b/projects/python/perception/pose_estimation/high_resolution_pose_estimation/demos/benchmarking_demo.py @@ -0,0 +1,83 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import cv2 +import time +from opendr.perception.pose_estimation import HighResolutionPoseEstimationLearner +import argparse +from os.path import join +from tqdm import tqdm +import numpy as np + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda") + parser.add_argument("--accelerate", help="Enables acceleration flags (e.g., stride)", default=False, + action="store_true") + parser.add_argument("--height1", help="Base height of resizing in heatmap generation", default=360) + parser.add_argument("--height2", help="Base height of resizing in second inference", default=540) + + args = parser.parse_args() + + device, accelerate, base_height1, base_height2 = args.device, args.accelerate,\ + args.height1, args.height2 + + if device == 'cpu': + import torch + torch.set_flush_denormal(True) + torch.set_num_threads(8) + + if accelerate: + stride = True + stages = 0 + half_precision = True + else: + stride = False + stages = 2 + half_precision = False + + pose_estimator = HighResolutionPoseEstimationLearner(device=device, num_refinement_stages=stages, + mobilenet_use_stride=stride, half_precision=half_precision, + first_pass_height=int(base_height1), + second_pass_height=int(base_height2)) + pose_estimator.download(path=".", verbose=True) + pose_estimator.load("openpose_default") + + # Download one sample image + pose_estimator.download(path=".", mode="test_data") + image_path = join("temp", "dataset", "image", "000000000785_1080.jpg") + img = cv2.imread(image_path) + + fps_list = [] + print("Benchmarking...") + for i in tqdm(range(50)): + start_time = time.perf_counter() + # Perform inference + poses = pose_estimator.infer(img) + + end_time = time.perf_counter() + fps_list.append(1.0 / (end_time - start_time)) + print("Average FPS: %.2f" % (np.mean(fps_list))) + + # If pynvml is available, try to get memory 
stats for cuda + try: + if 'cuda' in device: + from pynvml import nvmlInit, nvmlDeviceGetMemoryInfo, nvmlDeviceGetHandleByIndex + + nvmlInit() + info = nvmlDeviceGetMemoryInfo(nvmlDeviceGetHandleByIndex(0)) + print("Memory allocated: %.2f MB " % (info.used / 1024 ** 2)) + except ImportError: + pass diff --git a/projects/python/perception/pose_estimation/high_resolution_pose_estimation/demos/eval_demo.py b/projects/python/perception/pose_estimation/high_resolution_pose_estimation/demos/eval_demo.py new file mode 100644 index 0000000000..9f71e0bd5c --- /dev/null +++ b/projects/python/perception/pose_estimation/high_resolution_pose_estimation/demos/eval_demo.py @@ -0,0 +1,62 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from opendr.perception.pose_estimation import HighResolutionPoseEstimationLearner +import argparse +from os.path import join +from opendr.engine.datasets import ExternalDataset +import time + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda") + parser.add_argument("--accelerate", help="Enables acceleration flags (e.g., stride)", default=False, + action="store_true") + parser.add_argument("--height1", help="Base height of resizing in first inference", default=360) + parser.add_argument("--height2", help="Base height of resizing in second inference", default=540) + + args = parser.parse_args() + + device, accelerate, base_height1, base_height2 = args.device, args.accelerate,\ + args.height1, args.height2 + + if accelerate: + stride = True + stages = 0 + half_precision = True + else: + stride = True + stages = 2 + half_precision = True + + pose_estimator = HighResolutionPoseEstimationLearner(device=device, num_refinement_stages=stages, + mobilenet_use_stride=stride, + half_precision=half_precision, + first_pass_height=int(base_height1), + second_pass_height=int(base_height2)) + pose_estimator.download(path=".", verbose=True) + pose_estimator.load("openpose_default") + + # Download a sample dataset + pose_estimator.download(path=".", mode="test_data") + + eval_dataset = ExternalDataset(path=join("temp", "dataset"), dataset_type="COCO") + + t0 = time.time() + results_dict = pose_estimator.eval(eval_dataset, use_subset=False, verbose=True, silent=True, + images_folder_name="image", annotations_filename="annotation.json") + t1 = time.time() + print("\n Evaluation time: ", t1 - t0, "seconds") + print("Evaluation results = ", results_dict) diff --git a/projects/python/perception/pose_estimation/high_resolution_pose_estimation/demos/inference_demo.py b/projects/python/perception/pose_estimation/high_resolution_pose_estimation/demos/inference_demo.py new file mode 100644 index 0000000000..f1b588d943 --- /dev/null +++ b/projects/python/perception/pose_estimation/high_resolution_pose_estimation/demos/inference_demo.py @@ -0,0 +1,65 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import cv2 +from opendr.perception.pose_estimation import HighResolutionPoseEstimationLearner +from opendr.perception.pose_estimation import draw +from opendr.engine.data import Image +import argparse +from os.path import join + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cuda") + parser.add_argument("--accelerate", help="Enables acceleration flags (e.g., stride)", default=False, + action="store_true") + parser.add_argument("--height1", help="Base height of resizing in first inference", default=360) + parser.add_argument("--height2", help="Base height of resizing in second inference", default=540) + + args = parser.parse_args() + + device, accelerate, base_height1, base_height2 = args.device, args.accelerate,\ + args.height1, args.height2 + + if accelerate: + stride = True + stages = 0 + half_precision = True + else: + stride = False + stages = 2 + half_precision = False + + pose_estimator = HighResolutionPoseEstimationLearner(device=device, num_refinement_stages=stages, + mobilenet_use_stride=stride, half_precision=half_precision, + first_pass_height=int(base_height1), + second_pass_height=int(base_height2)) + pose_estimator.download(path=".", verbose=True) + pose_estimator.load("openpose_default") + + # Download one sample image + pose_estimator.download(path=".", mode="test_data") + + image_path = join("temp", "dataset", "image", "000000000785_1080.jpg") + + img = Image.open(image_path) + + poses = pose_estimator.infer(img) + + img_cv = img.opencv() + for pose in poses: + draw(img_cv, pose) + cv2.imshow('Results', img_cv) + cv2.waitKey(0) diff --git a/projects/perception/lightweight_open_pose/README.md b/projects/python/perception/pose_estimation/lightweight_open_pose/README.md similarity index 100% rename from projects/perception/lightweight_open_pose/README.md rename to projects/python/perception/pose_estimation/lightweight_open_pose/README.md diff --git a/projects/perception/lightweight_open_pose/demos/benchmarking_demo.py b/projects/python/perception/pose_estimation/lightweight_open_pose/demos/benchmarking_demo.py similarity index 100% rename from projects/perception/lightweight_open_pose/demos/benchmarking_demo.py rename to projects/python/perception/pose_estimation/lightweight_open_pose/demos/benchmarking_demo.py diff --git a/projects/perception/lightweight_open_pose/demos/eval_demo.py b/projects/python/perception/pose_estimation/lightweight_open_pose/demos/eval_demo.py similarity index 100% rename from projects/perception/lightweight_open_pose/demos/eval_demo.py rename to projects/python/perception/pose_estimation/lightweight_open_pose/demos/eval_demo.py diff --git a/projects/perception/lightweight_open_pose/demos/inference_demo.py b/projects/python/perception/pose_estimation/lightweight_open_pose/demos/inference_demo.py similarity index 100% rename from projects/perception/lightweight_open_pose/demos/inference_demo.py rename to 
projects/python/perception/pose_estimation/lightweight_open_pose/demos/inference_demo.py diff --git a/projects/perception/lightweight_open_pose/demos/inference_tutorial.ipynb b/projects/python/perception/pose_estimation/lightweight_open_pose/demos/inference_tutorial.ipynb similarity index 100% rename from projects/perception/lightweight_open_pose/demos/inference_tutorial.ipynb rename to projects/python/perception/pose_estimation/lightweight_open_pose/demos/inference_tutorial.ipynb diff --git a/projects/perception/lightweight_open_pose/demos/webcam_demo.py b/projects/python/perception/pose_estimation/lightweight_open_pose/demos/webcam_demo.py similarity index 100% rename from projects/perception/lightweight_open_pose/demos/webcam_demo.py rename to projects/python/perception/pose_estimation/lightweight_open_pose/demos/webcam_demo.py diff --git a/projects/perception/lightweight_open_pose/jetbot/README.md b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/README.md similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/README.md rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/README.md diff --git a/projects/perception/lightweight_open_pose/jetbot/evaluate.sh b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/evaluate.sh similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/evaluate.sh rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/evaluate.sh diff --git a/projects/perception/lightweight_open_pose/jetbot/fall_controller.py b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/fall_controller.py similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/fall_controller.py rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/fall_controller.py diff --git a/projects/perception/lightweight_open_pose/jetbot/flask.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/flask.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/flask.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/flask.png diff --git a/projects/perception/lightweight_open_pose/jetbot/jetbot.sh b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/jetbot.sh similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/jetbot.sh rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/jetbot.sh diff --git a/projects/perception/lightweight_open_pose/jetbot/jetbot_kill.sh b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/jetbot_kill.sh similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/jetbot_kill.sh rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/jetbot_kill.sh diff --git a/projects/perception/lightweight_open_pose/jetbot/requirements.txt b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/requirements.txt similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/requirements.txt rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/requirements.txt diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/__init__.py b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/results/.keep similarity index 100% 
rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/__init__.py rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/results/.keep diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/protos/human_010_sit.wbo b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/protos/human_010_sit.wbo similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/protos/human_010_sit.wbo rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/protos/human_010_sit.wbo diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/protos/human_010_standing.wbo b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/protos/human_010_standing.wbo similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/protos/human_010_standing.wbo rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/protos/human_010_standing.wbo diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/pose_demo.wbt b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/pose_demo.wbt similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/pose_demo.wbt rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/pose_demo.wbt diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/brown_eye.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/brown_eye.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/brown_eye.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/brown_eye.png diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyebrow005.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyebrow005.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyebrow005.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyebrow005.png diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyebrow009.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyebrow009.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyebrow009.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyebrow009.png diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyelashes01.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyelashes01.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyelashes01.png rename to 
projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyelashes01.png diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyelashes04.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyelashes04.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyelashes04.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/eyelashes04.png diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/female_elegantsuit01_diffuse.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/female_elegantsuit01_diffuse.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/female_elegantsuit01_diffuse.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/female_elegantsuit01_diffuse.png diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/female_elegantsuit01_normal.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/female_elegantsuit01_normal.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/female_elegantsuit01_normal.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/female_elegantsuit01_normal.png diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/keylthnormals.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/keylthnormals.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/keylthnormals.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/keylthnormals.png diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/keylthtex1.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/keylthtex1.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/keylthtex1.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/keylthtex1.png diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/male_casualsuit02_diffuse.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/male_casualsuit02_diffuse.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/male_casualsuit02_diffuse.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/male_casualsuit02_diffuse.png diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/male_casualsuit02_normal.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/male_casualsuit02_normal.png similarity index 100% 
rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/male_casualsuit02_normal.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/male_casualsuit02_normal.png diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/middleage_lightskinned_male_diffuse2.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/middleage_lightskinned_male_diffuse2.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/middleage_lightskinned_male_diffuse2.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/middleage_lightskinned_male_diffuse2.png diff --git a/projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/short01_diffuse.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/short01_diffuse.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/short01_diffuse.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/simulation_pose/worlds/textures/short01_diffuse.png diff --git a/projects/perception/lightweight_open_pose/jetbot/static/eu.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/static/eu.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/static/eu.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/static/eu.png diff --git a/projects/perception/lightweight_open_pose/jetbot/static/opendr.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/static/opendr.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/static/opendr.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/static/opendr.png diff --git a/projects/perception/lightweight_open_pose/jetbot/static/opendr_logo.png b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/static/opendr_logo.png similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/static/opendr_logo.png rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/static/opendr_logo.png diff --git a/projects/perception/lightweight_open_pose/jetbot/templates/index.html b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/templates/index.html similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/templates/index.html rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/templates/index.html diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/__init__.py b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/__init__.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/__init__.py rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/__init__.py diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/active.py b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/active.py similarity index 100% rename from 
projects/perception/lightweight_open_pose/jetbot/utils/active.py rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/active.py diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/pid.py b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/pid.py similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/utils/pid.py rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/pid.py diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/pose_controller.py b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/pose_controller.py similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/utils/pose_controller.py rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/pose_controller.py diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/pose_utils.py b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/pose_utils.py similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/utils/pose_utils.py rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/pose_utils.py diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/robot_interface.py b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/robot_interface.py similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/utils/robot_interface.py rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/robot_interface.py diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/visualization.py b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/visualization.py similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/utils/visualization.py rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/visualization.py diff --git a/projects/perception/lightweight_open_pose/jetbot/utils/webots.py b/projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/webots.py similarity index 100% rename from projects/perception/lightweight_open_pose/jetbot/utils/webots.py rename to projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/webots.py diff --git a/projects/perception/semantic_segmentation/bisenet/README.md b/projects/python/perception/semantic_segmentation/bisenet/README.md similarity index 100% rename from projects/perception/semantic_segmentation/bisenet/README.md rename to projects/python/perception/semantic_segmentation/bisenet/README.md diff --git a/projects/perception/semantic_segmentation/bisenet/eval_demo.py b/projects/python/perception/semantic_segmentation/bisenet/eval_demo.py similarity index 100% rename from projects/perception/semantic_segmentation/bisenet/eval_demo.py rename to projects/python/perception/semantic_segmentation/bisenet/eval_demo.py diff --git a/projects/perception/semantic_segmentation/bisenet/inference_demo.py b/projects/python/perception/semantic_segmentation/bisenet/inference_demo.py similarity index 100% rename from projects/perception/semantic_segmentation/bisenet/inference_demo.py rename to projects/python/perception/semantic_segmentation/bisenet/inference_demo.py diff --git a/projects/perception/semantic_segmentation/bisenet/train_demo.py 
b/projects/python/perception/semantic_segmentation/bisenet/train_demo.py similarity index 100% rename from projects/perception/semantic_segmentation/bisenet/train_demo.py rename to projects/python/perception/semantic_segmentation/bisenet/train_demo.py diff --git a/projects/perception/skeleton_based_action_recognition/REAMDE.md b/projects/python/perception/skeleton_based_action_recognition/README.md similarity index 89% rename from projects/perception/skeleton_based_action_recognition/REAMDE.md rename to projects/python/perception/skeleton_based_action_recognition/README.md index b4f0f5a10c..3afb81cfc8 100644 --- a/projects/perception/skeleton_based_action_recognition/REAMDE.md +++ b/projects/python/perception/skeleton_based_action_recognition/README.md @@ -26,6 +26,10 @@ Please use the `--device cpu` flag if you are running them on a machine without python3 demo.py --device cpu --video 0 --method pstgcn --action_checkpoint_name pstgcn_ntu_cv_lw_openpose ``` +A demo script for the Continual ST-GCN can be found in [costgcn_usage_demo.py](./demos/costgcn_usage_demo.py). To fit, evaluate, and run inference on the model, you may use the following command: +```bash +python3 costgcn_usage_demo.py --fit --eval --infer +``` ## Acknowledgement This work has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement No 871449 (OpenDR). This publication reflects the authors’ views only. The European Commission is not responsible for any use that may be made of the information it contains. diff --git a/projects/python/perception/skeleton_based_action_recognition/benchmark/README.md b/projects/python/perception/skeleton_based_action_recognition/benchmark/README.md new file mode 100644 index 0000000000..e279f2605b --- /dev/null +++ b/projects/python/perception/skeleton_based_action_recognition/benchmark/README.md @@ -0,0 +1,24 @@ +# Skeleton-based Human Activity Recognition Benchmark +This folder contains a script for benchmarking the inference of Skeleton-based Human Activity Recognition learners. + +The script includes logging of FLOPS and params for `learner.model`, inference timing, and energy consumption (NVIDIA Jetson only). + +The benchmarking runs twice: once using `learner.infer` and once using `learner.model.forward`. The results of each are printed accordingly. + + +### Setup +Please install [`pytorch-benchmark`](https://github.com/LukasHedegaard/pytorch-benchmark): +```bash +pip install pytorch-benchmark +``` + +### Running the benchmark +Benchmark the SpatioTemporalGCNLearner and ProgressiveSpatioTemporalGCNLearner +```bash +./benchmark_stgcn.py +``` + +Benchmark the CoSTGCNLearner with backbones: "costgcn", "coagcn", and "costr" +```bash +./benchmark_costgcn.py +``` \ No newline at end of file diff --git a/projects/python/perception/skeleton_based_action_recognition/benchmark/benchmark_costgcn.py b/projects/python/perception/skeleton_based_action_recognition/benchmark/benchmark_costgcn.py new file mode 100644 index 0000000000..86750ff98e --- /dev/null +++ b/projects/python/perception/skeleton_based_action_recognition/benchmark/benchmark_costgcn.py @@ -0,0 +1,91 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import torch +import yaml +from opendr.perception.skeleton_based_action_recognition.continual_stgcn_learner import ( + CoSTGCNLearner, + _MODEL_NAMES, +) + +from pytorch_benchmark import benchmark +import logging +from typing import List, Union +from opendr.engine.target import Category +from opendr.engine.data import Image + +logger = logging.getLogger("benchmark") +logging.basicConfig() +logger.setLevel("DEBUG") + + +def benchmark_costgcn(): + temp_dir = "./projects/python/perception/skeleton_based_action_recognition/tmp" + + num_runs = 100 + + input_shape = (3, 25, 2) # single skeleton in NTU-RGBD format + + batch_size = 1 + + for backbone in _MODEL_NAMES: + print(f"==== Benchmarking CoSTGCNLearner ({backbone}) ====") + + learner = CoSTGCNLearner( + device="cuda" if torch.cuda.is_available() else "cpu", + temp_path=temp_dir, + backbone=backbone, + batch_size=batch_size, + ) + sample = torch.randn(batch_size, *input_shape) # (B, C, V, M) + + learner.model.eval() + learner.optimize() + + def get_device_fn(*args): + nonlocal learner + return next(learner.model.parameters()).device + + def transfer_to_device_fn( + sample: Union[torch.Tensor, List[Category], List[Image]], + device: torch.device, + ): + if isinstance(sample, torch.Tensor): + return sample.to(device=device) + + assert isinstance(sample, list) + assert isinstance(sample[0], Category) + return [ + Category( + prediction=s.data, + confidence=s.confidence.to(device=device), + ) + for s in sample + ] + + print("== Benchmarking learner.infer ==") + results1 = benchmark( + model=learner.infer, + sample=sample, + num_runs=num_runs, + get_device_fn=get_device_fn, + transfer_to_device_fn=transfer_to_device_fn, + print_fn=print, + ) + print(yaml.dump({"learner.infer": results1})) + + +if __name__ == "__main__": + benchmark_costgcn() diff --git a/projects/perception/skeleton_based_action_recognition/benchmark/benchmark_stgcn.py b/projects/python/perception/skeleton_based_action_recognition/benchmark/benchmark_stgcn.py similarity index 100% rename from projects/perception/skeleton_based_action_recognition/benchmark/benchmark_stgcn.py rename to projects/python/perception/skeleton_based_action_recognition/benchmark/benchmark_stgcn.py diff --git a/projects/python/perception/skeleton_based_action_recognition/benchmark/install_on_server.sh b/projects/python/perception/skeleton_based_action_recognition/benchmark/install_on_server.sh new file mode 100755 index 0000000000..21b21ccdd2 --- /dev/null +++ b/projects/python/perception/skeleton_based_action_recognition/benchmark/install_on_server.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +conda create --name opendr python=3.8 -y +conda activate opendr +conda env config vars set OPENDR_HOME=$PWD +conda env config vars set PYTHONPATH=$PWD/src:$PYTHONPATH + +# Reactivate env to let env vars take effect +conda deactivate +conda activate opendr + +pip install torch==1.7.1+cu110 torchvision==0.8.2+cu110 -f https://download.pytorch.org/whl/torch_stable.html +pip install pytorch_lightning==1.2.3 onnxruntime==1.3.0 "joblib>=1.0.1" pytorch_benchmark diff --git
a/projects/python/perception/skeleton_based_action_recognition/benchmark/requirements.txt b/projects/python/perception/skeleton_based_action_recognition/benchmark/requirements.txt new file mode 100644 index 0000000000..0e0189f337 --- /dev/null +++ b/projects/python/perception/skeleton_based_action_recognition/benchmark/requirements.txt @@ -0,0 +1 @@ +pytorch-benchmark >= 0.2 diff --git a/projects/python/perception/skeleton_based_action_recognition/demos/costgcn_usage_demo.py b/projects/python/perception/skeleton_based_action_recognition/demos/costgcn_usage_demo.py new file mode 100644 index 0000000000..6ce6fe6535 --- /dev/null +++ b/projects/python/perception/skeleton_based_action_recognition/demos/costgcn_usage_demo.py @@ -0,0 +1,106 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import torch + +from opendr.perception.skeleton_based_action_recognition.continual_stgcn_learner import ( + CoSTGCNLearner, _MODEL_NAMES +) +from opendr.engine.datasets import ExternalDataset +from pathlib import Path + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument("--fit", help="Fit the model", default=False, action="store_true") + parser.add_argument("--num_fit_steps", help="Number of steps to fit the model", type=int, default=10) + parser.add_argument("--eval", help="Evaluate the model", default=False, action="store_true") + parser.add_argument("--optimize", help="Optimize the model", default=False, action="store_true") + parser.add_argument("--infer", help="Perform inference using the model", default=False, action="store_true") + parser.add_argument("--device", help="Device to use (cpu, cuda)", type=str, default="cpu") + parser.add_argument("--backbone", help="The model type to use", type=str, default="costgcn", choices=_MODEL_NAMES) + parser.add_argument("--batch_size", help="The batch size of the model", type=int, default=2) + return parser.parse_args() + + +def main(args): + tmp_path = Path(__file__).parent / "tmp" + + # Define learner + learner = CoSTGCNLearner( + device=args.device, + temp_path=str(tmp_path), + batch_size=args.batch_size, + backbone=args.backbone, + num_workers=0, + ) + + pretrained_weights_path = learner.download( + path=str(tmp_path / "pretrained_models"), + method_name=args.backbone, + mode="pretrained", + file_name=f"{args.backbone}_ntu60_xview_joint.ckpt", + ) + + learner.load(pretrained_weights_path) + + # Define datasets + data_path = tmp_path / "data" + + train_ds_path = learner.download(mode="train_data", path=str(data_path)) + val_ds_path = learner.download(mode="val_data", path=str(data_path)) + + train_ds = learner._prepare_dataset( + ExternalDataset(path=train_ds_path, dataset_type="NTURGBD"), + data_filename="train_joints.npy", + labels_filename="train_labels.pkl", + skeleton_data_type="joint", + phase="train", + verbose=False, + ) + + val_ds = learner._prepare_dataset( + ExternalDataset(path=val_ds_path, dataset_type="NTURGBD"),
data_filename="val_joints.npy", + labels_filename="val_labels.pkl", + skeleton_data_type="joint", + phase="val", + verbose=False, + ) + + # Invoke operations + if args.fit: + learner.fit(dataset=train_ds, val_dataset=val_ds, steps=args.num_fit_steps) + + if args.eval: + results = learner.eval(val_ds) + print("Evaluation results: ", results) + + if args.optimize: + learner.optimize() + + if args.infer: + dl = torch.utils.data.DataLoader(val_ds, batch_size=args.batch_size, num_workers=0) + batch = next(iter(dl))[0] + frame = batch[:, :, -1] # Select a single frame + + categories = learner.infer(frame) + print("Inferred :") + for i, c in enumerate(categories): + print(f"[{i}] category.data = {c.data}, category.confidence = {c.confidence.detach().numpy()}") + + +if __name__ == "__main__": + main(parse_args()) diff --git a/projects/perception/skeleton_based_action_recognition/demos/demo.py b/projects/python/perception/skeleton_based_action_recognition/demos/demo.py similarity index 99% rename from projects/perception/skeleton_based_action_recognition/demos/demo.py rename to projects/python/perception/skeleton_based_action_recognition/demos/demo.py index 75ef36bc0d..4c98fe4def 100644 --- a/projects/perception/skeleton_based_action_recognition/demos/demo.py +++ b/projects/python/perception/skeleton_based_action_recognition/demos/demo.py @@ -100,7 +100,8 @@ def select_2_poses(poses): energy.append(s) energy = np.array(energy) index = energy.argsort()[::-1][0:2] - selected_poses.append(poses[index]) + for i in range(len(index)): + selected_poses.append(poses[index[i]]) return selected_poses diff --git a/projects/perception/skeleton_based_action_recognition/demos/ntu60_labels.csv b/projects/python/perception/skeleton_based_action_recognition/demos/ntu60_labels.csv similarity index 100% rename from projects/perception/skeleton_based_action_recognition/demos/ntu60_labels.csv rename to projects/python/perception/skeleton_based_action_recognition/demos/ntu60_labels.csv diff --git a/projects/perception/skeleton_based_action_recognition/demos/samples_with_missing_skeletons.txt b/projects/python/perception/skeleton_based_action_recognition/demos/samples_with_missing_skeletons.txt similarity index 100% rename from projects/perception/skeleton_based_action_recognition/demos/samples_with_missing_skeletons.txt rename to projects/python/perception/skeleton_based_action_recognition/demos/samples_with_missing_skeletons.txt diff --git a/projects/perception/skeleton_based_action_recognition/demos/skeleton_extraction.py b/projects/python/perception/skeleton_based_action_recognition/demos/skeleton_extraction.py similarity index 99% rename from projects/perception/skeleton_based_action_recognition/demos/skeleton_extraction.py rename to projects/python/perception/skeleton_based_action_recognition/demos/skeleton_extraction.py index cf7bb9148d..6584649e93 100644 --- a/projects/perception/skeleton_based_action_recognition/demos/skeleton_extraction.py +++ b/projects/python/perception/skeleton_based_action_recognition/demos/skeleton_extraction.py @@ -194,7 +194,8 @@ def select_2_poses(poses): energy.append(s) energy = np.array(energy) index = energy.argsort()[::-1][0:2] - selected_poses.append(poses[index]) + for i in range(len(index)): + selected_poses.append(poses[index[i]]) return selected_poses diff --git a/projects/perception/slam/full_map_posterior_gmapping/README.md b/projects/python/perception/slam/full_map_posterior_gmapping/README.md similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/README.md rename to projects/python/perception/slam/full_map_posterior_gmapping/README.md diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/CMakeLists.txt b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/CMakeLists.txt similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/CMakeLists.txt rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/CMakeLists.txt diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/README.md b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/README.md similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/README.md rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/README.md diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/cfg/rviz/default.rviz b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/cfg/rviz/default.rviz similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/cfg/rviz/default.rviz rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/cfg/rviz/default.rviz diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/cfg/rviz/gt_map.rviz b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/cfg/rviz/gt_map.rviz similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/cfg/rviz/gt_map.rviz rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/cfg/rviz/gt_map.rviz diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment.launch b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment.launch similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment.launch rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment.launch diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment_real_data.launch b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment_real_data.launch similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment_real_data.launch rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment_real_data.launch diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment_real_data.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment_real_data.py similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment_real_data.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/experiment_real_data.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/gt_map.launch b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/gt_map.launch similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/gt_map.launch rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/gt_map.launch diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/test_computeR.launch b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/test_computeR.launch similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/test_computeR.launch rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/launch/test_computeR.launch diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/err_collector b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/err_collector similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/err_collector rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/err_collector diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/fmp_plot b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/fmp_plot similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/fmp_plot rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/fmp_plot diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/gt_mapping b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/gt_mapping similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/gt_mapping rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/gt_mapping diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/occ_map_saver b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/occ_map_saver similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/occ_map_saver rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/occ_map_saver diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/odom_pose b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/odom_pose similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/odom_pose rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/odom_pose diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/pose_error_calc b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/pose_error_calc similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/pose_error_calc rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/nodes/pose_error_calc diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/package.xml b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/package.xml similarity index 97% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/package.xml rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/package.xml index 62d9d562bc..a33d39f8bb 100644 --- a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/package.xml +++ b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/package.xml @@ -1,7 +1,7 @@ fmp_slam_eval - 1.1.1 + 2.0.0 FMP SLAM Evaluation Package Jose Arce diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_curves.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_curves.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_curves.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_curves.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_histograms.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_histograms.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_histograms.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/err_histograms.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/method_comparison.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/method_comparison.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/method_comparison.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/scripts/method_comparison.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/setup.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/setup.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/setup.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/setup.py diff --git a/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/__init__.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/__init__.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/enums/__init__.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/enums/__init__.py similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/enums/__init__.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/enums/__init__.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/enums/disc_states.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/enums/disc_states.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/enums/disc_states.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/enums/disc_states.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/error_data_collector.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/error_data_collector.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/error_data_collector.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/error_data_collector.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/fmp_plotter.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/fmp_plotter.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/fmp_plotter.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/fmp_plotter.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ground_truth_mapping.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ground_truth_mapping.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ground_truth_mapping.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ground_truth_mapping.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/map_colorizer.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/map_colorizer.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/map_colorizer.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/map_colorizer.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/net_utils.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/net_utils.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/net_utils.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/net_utils.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/occ_map_saver.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/occ_map_saver.py similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/occ_map_saver.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/occ_map_saver.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/odom_pose_publisher.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/odom_pose_publisher.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/odom_pose_publisher.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/odom_pose_publisher.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/pose_error_calculator.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/pose_error_calculator.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/pose_error_calculator.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/pose_error_calculator.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ros_launcher.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ros_launcher.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ros_launcher.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/ros_launcher.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/roscore.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/roscore.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/roscore.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/src/fmp_slam_eval/roscore.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/urdf/simple_robot_blue.urdf b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/urdf/simple_robot_blue.urdf similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/urdf/simple_robot_blue.urdf rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/urdf/simple_robot_blue.urdf diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/urdf/simple_robot_green.urdf b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/urdf/simple_robot_green.urdf similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/urdf/simple_robot_green.urdf rename to projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/urdf/simple_robot_green.urdf diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/urdf/simple_robot_red.urdf b/projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/urdf/simple_robot_red.urdf similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/urdf/simple_robot_red.urdf rename to 
projects/python/perception/slam/full_map_posterior_gmapping/src/fmp_slam_eval/urdf/simple_robot_red.urdf diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/CMakeLists.txt b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/CMakeLists.txt similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/CMakeLists.txt rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/CMakeLists.txt diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/README.md b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/README.md similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/README.md rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/README.md diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/launch/mapsim2d.launch b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/launch/mapsim2d.launch similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/launch/mapsim2d.launch rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/launch/mapsim2d.launch diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/package.xml b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/package.xml similarity index 97% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/package.xml rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/package.xml index 5a2c183107..b484c4bdb1 100644 --- a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/package.xml +++ b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/package.xml @@ -1,7 +1,7 @@ map_simulator - 1.1.1 + 2.0.0 The 2D Map Simulator package for generating datasets for testing and evaluating SLAM algorithms Jose Arce diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/rosbags/Robot_10Loop_noisy_3_2pi_180rays.bag b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/rosbags/Robot_10Loop_noisy_3_2pi_180rays.bag similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/rosbags/Robot_10Loop_noisy_3_2pi_180rays.bag rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/rosbags/Robot_10Loop_noisy_3_2pi_180rays.bag diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/Common.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/Common.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/Common.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/Common.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Cell.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Cell.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Cell.json rename to 
projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Cell.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l020m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l020m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l020m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l020m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l030m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l030m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l030m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l030m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l040m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l040m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l040m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l040m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l050m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l050m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l050m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l050m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l060m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l060m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l060m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l060m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l070m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l070m.json similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l070m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l070m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l080m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l080m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l080m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l080m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l090m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l090m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l090m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l090m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l100m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l100m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l100m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l100m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l100m_LocOnly.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l100m_LocOnly.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l100m_LocOnly.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_1l100m_LocOnly.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l120m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l120m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l120m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l120m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l140m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l140m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l140m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l140m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l160m.json 
b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l160m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l160m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l160m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l180m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l180m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l180m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l180m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l200m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l200m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l200m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_2l200m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_3l240m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_3l240m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_3l240m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_3l240m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_3l270m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_3l270m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_3l270m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_3l270m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_3l300m.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_3l300m.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_3l300m.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_10Loop_3l300m.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_45deg.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_45deg.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_45deg.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_45deg.json diff --git 
a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_CellTest.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_CellTest.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_CellTest.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_CellTest.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_EOS.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_EOS.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_EOS.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/commands/CMD_EOS.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_10Cell.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_10Cell.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_10Cell.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_10Cell.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_10Loop.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_10Loop.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_10Loop.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_10Loop.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_10Loop_window.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_10Loop_window.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_10Loop_window.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_10Loop_window.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_45deg.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_45deg.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_45deg.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_45deg.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_CellTest.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_CellTest.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_CellTest.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/maps/Map_CellTest.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_det_1ray.json 
b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_det_1ray.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_det_1ray.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_det_1ray.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_det_2pi_180rays.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_det_2pi_180rays.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_det_2pi_180rays.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_det_2pi_180rays.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_det_3_2pi_180rays.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_det_3_2pi_180rays.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_det_3_2pi_180rays.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_det_3_2pi_180rays.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_noisy_1ray.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_noisy_1ray.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_noisy_1ray.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_noisy_1ray.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_noisy_2pi_180rays.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_noisy_2pi_180rays.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_noisy_2pi_180rays.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_noisy_2pi_180rays.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_noisy_3_2pi_180rays.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_noisy_3_2pi_180rays.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_noisy_3_2pi_180rays.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Cell_noisy_3_2pi_180rays.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Loop_det_3_2pi_180rays.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Loop_det_3_2pi_180rays.json similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Loop_det_3_2pi_180rays.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Loop_det_3_2pi_180rays.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Loop_noisy_3_2pi_180rays.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Loop_noisy_3_2pi_180rays.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Loop_noisy_3_2pi_180rays.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_10Loop_noisy_3_2pi_180rays.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_45deg_det_1ray.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_45deg_det_1ray.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_45deg_det_1ray.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_45deg_det_1ray.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_CellTest_det_8ray.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_CellTest_det_8ray.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_CellTest_det_8ray.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_CellTest_det_8ray.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_020m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_020m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_020m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_020m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_030m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_030m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_030m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_030m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_040m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_040m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_040m1loc.json rename to 
projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_040m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_050m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_050m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_050m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_050m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_060m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_060m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_060m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_060m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_070m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_070m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_070m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_070m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_080m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_080m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_080m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_080m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_090m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_090m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_090m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_090m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_100m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_100m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_100m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_100m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_120m1loc.json 
b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_120m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_120m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_120m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_140m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_140m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_140m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_140m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_160m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_160m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_160m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_160m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_180m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_180m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_180m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_180m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_200m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_200m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_200m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_200m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_240m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_240m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_240m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_240m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_270m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_270m1loc.json similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_270m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_270m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_300m1loc.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_300m1loc.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_300m1loc.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/robots/Robot_Exp_10Loop_300m1loc.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/sensors/Sensor_180Ray_2pi.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/sensors/Sensor_180Ray_2pi.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/sensors/Sensor_180Ray_2pi.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/sensors/Sensor_180Ray_2pi.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/sensors/Sensor_180Ray_3_2pi.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/sensors/Sensor_180Ray_3_2pi.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/sensors/Sensor_180Ray_3_2pi.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/sensors/Sensor_180Ray_3_2pi.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/sensors/Sensor_1Ray.json b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/sensors/Sensor_1Ray.json similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/sensors/Sensor_1Ray.json rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scenarios/sensors/Sensor_1Ray.json diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scripts/mapsim2d.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scripts/mapsim2d.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/scripts/mapsim2d.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/scripts/mapsim2d.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/setup.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/setup.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/setup.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/setup.py diff --git a/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/__init__.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/__init__.py 
b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/__init__.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/__init__.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/__init__.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/__init__.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/__init__.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/closed_shape_2D.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/closed_shape_2D.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/closed_shape_2D.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/closed_shape_2D.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/line.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/line.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/line.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/line.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/polygon.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/polygon.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/polygon.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/polygon.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/pose.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/pose.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/pose.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/primitives/pose.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/transform.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/transform.py similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/transform.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/geometry/transform.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/__init__.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/__init__.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/__init__.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/__init__.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/obstacle.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/obstacle.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/obstacle.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/obstacle.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/polygonal_obstacle.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/polygonal_obstacle.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/polygonal_obstacle.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/polygonal_obstacle.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_simulator_2d.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_simulator_2d.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_simulator_2d.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_simulator_2d.py diff --git a/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/__init__.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/command.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/command.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/command.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/command.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/__init__.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/__init__.py similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/__init__.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/__init__.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/bool_msg_cmd.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/bool_msg_cmd.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/bool_msg_cmd.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/bool_msg_cmd.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/message_cmd.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/message_cmd.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/message_cmd.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/message/message_cmd.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/__init__.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/__init__.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/__init__.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/__init__.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/comment_cmd.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/comment_cmd.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/comment_cmd.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/comment_cmd.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/misc_cmd.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/misc_cmd.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/misc_cmd.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/misc_cmd.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/scan_cmd.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/scan_cmd.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/scan_cmd.py rename to 
projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/scan_cmd.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/sleep_cmd.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/sleep_cmd.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/sleep_cmd.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/misc/sleep_cmd.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/__init__.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/__init__.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/__init__.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/__init__.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_circular_cmd.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_circular_cmd.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_circular_cmd.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_circular_cmd.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_cmd.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_cmd.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_cmd.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_cmd.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_interpol_cmd.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_interpol_cmd.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_interpol_cmd.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_interpol_cmd.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_linear_cmd.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_linear_cmd.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_linear_cmd.py rename to 
projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_linear_cmd.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_pose_cmd.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_pose_cmd.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_pose_cmd.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_pose_cmd.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_rotation_cmd.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_rotation_cmd.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_rotation_cmd.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/robot_commands/move/move_rotation_cmd.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/utils.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/utils.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/utils.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/utils.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/CHANGELOG.rst b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/CHANGELOG.rst similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/CHANGELOG.rst rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/CHANGELOG.rst diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/CMakeLists.txt b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/CMakeLists.txt similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/CMakeLists.txt rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/CMakeLists.txt diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/Makefile b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/Makefile similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/Makefile rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/Makefile diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/TODO.txt b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/TODO.txt similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/TODO.txt rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/TODO.txt diff --git 
a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/Makefile.app b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/Makefile.app similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/Makefile.app rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/Makefile.app diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/Makefile.generic-shared-object b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/Makefile.generic-shared-object similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/Makefile.generic-shared-object rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/Makefile.generic-shared-object diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/Makefile.subdirs b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/Makefile.subdirs similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/Makefile.subdirs rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/Makefile.subdirs diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/generate_shared_object b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/generate_shared_object similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/generate_shared_object rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/generate_shared_object diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/message b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/message similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/message rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/message diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/pretty_compiler b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/pretty_compiler similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/pretty_compiler rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/pretty_compiler diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/testlib b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/testlib similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/testlib rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/build_tools/testlib diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/Makefile 
b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/Makefile similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/Makefile rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/Makefile diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile_test.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile_test.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile_test.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/configfile_test.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/test.ini b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/test.ini similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/test.ini rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configfile/test.ini diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configure b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configure similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configure rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/configure diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/docs/Instructions.txt b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/docs/Instructions.txt similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/docs/Instructions.txt rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/docs/Instructions.txt diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/docs/scanmatcher.tex b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/docs/scanmatcher.tex similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/docs/scanmatcher.tex rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/docs/scanmatcher.tex diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/docs/userver.txt b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/docs/userver.txt similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/docs/userver.txt rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/docs/userver.txt diff --git 
a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/Makefile b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/Makefile similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/Makefile rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/Makefile diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/graphmap.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/graphmap.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/graphmap.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/graphmap.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/map_test.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/map_test.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/map_test.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/grid/map_test.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/Makefile b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/Makefile similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/Makefile rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/Makefile diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2log.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2log.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2log.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2log.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2neff.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2neff.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2neff.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2neff.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2rec.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2rec.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2rec.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfs2rec.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfsreader.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfsreader.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfsreader.cpp 
rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gfsreader.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor_tree.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor_tree.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor_tree.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/gridslamprocessor_tree.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/motionmodel.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/motionmodel.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/motionmodel.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/gridfastslam/motionmodel.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/configfile/configfile.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/configfile/configfile.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/configfile/configfile.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/configfile/configfile.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/accessstate.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/accessstate.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/accessstate.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/accessstate.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/array2d.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/array2d.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/array2d.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/array2d.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/harray2d.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/harray2d.h similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/harray2d.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/harray2d.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/map.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/map.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/map.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/grid/map.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gfsreader.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gfsreader.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gfsreader.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gfsreader.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.hxx b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.hxx similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.hxx rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/gridslamprocessor.hxx diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/motionmodel.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/motionmodel.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/motionmodel.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/gridfastslam/motionmodel.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/carmenconfiguration.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/carmenconfiguration.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/carmenconfiguration.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/carmenconfiguration.h diff --git 
a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/configuration.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/configuration.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/configuration.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/configuration.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorlog.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorlog.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorlog.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorlog.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorstream.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorstream.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorstream.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/log/sensorstream.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/particlefilter.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/particlefilter.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/particlefilter.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/particlefilter.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/pf.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/pf.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/pf.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/particlefilter/pf.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/eig3.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/eig3.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/eig3.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/eig3.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/gridlinetraversal.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/gridlinetraversal.h similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/gridlinetraversal.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/gridlinetraversal.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/icp.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/icp.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/icp.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/icp.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcher.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcher.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcher.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcher.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcherprocessor.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcherprocessor.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcherprocessor.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/scanmatcherprocessor.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/smmap.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/smmap.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/smmap.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/scanmatcher/smmap.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensor.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensor.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensor.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensor.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensoreading.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensoreading.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensoreading.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensoreading.h 
diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensorreading.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensorreading.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensorreading.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_base/sensorreading.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometryreading.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometryreading.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometryreading.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometryreading.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometrysensor.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometrysensor.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometrysensor.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_odometry/odometrysensor.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangereading.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangereading.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangereading.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangereading.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangesensor.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangesensor.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangesensor.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/sensor/sensor_range/rangesensor.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/autoptr.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/autoptr.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/autoptr.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/autoptr.h diff --git 
a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/commandline.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/commandline.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/commandline.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/commandline.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/gvalues.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/gvalues.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/gvalues.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/gvalues.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/macro_params.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/macro_params.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/macro_params.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/macro_params.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/movement.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/movement.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/movement.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/movement.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/point.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/point.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/point.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/point.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/stat.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/stat.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/stat.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include/gmapping/utils/stat.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/Makefile b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/Makefile similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/Makefile rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/Makefile diff --git 
a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/carmenconfiguration.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/carmenconfiguration.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/carmenconfiguration.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/carmenconfiguration.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/configuration.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/configuration.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/configuration.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/configuration.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_plot.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_plot.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_plot.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_plot.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_test.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_test.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_test.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/log_test.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/rdk2carmen.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/rdk2carmen.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/rdk2carmen.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/rdk2carmen.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/scanstudio2carmen.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/scanstudio2carmen.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/scanstudio2carmen.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/scanstudio2carmen.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorlog.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorlog.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorlog.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorlog.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorstream.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorstream.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorstream.cpp rename to 
projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/log/sensorstream.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/manual.mk b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/manual.mk similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/manual.mk rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/manual.mk diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/manual.mk-template b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/manual.mk-template similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/manual.mk-template rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/manual.mk-template diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/package.xml b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/package.xml similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/package.xml rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/package.xml diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/Makefile b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/Makefile similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/Makefile rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/Makefile diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/eig3.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/eig3.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/eig3.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/eig3.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/icptest.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/icptest.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/icptest.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/icptest.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/line_test.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/line_test.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/line_test.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/line_test.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatch_test.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatch_test.cpp similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatch_test.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatch_test.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.new.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.new.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.new.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcher.new.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcherprocessor.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcherprocessor.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcherprocessor.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/scanmatcherprocessor.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/smmap.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/smmap.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/smmap.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/scanmatcher/smmap.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/Makefile b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/Makefile similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/Makefile rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/Makefile diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/Makefile b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/Makefile similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/Makefile rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/Makefile diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensor.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensor.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensor.cpp rename to 
projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensor.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensorreading.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensorreading.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensorreading.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_base/sensorreading.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/Makefile b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/Makefile similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/Makefile rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/Makefile diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometryreading.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometryreading.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometryreading.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometryreading.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometrysensor.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometrysensor.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometrysensor.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_odometry/odometrysensor.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/Makefile b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/Makefile similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/Makefile rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/Makefile diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangereading.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangereading.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangereading.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangereading.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangesensor.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangesensor.cpp similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangesensor.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/sensor/sensor_range/rangesensor.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/setlibpath b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/setlibpath similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/setlibpath rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/setlibpath diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/Makefile b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/Makefile similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/Makefile rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/Makefile diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/autoptr_test.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/autoptr_test.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/autoptr_test.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/autoptr_test.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/movement.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/movement.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/movement.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/movement.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat_test.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat_test.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat_test.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/utils/stat_test.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/.gitignore b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/.gitignore similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/.gitignore rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/.gitignore diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/README.md b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/README.md similarity index 100% rename from 
projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/README.md rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/README.md diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/CHANGELOG.rst b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/CHANGELOG.rst similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/CHANGELOG.rst rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/CHANGELOG.rst diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/CMakeLists.txt b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/CMakeLists.txt similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/CMakeLists.txt rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/CMakeLists.txt diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/launch/slam_gmapping_pr2.launch b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/launch/slam_gmapping_pr2.launch similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/launch/slam_gmapping_pr2.launch rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/launch/slam_gmapping_pr2.launch diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/msg/doubleMap.msg b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/msg/doubleMap.msg similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/msg/doubleMap.msg rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/msg/doubleMap.msg diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/msg/mapModel.msg b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/msg/mapModel.msg similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/msg/mapModel.msg rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/msg/mapModel.msg diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/nodelet_plugins.xml b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/nodelet_plugins.xml similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/nodelet_plugins.xml rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/nodelet_plugins.xml diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/package.xml b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/package.xml similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/package.xml rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/package.xml diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/main.cpp 
b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/main.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/main.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/main.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/nodelet.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/nodelet.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/nodelet.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/nodelet.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/replay.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/replay.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/replay.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/replay.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.h b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.h similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.h rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/src/slam_gmapping.h diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_laser_different_beamcount.test b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_laser_different_beamcount.test similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_laser_different_beamcount.test rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_laser_different_beamcount.test diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_stage.launch b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_stage.launch similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_stage.launch rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_stage.launch diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_stage_replay.launch 
b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_stage_replay.launch similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_stage_replay.launch rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_stage_replay.launch diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_stage_replay2.launch b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_stage_replay2.launch similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_stage_replay2.launch rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_stage_replay2.launch diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_symmetry.launch b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_symmetry.launch similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_symmetry.launch rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_symmetry.launch diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_upside_down.launch b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_upside_down.launch similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_upside_down.launch rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/basic_localization_upside_down.launch diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/rtest.cpp b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/rtest.cpp similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/rtest.cpp rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/rtest.cpp diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/test_map.py b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/test_map.py similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/test_map.py rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/gmapping/test/test_map.py diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/slam_gmapping/CHANGELOG.rst b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/slam_gmapping/CHANGELOG.rst similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/slam_gmapping/CHANGELOG.rst rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/slam_gmapping/CHANGELOG.rst diff --git 
a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/slam_gmapping/CMakeLists.txt b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/slam_gmapping/CMakeLists.txt similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/slam_gmapping/CMakeLists.txt rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/slam_gmapping/CMakeLists.txt diff --git a/projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/slam_gmapping/package.xml b/projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/slam_gmapping/package.xml similarity index 100% rename from projects/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/slam_gmapping/package.xml rename to projects/python/perception/slam/full_map_posterior_gmapping/src/slam_gmapping/slam_gmapping/package.xml diff --git a/projects/perception/speech_command_recognition/README.MD b/projects/python/perception/speech_command_recognition/README.MD similarity index 100% rename from projects/perception/speech_command_recognition/README.MD rename to projects/python/perception/speech_command_recognition/README.MD diff --git a/projects/perception/speech_command_recognition/demo.py b/projects/python/perception/speech_command_recognition/demo.py similarity index 100% rename from projects/perception/speech_command_recognition/demo.py rename to projects/python/perception/speech_command_recognition/demo.py diff --git a/projects/perception/speech_command_recognition/example1.wav b/projects/python/perception/speech_command_recognition/example1.wav similarity index 100% rename from projects/perception/speech_command_recognition/example1.wav rename to projects/python/perception/speech_command_recognition/example1.wav diff --git a/projects/perception/speech_command_recognition/example2.wav b/projects/python/perception/speech_command_recognition/example2.wav similarity index 100% rename from projects/perception/speech_command_recognition/example2.wav rename to projects/python/perception/speech_command_recognition/example2.wav diff --git a/projects/simulation/SMPL+D_human_models/README.md b/projects/python/simulation/SMPL+D_human_models/README.md similarity index 100% rename from projects/simulation/SMPL+D_human_models/README.md rename to projects/python/simulation/SMPL+D_human_models/README.md diff --git a/projects/simulation/SMPL+D_human_models/examples/model_1.png b/projects/python/simulation/SMPL+D_human_models/examples/model_1.png similarity index 100% rename from projects/simulation/SMPL+D_human_models/examples/model_1.png rename to projects/python/simulation/SMPL+D_human_models/examples/model_1.png diff --git a/projects/simulation/SMPL+D_human_models/examples/model_2.png b/projects/python/simulation/SMPL+D_human_models/examples/model_2.png similarity index 100% rename from projects/simulation/SMPL+D_human_models/examples/model_2.png rename to projects/python/simulation/SMPL+D_human_models/examples/model_2.png diff --git a/projects/simulation/SMPL+D_human_models/examples/model_3.png b/projects/python/simulation/SMPL+D_human_models/examples/model_3.png similarity index 100% rename from projects/simulation/SMPL+D_human_models/examples/model_3.png rename to projects/python/simulation/SMPL+D_human_models/examples/model_3.png diff --git a/projects/simulation/SMPL+D_human_models/examples/model_4.png b/projects/python/simulation/SMPL+D_human_models/examples/model_4.png similarity index 100% rename from 
projects/simulation/SMPL+D_human_models/examples/model_4.png rename to projects/python/simulation/SMPL+D_human_models/examples/model_4.png diff --git a/projects/simulation/SMPL+D_human_models/src/download_data.py b/projects/python/simulation/SMPL+D_human_models/src/download_data.py similarity index 94% rename from projects/simulation/SMPL+D_human_models/src/download_data.py rename to projects/python/simulation/SMPL+D_human_models/src/download_data.py index 1244e05b6a..84515ccd9b 100644 --- a/projects/simulation/SMPL+D_human_models/src/download_data.py +++ b/projects/python/simulation/SMPL+D_human_models/src/download_data.py @@ -44,7 +44,7 @@ def reporthook(count, block_size, total_size): ) human_data_url = OPENDR_SERVER_URL + "simulation/SMPLD_body_models/human_models.tar.gz" - downloaded_human_data_path = os.path.join(OPENDR_HOME, 'projects/simulation/SMPL+D_human_models/human_models.tar.gz') + downloaded_human_data_path = os.path.join(OPENDR_HOME, 'projects/python/simulation/SMPL+D_human_models/human_models.tar.gz') print("Downloading data from", human_data_url, "to", downloaded_human_data_path) start_time = 0 last_print = 0 @@ -66,7 +66,7 @@ def safe_extract(tar, path=".", members=None, *, numeric_owner=False): tar.extractall(path, members, numeric_owner=numeric_owner) with tarfile.open(downloaded_human_data_path) as tar: - safe_extract(tar, path=os.path.join(OPENDR_HOME, 'projects/simulation/SMPL+D_human_models')) + safe_extract(tar, path=os.path.join(OPENDR_HOME, 'projects/python/simulation/SMPL+D_human_models')) tar.close() os.remove(downloaded_human_data_path) @@ -74,16 +74,17 @@ def safe_extract(tar, path=".", members=None, *, numeric_owner=False): return model_url = OPENDR_SERVER_URL + "simulation/SMPLD_body_models/model.tar.gz" - downloaded_model_path = os.path.join(OPENDR_HOME, 'projects/simulation/SMPL+D_human_models/model.tar.gz') + downloaded_model_path = os.path.join(OPENDR_HOME, 'projects/python/simulation/SMPL+D_human_models/model.tar.gz') print("Downloading data from", model_url, "to", downloaded_model_path) start_time = 0 last_print = 0 urlretrieve(model_url, downloaded_model_path, reporthook=reporthook) with tarfile.open(downloaded_model_path) as tar: - safe_extract(tar, path=os.path.join(OPENDR_HOME, 'projects/simulation/SMPL+D_human_models')) + safe_extract(tar, path=os.path.join(OPENDR_HOME, 'projects/python/simulation/SMPL+D_human_models')) tar.close() os.remove(downloaded_model_path) + if __name__ == "__main__": raw_data = False if len(sys.argv) > 1 and sys.argv[1] == 'raw': diff --git a/projects/simulation/SMPL+D_human_models/src/generate_models.py b/projects/python/simulation/SMPL+D_human_models/src/generate_models.py similarity index 100% rename from projects/simulation/SMPL+D_human_models/src/generate_models.py rename to projects/python/simulation/SMPL+D_human_models/src/generate_models.py diff --git a/projects/simulation/SMPL+D_human_models/webots/extract_anims.py b/projects/python/simulation/SMPL+D_human_models/webots/extract_anims.py similarity index 100% rename from projects/simulation/SMPL+D_human_models/webots/extract_anims.py rename to projects/python/simulation/SMPL+D_human_models/webots/extract_anims.py diff --git a/projects/simulation/SMPL+D_human_models/webots/install_project.sh b/projects/python/simulation/SMPL+D_human_models/webots/install_project.sh similarity index 55% rename from projects/simulation/SMPL+D_human_models/webots/install_project.sh rename to projects/python/simulation/SMPL+D_human_models/webots/install_project.sh index 
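
The download_data.py hunk above fetches the archives with urlretrieve (using the reporthook shown in the hunk header) and unpacks them through a safe_extract helper whose body is not part of this diff. A minimal sketch follows, assuming the helper applies the usual tar path-traversal guard before delegating to extractall; only the signature is taken from the hunk, and the archive name and target path in the usage comment are illustrative.

import os
import tarfile


def safe_extract(tar, path=".", members=None, *, numeric_owner=False):
    # Assumed sketch: reject any member that would be written outside `path`
    # (the standard tar path-traversal guard), then extract normally.
    base = os.path.realpath(path)
    for member in tar.getmembers():
        target = os.path.realpath(os.path.join(path, member.name))
        if os.path.commonpath([base, target]) != base:
            raise RuntimeError("blocked path traversal in tar member: " + member.name)
    tar.extractall(path, members, numeric_owner=numeric_owner)


# Illustrative usage, mirroring the call sites in the hunk:
# with tarfile.open("human_models.tar.gz") as tar:
#     safe_extract(tar, path="SMPL+D_human_models")
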
ee1cf93925..6ce95c86fa 100644 --- a/projects/simulation/SMPL+D_human_models/webots/install_project.sh +++ b/projects/python/simulation/SMPL+D_human_models/webots/install_project.sh @@ -7,7 +7,7 @@ cd $WEBOTS_HOME/projects/smpl_webots/controllers/smpl_animation make mkdir $WEBOTS_HOME/projects/smpl_webots/skins mkdir $WEBOTS_HOME/projects/smpl_webots/skins/model-204 -cp $OPENDR_HOME/projects/simulation/SMPL+D_human_models/fbx_models/female/204_0/204_0.fbx $WEBOTS_HOME/projects/smpl_webots/skins/model-204/model-204.fbx +cp $OPENDR_HOME/projects/python/simulation/SMPL+D_human_models/fbx_models/female/204_0/204_0.fbx $WEBOTS_HOME/projects/smpl_webots/skins/model-204/model-204.fbx mkdir $WEBOTS_HOME/projects/smpl_webots/protos/textures mkdir $WEBOTS_HOME/projects/smpl_webots/protos/textures/model-204 -cp $OPENDR_HOME/projects/simulation/SMPL+D_human_models/fbx_models/female/204_0/texture.jpg $WEBOTS_HOME/projects/smpl_webots/protos/textures/model-204/texture.jpg +cp $OPENDR_HOME/projects/python/simulation/SMPL+D_human_models/fbx_models/female/204_0/texture.jpg $WEBOTS_HOME/projects/smpl_webots/protos/textures/model-204/texture.jpg diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/Makefile b/projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/Makefile similarity index 100% rename from projects/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/Makefile rename to projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/Makefile diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/smpl_animation.c b/projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/smpl_animation.c similarity index 100% rename from projects/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/smpl_animation.c rename to projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/controllers/smpl_animation/smpl_animation.c diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/Makefile b/projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/Makefile similarity index 100% rename from projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/Makefile rename to projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/Makefile diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/quaternion_private.h b/projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/quaternion_private.h similarity index 100% rename from projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/quaternion_private.h rename to projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/quaternion_private.h diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/smpl_util.h b/projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/smpl_util.h similarity index 100% rename from projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/smpl_util.h rename to projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/smpl_util.h diff --git 
a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/vector3_private.h b/projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/vector3_private.h similarity index 100% rename from projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/vector3_private.h rename to projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/include/vector3_private.h diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/quaternion.c b/projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/quaternion.c similarity index 100% rename from projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/quaternion.c rename to projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/quaternion.c diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/smpl_util.c b/projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/smpl_util.c similarity index 100% rename from projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/smpl_util.c rename to projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/smpl_util.c diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/vector3.c b/projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/vector3.c similarity index 100% rename from projects/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/vector3.c rename to projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/libraries/smpl_util/src/vector3.c diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/protos/smpl_model_1.proto b/projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/protos/smpl_model_1.proto similarity index 100% rename from projects/simulation/SMPL+D_human_models/webots/smpl_webots/protos/smpl_model_1.proto rename to projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/protos/smpl_model_1.proto diff --git a/projects/simulation/SMPL+D_human_models/webots/smpl_webots/worlds/demo_world.wbt b/projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/worlds/demo_world.wbt similarity index 100% rename from projects/simulation/SMPL+D_human_models/webots/smpl_webots/worlds/demo_world.wbt rename to projects/python/simulation/SMPL+D_human_models/webots/smpl_webots/worlds/demo_world.wbt diff --git a/projects/simulation/human_dataset_generation/README.md b/projects/python/simulation/human_dataset_generation/README.md similarity index 100% rename from projects/simulation/human_dataset_generation/README.md rename to projects/python/simulation/human_dataset_generation/README.md diff --git a/projects/simulation/human_dataset_generation/background.py b/projects/python/simulation/human_dataset_generation/background.py similarity index 100% rename from projects/simulation/human_dataset_generation/background.py rename to projects/python/simulation/human_dataset_generation/background.py diff --git a/projects/simulation/human_dataset_generation/create_background_images.py b/projects/python/simulation/human_dataset_generation/create_background_images.py similarity index 100% rename from projects/simulation/human_dataset_generation/create_background_images.py rename to 
projects/python/simulation/human_dataset_generation/create_background_images.py diff --git a/projects/simulation/human_dataset_generation/create_dataset.py b/projects/python/simulation/human_dataset_generation/create_dataset.py similarity index 100% rename from projects/simulation/human_dataset_generation/create_dataset.py rename to projects/python/simulation/human_dataset_generation/create_dataset.py diff --git a/projects/simulation/human_dataset_generation/data_generator.py b/projects/python/simulation/human_dataset_generation/data_generator.py similarity index 100% rename from projects/simulation/human_dataset_generation/data_generator.py rename to projects/python/simulation/human_dataset_generation/data_generator.py diff --git a/projects/simulation/human_dataset_generation/dependencies.ini b/projects/python/simulation/human_dataset_generation/dependencies.ini similarity index 84% rename from projects/simulation/human_dataset_generation/dependencies.ini rename to projects/python/simulation/human_dataset_generation/dependencies.ini index 9e71a90634..de06dc7b2e 100644 --- a/projects/simulation/human_dataset_generation/dependencies.ini +++ b/projects/python/simulation/human_dataset_generation/dependencies.ini @@ -1,8 +1,8 @@ [runtime] # 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=PyWavefront==1.3.2 pyglet>=1.5.16 pickle5>=0.0.11 opencv-contrib-python - numpy>=1.19 + numpy>=1.19,<=1.23.5 diff --git a/projects/simulation/human_dataset_generation/download_models.sh b/projects/python/simulation/human_dataset_generation/download_models.sh similarity index 100% rename from projects/simulation/human_dataset_generation/download_models.sh rename to projects/python/simulation/human_dataset_generation/download_models.sh diff --git a/projects/simulation/human_dataset_generation/reformat_cityscapes.py b/projects/python/simulation/human_dataset_generation/reformat_cityscapes.py similarity index 100% rename from projects/simulation/human_dataset_generation/reformat_cityscapes.py rename to projects/python/simulation/human_dataset_generation/reformat_cityscapes.py diff --git a/projects/simulation/human_model_generation/README.md b/projects/python/simulation/human_model_generation/README.md similarity index 100% rename from projects/simulation/human_model_generation/README.md rename to projects/python/simulation/human_model_generation/README.md diff --git a/projects/simulation/human_model_generation/demos/imgs_input/msk/result_0004.jpg b/projects/python/simulation/human_model_generation/demos/imgs_input/msk/result_0004.jpg similarity index 100% rename from projects/simulation/human_model_generation/demos/imgs_input/msk/result_0004.jpg rename to projects/python/simulation/human_model_generation/demos/imgs_input/msk/result_0004.jpg diff --git a/projects/simulation/human_model_generation/demos/imgs_input/rgb/result_0004.jpg b/projects/python/simulation/human_model_generation/demos/imgs_input/rgb/result_0004.jpg similarity index 100% rename from projects/simulation/human_model_generation/demos/imgs_input/rgb/result_0004.jpg rename to projects/python/simulation/human_model_generation/demos/imgs_input/rgb/result_0004.jpg diff --git a/projects/simulation/human_model_generation/demos/model_generation.ipynb b/projects/python/simulation/human_model_generation/demos/model_generation.ipynb similarity index 100% rename from 
projects/simulation/human_model_generation/demos/model_generation.ipynb rename to projects/python/simulation/human_model_generation/demos/model_generation.ipynb diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/README.md b/projects/python/simulation/synthetic_multi_view_facial_image_generation/README.md similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/README.md rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/README.md diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/SyntheticDataGeneration.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/SyntheticDataGeneration.py similarity index 97% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/SyntheticDataGeneration.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/SyntheticDataGeneration.py index 4df8e55fff..59e114a64c 100644 --- a/projects/data_generation/synthetic_multi_view_facial_image_generation/SyntheticDataGeneration.py +++ b/projects/python/simulation/synthetic_multi_view_facial_image_generation/SyntheticDataGeneration.py @@ -40,9 +40,9 @@ from shutil import copyfile import cv2 import os -from algorithm.DDFA import preprocessing_1 -from algorithm.DDFA import preprocessing_2 -from algorithm.Rotate_and_Render import test_multipose +from .algorithm.DDFA import preprocessing_1 +from .algorithm.DDFA import preprocessing_2 +from .algorithm.Rotate_and_Render import test_multipose class MultiviewDataGeneration(): diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/bfm_show.m b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/bfm_show.m similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/bfm_show.m rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/bfm_show.m diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_noneck.jpg b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_noneck.jpg similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_noneck.jpg rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_noneck.jpg diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_refine.jpg b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_refine.jpg similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_refine.jpg rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/imgs/bfm_refine.jpg diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/readme.md b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/readme.md similarity index 100% rename from 
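
The SyntheticDataGeneration.py hunk above switches the DDFA and Rotate_and_Render imports from absolute to package-relative form, which lines up with the empty __init__.py files added in nearby entries: the relative form only resolves when the enclosing directories are importable as packages. The snippet below is a self-contained sketch of that behavior; the package name facegen, the module gen.py, and the MESSAGE constant are illustrative, not part of the toolkit.

import os
import sys
import tempfile

# Build a throw-away package that mirrors, in miniature, the layout the
# relative imports rely on (package -> algorithm -> DDFA, each with __init__.py).
root = tempfile.mkdtemp()
pkg = os.path.join(root, "facegen")
os.makedirs(os.path.join(pkg, "algorithm", "DDFA"))
for d in (pkg, os.path.join(pkg, "algorithm"), os.path.join(pkg, "algorithm", "DDFA")):
    open(os.path.join(d, "__init__.py"), "w").close()
with open(os.path.join(pkg, "algorithm", "DDFA", "preprocessing_1.py"), "w") as f:
    f.write("MESSAGE = 'preprocessing_1 loaded'\n")
with open(os.path.join(pkg, "gen.py"), "w") as f:
    # Same import style as the hunk: relative to the containing package.
    f.write("from .algorithm.DDFA import preprocessing_1\n")

sys.path.insert(0, root)
from facegen import gen  # gen.py's relative import resolves through the package
print(gen.preprocessing_1.MESSAGE)

The old absolute form (from algorithm.DDFA import ...) only resolved when the script's own directory happened to be on sys.path; the relative form works wherever the package itself is importable.
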
projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/readme.md rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/readme.md diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/render_face_mesh.m b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/render_face_mesh.m similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/render_face_mesh.m rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/BFM_Remove_Neck/render_face_mesh.m diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/LICENSE b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/LICENSE similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/LICENSE rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/LICENSE diff --git a/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/__init__.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/example/Images/.keep b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/example/Images/.keep new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/mobilenet_v1.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/mobilenet_v1.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/mobilenet_v1.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/mobilenet_v1.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_1.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_1.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_1.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_1.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_2.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_2.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_2.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/preprocessing_2.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/simple_dataset.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/simple_dataset.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/simple_dataset.py rename to 
projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/simple_dataset.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/test.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/test.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/test.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/test.py diff --git a/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/__init__.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cv_plot.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cv_plot.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cv_plot.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cv_plot.py diff --git a/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/__init__.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.cpp b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.cpp similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.cpp rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.cpp diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.h b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.h similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.h rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core.h diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cp37-win_amd64.pyd b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cp37-win_amd64.pyd similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cp37-win_amd64.pyd rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cp37-win_amd64.pyd diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cpp b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cpp similarity index 100% rename from 
projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cpp rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.cpp diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.pyx b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.pyx similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.pyx rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/mesh_core_cython.pyx diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/readme.md b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/readme.md similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/readme.md rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/readme.md diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/setup.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/setup.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/setup.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/cython/setup.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/ddfa.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/ddfa.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/ddfa.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/ddfa.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/estimate_pose.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/estimate_pose.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/estimate_pose.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/estimate_pose.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/inference.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/inference.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/inference.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/inference.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/io.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/io.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/io.py 
rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/io.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/lighting.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/lighting.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/lighting.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/lighting.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/paf.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/paf.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/paf.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/paf.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/params.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/params.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/params.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/params.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/path_helper.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/path_helper.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/path_helper.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/path_helper.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/render.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/render.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/render.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/utils/render.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/readme.md b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/readme.md similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/readme.md rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/readme.md diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_demo.m b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_demo.m similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_demo.m rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_demo.m diff --git 
a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_face_mesh.m b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_face_mesh.m similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_face_mesh.m rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/render_face_mesh.m diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/tri.mat b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/tri.mat similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/tri.mat rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/DDFA/visualize/tri.mat diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/LICENSE b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/LICENSE similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/LICENSE rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/LICENSE diff --git a/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/__init__.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/__init__.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/__init__.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/__init__.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/__init__.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/allface_dataset.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/allface_dataset.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/allface_dataset.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/allface_dataset.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/base_dataset.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/base_dataset.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/base_dataset.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/base_dataset.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/curve.py 
b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/curve.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/curve.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/curve.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/data_utils.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/data_utils.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/data_utils.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/data/data_utils.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/test.sh b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/test.sh similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/test.sh rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/test.sh diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/train.sh b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/train.sh similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/train.sh rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/train.sh diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/v100_test.sh b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/v100_test.sh similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/v100_test.sh rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/experiments/v100_test.sh diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/__init__.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/__init__.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/__init__.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/__init__.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/__init__.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/__init__.py similarity index 74% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/__init__.py rename to 
projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/__init__.py index 91e0febc81..36314edf6e 100644 --- a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/__init__.py +++ b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/__init__.py @@ -1,18 +1,18 @@ import torch -from algorithm.Rotate_and_Render.models.networks.base_network import BaseNetwork -from algorithm.Rotate_and_Render.models.networks import loss -from algorithm.Rotate_and_Render.models.networks import discriminator -from algorithm.Rotate_and_Render.models.networks import generator -from algorithm.Rotate_and_Render.models.networks import encoder -from algorithm.Rotate_and_Render.models.networks.render import Render -import algorithm.Rotate_and_Render.util.util as util +from .base_network import BaseNetwork +from . import loss +from . import discriminator +from . import generator +from . import encoder +from .render import Render +from ...util.util import find_class_in_module __all__ = ['loss', 'discriminator', 'generator', 'encoder', 'Render'] def find_network_using_name(target_network_name, filename): target_class_name = target_network_name + filename module_name = 'algorithm.Rotate_and_Render.models.networks.' + filename - network = util.find_class_in_module(target_class_name, module_name) + network = find_class_in_module(target_class_name, module_name) assert issubclass(network, BaseNetwork), \ "Class %s should be a subclass of BaseNetwork" % network diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/architecture.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/architecture.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/architecture.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/architecture.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/base_network.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/base_network.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/base_network.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/base_network.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/discriminator.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/discriminator.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/discriminator.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/discriminator.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/encoder.py 
b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/encoder.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/encoder.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/encoder.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/generator.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/generator.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/generator.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/generator.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/loss.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/loss.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/loss.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/loss.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/normalization.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/normalization.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/normalization.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/normalization.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/render.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/render.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/render.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/render.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/rotate_render.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/rotate_render.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/rotate_render.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/rotate_render.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/__init__.py 
b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/__init__.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/__init__.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/__init__.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm_reimpl.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm_reimpl.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm_reimpl.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/batchnorm_reimpl.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/comm.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/comm.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/comm.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/comm.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/replicate.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/replicate.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/replicate.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/replicate.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/scatter_gather.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/scatter_gather.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/scatter_gather.py rename to 
projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/scatter_gather.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/unittest.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/unittest.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/unittest.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/sync_batchnorm/unittest.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/test_render.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/test_render.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/test_render.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/test_render.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/util.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/util.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/util.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/networks/util.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotate_model.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotate_model.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotate_model.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotate_model.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotatespade_model.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotatespade_model.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotatespade_model.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/rotatespade_model.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/test_model.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/test_model.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/test_model.py rename to 
projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/models/test_model.py diff --git a/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/__init__.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/base_options.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/base_options.py similarity index 99% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/base_options.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/base_options.py index 8528a58620..8bd14494e7 100644 --- a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/base_options.py +++ b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/base_options.py @@ -4,8 +4,8 @@ import os from ..util import util import torch -from algorithm.Rotate_and_Render import models -from algorithm.Rotate_and_Render import data +from .. import models +from .. import data import pickle __all__ = ['math'] diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/test_options.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/test_options.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/test_options.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/test_options.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/train_options.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/train_options.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/train_options.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/options/train_options.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_frontal.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_frontal.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_frontal.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_frontal.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_multipose.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_multipose.py similarity index 99% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_multipose.py rename to 
projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_multipose.py index 8e8bf09b58..d18a1c48ba 100644 --- a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_multipose.py +++ b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/test_multipose.py @@ -14,7 +14,7 @@ import torch import math from .models.networks.rotate_render import TestRender -from algorithm.Rotate_and_Render.data import dataset_info +from .data import dataset_info multiprocessing.set_start_method('spawn', force=True) __all__ = ['dataset_info'] diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/train.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/train.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/train.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/train.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/__init__.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/__init__.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/__init__.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/__init__.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotate_trainer.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotate_trainer.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotate_trainer.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotate_trainer.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotatespade_trainer.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotatespade_trainer.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotatespade_trainer.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/trainers/rotatespade_trainer.py diff --git a/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/__init__.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/html.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/html.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/html.py rename to 
projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/html.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/iter_counter.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/iter_counter.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/iter_counter.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/iter_counter.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/util.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/util.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/util.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/util.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/visualizer.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/visualizer.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/visualizer.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm/Rotate_and_Render/util/visualizer.py diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/demos/imgs_input/person01145+0-15.jpg b/projects/python/simulation/synthetic_multi_view_facial_image_generation/demos/imgs_input/person01145+0-15.jpg similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/demos/imgs_input/person01145+0-15.jpg rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/demos/imgs_input/person01145+0-15.jpg diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/requirements.txt b/projects/python/simulation/synthetic_multi_view_facial_image_generation/requirements.txt similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/requirements.txt rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/requirements.txt diff --git a/projects/data_generation/synthetic_multi_view_facial_image_generation/tool_synthetic_facial_generation.py b/projects/python/simulation/synthetic_multi_view_facial_image_generation/tool_synthetic_facial_generation.py similarity index 100% rename from projects/data_generation/synthetic_multi_view_facial_image_generation/tool_synthetic_facial_generation.py rename to projects/python/simulation/synthetic_multi_view_facial_image_generation/tool_synthetic_facial_generation.py diff --git a/projects/python/utils/ambiguity_measure/ambiguity_measure_tutorial.ipynb b/projects/python/utils/ambiguity_measure/ambiguity_measure_tutorial.ipynb new file mode 100644 index 0000000000..de215206aa --- /dev/null +++ b/projects/python/utils/ambiguity_measure/ambiguity_measure_tutorial.ipynb @@ -0,0 +1,1796 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "JFy2xBVv_X9v", + "metadata": { + "id": "JFy2xBVv_X9v" + }, + "source": [ + "# Ambiguity 
Measure Tutorial\n", + "\n", + "This tutorial shows how use the ambiguity measure module from the OpenDR toolkit.\n", + "The ambiguity measure tool can be used for interactive trainig of models such as Transporter Nets or CLIPort.\n", + "In this colab we will show how to obtain this measure in a pick and place task.\n", + "This colab is adapted from the [Socratic Models: Robot Pick & Place colab](https://colab.research.google.com/drive/1jAyhumd7DTxJB2oZufob9crVxETAEKbV?usp=sharing).\n", + "\n", + "## Setup\n", + "\n", + "First make sure that hardware acceleration is enabled (Edit -> Notebook settings -> Hardware accelerator -> GPU).\n", + "\n", + "Next, we need to install a number of dependencies and download some assets.\n", + "Finally, we print some info on the GPU in order to ensure that it is available:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "79cfbbad-733b-43cb-8a96-2334857ba507", + "metadata": { + "cellView": "form", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "79cfbbad-733b-43cb-8a96-2334857ba507", + "outputId": "6372c643-7471-4d13-c5c1-6d34c162b788" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", + "Collecting ftfy\n", + " Downloading ftfy-6.1.1-py3-none-any.whl (53 kB)\n", + "\u001b[K |████████████████████████████████| 53 kB 101 kB/s \n", + "\u001b[?25hRequirement already satisfied: regex in /usr/local/lib/python3.7/dist-packages (2022.6.2)\n", + "Requirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (4.64.1)\n", + "Collecting fvcore\n", + " Downloading fvcore-0.1.5.post20221122.tar.gz (50 kB)\n", + "\u001b[K |████████████████████████████████| 50 kB 3.1 MB/s \n", + "\u001b[?25hCollecting imageio==2.4.1\n", + " Downloading imageio-2.4.1.tar.gz (3.3 MB)\n", + "\u001b[K |████████████████████████████████| 3.3 MB 43.5 MB/s \n", + "\u001b[?25hCollecting imageio-ffmpeg==0.4.5\n", + " Downloading imageio_ffmpeg-0.4.5-py3-none-manylinux2010_x86_64.whl (26.9 MB)\n", + "\u001b[K |████████████████████████████████| 26.9 MB 16.6 MB/s \n", + "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from imageio==2.4.1) (1.21.6)\n", + "Requirement already satisfied: pillow in /usr/local/lib/python3.7/dist-packages (from imageio==2.4.1) (7.1.2)\n", + "Requirement already satisfied: wcwidth>=0.2.5 in /usr/local/lib/python3.7/dist-packages (from ftfy) (0.2.5)\n", + "Collecting yacs>=0.1.6\n", + " Downloading yacs-0.1.8-py3-none-any.whl (14 kB)\n", + "Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.7/dist-packages (from fvcore) (6.0)\n", + "Requirement already satisfied: termcolor>=1.1 in /usr/local/lib/python3.7/dist-packages (from fvcore) (2.1.0)\n", + "Requirement already satisfied: tabulate in /usr/local/lib/python3.7/dist-packages (from fvcore) (0.8.10)\n", + "Collecting iopath>=0.1.7\n", + " Downloading iopath-0.1.10.tar.gz (42 kB)\n", + "\u001b[K |████████████████████████████████| 42 kB 348 kB/s \n", + "\u001b[?25hRequirement already satisfied: typing_extensions in /usr/local/lib/python3.7/dist-packages (from iopath>=0.1.7->fvcore) (4.1.1)\n", + "Collecting portalocker\n", + " Downloading portalocker-2.6.0-py2.py3-none-any.whl (15 kB)\n", + "Building wheels for collected packages: imageio, fvcore, iopath\n", + " Building wheel for imageio (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for imageio: filename=imageio-2.4.1-py3-none-any.whl size=3303886 sha256=10b3e947e79811166892fa4fbaa5ff9083e7ebc73e812a1dd0408e224607382e\n", + " Stored in directory: /root/.cache/pip/wheels/46/20/07/7bb9c8c44e6ec2efa60fd0e6280094f53f65f41767ef69a5ee\n", + " Building wheel for fvcore (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for fvcore: filename=fvcore-0.1.5.post20221122-py3-none-any.whl size=61484 sha256=49948bdcd3eff6484bd7383c436da55b11f00e93ade2a1041e7b42ca026db304\n", + " Stored in directory: /root/.cache/pip/wheels/2d/e4/d7/be0b4010933f5fffea6385e9b319eac9d6e56c82ee4a0164e5\n", + " Building wheel for iopath (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for iopath: filename=iopath-0.1.10-py3-none-any.whl size=31547 sha256=88a077808466797e4f277bdf94f4d898f33288a116f9a83bf06a32665076997a\n", + " Stored in directory: /root/.cache/pip/wheels/aa/cc/ed/ca4e88beef656b01c84b9185196513ef2faf74a5a379b043a7\n", + "Successfully built imageio fvcore iopath\n", + "Installing collected packages: portalocker, yacs, iopath, imageio-ffmpeg, imageio, fvcore, ftfy\n", + " Attempting uninstall: imageio\n", + " Found existing installation: imageio 2.9.0\n", + " Uninstalling imageio-2.9.0:\n", + " Successfully uninstalled imageio-2.9.0\n", + "Successfully installed ftfy-6.1.1 fvcore-0.1.5.post20221122 imageio-2.4.1 imageio-ffmpeg-0.4.5 iopath-0.1.10 portalocker-2.6.0 yacs-0.1.8\n", + "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", + "Collecting git+https://github.com/openai/CLIP.git\n", + " Cloning https://github.com/openai/CLIP.git to /tmp/pip-req-build-1rut9z61\n", + " Running command git clone -q https://github.com/openai/CLIP.git /tmp/pip-req-build-1rut9z61\n", + "Requirement already satisfied: ftfy in /usr/local/lib/python3.7/dist-packages (from clip==1.0) (6.1.1)\n", + "Requirement already satisfied: regex in /usr/local/lib/python3.7/dist-packages (from clip==1.0) (2022.6.2)\n", + "Requirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from clip==1.0) (4.64.1)\n", + "Requirement already satisfied: torch in /usr/local/lib/python3.7/dist-packages (from clip==1.0) (1.12.1+cu113)\n", + "Requirement already satisfied: torchvision in /usr/local/lib/python3.7/dist-packages (from clip==1.0) (0.13.1+cu113)\n", + "Requirement already satisfied: wcwidth>=0.2.5 in /usr/local/lib/python3.7/dist-packages (from ftfy->clip==1.0) (0.2.5)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch->clip==1.0) (4.1.1)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from torchvision->clip==1.0) (1.21.6)\n", + "Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from torchvision->clip==1.0) (2.23.0)\n", + "Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /usr/local/lib/python3.7/dist-packages (from torchvision->clip==1.0) (7.1.2)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->torchvision->clip==1.0) (1.24.3)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->torchvision->clip==1.0) (2022.9.24)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->torchvision->clip==1.0) (2.10)\n", + "Requirement already satisfied: 
chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->torchvision->clip==1.0) (3.0.4)\n", + "Building wheels for collected packages: clip\n", + " Building wheel for clip (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for clip: filename=clip-1.0-py3-none-any.whl size=1369408 sha256=2bd94e56438901c74e82d4ed6f7340968d125ed622a99b2b75c8f39db2b38a4a\n", + " Stored in directory: /tmp/pip-ephem-wheel-cache-c2fogydq/wheels/fd/b9/c3/5b4470e35ed76e174bff77c92f91da82098d5e35fd5bc8cdac\n", + "Successfully built clip\n", + "Installing collected packages: clip\n", + "Successfully installed clip-1.0\n", + "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", + "Requirement already satisfied: gdown in /usr/local/lib/python3.7/dist-packages (4.4.0)\n", + "Collecting gdown\n", + " Downloading gdown-4.5.4-py3-none-any.whl (14 kB)\n", + "Requirement already satisfied: requests[socks] in /usr/local/lib/python3.7/dist-packages (from gdown) (2.23.0)\n", + "Requirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from gdown) (4.64.1)\n", + "Requirement already satisfied: filelock in /usr/local/lib/python3.7/dist-packages (from gdown) (3.8.0)\n", + "Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.7/dist-packages (from gdown) (4.6.3)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from gdown) (1.15.0)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests[socks]->gdown) (2022.9.24)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests[socks]->gdown) (2.10)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests[socks]->gdown) (3.0.4)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests[socks]->gdown) (1.24.3)\n", + "Requirement already satisfied: PySocks!=1.5.7,>=1.5.6 in /usr/local/lib/python3.7/dist-packages (from requests[socks]->gdown) (1.7.1)\n", + "Installing collected packages: gdown\n", + " Attempting uninstall: gdown\n", + " Found existing installation: gdown 4.4.0\n", + " Uninstalling gdown-4.4.0:\n", + " Successfully uninstalled gdown-4.4.0\n", + "Successfully installed gdown-4.5.4\n", + "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", + "Collecting pybullet\n", + " Downloading pybullet-3.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl (91.7 MB)\n", + "\u001b[K |████████████████████████████████| 91.7 MB 41 kB/s \n", + "\u001b[?25hRequirement already satisfied: moviepy in /usr/local/lib/python3.7/dist-packages (0.2.3.5)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from moviepy) (1.21.6)\n", + "Requirement already satisfied: tqdm<5.0,>=4.11.2 in /usr/local/lib/python3.7/dist-packages (from moviepy) (4.64.1)\n", + "Requirement already satisfied: imageio<3.0,>=2.1.2 in /usr/local/lib/python3.7/dist-packages (from moviepy) (2.4.1)\n", + "Requirement already satisfied: decorator<5.0,>=4.0.2 in /usr/local/lib/python3.7/dist-packages (from moviepy) (4.4.2)\n", + "Requirement already satisfied: pillow in /usr/local/lib/python3.7/dist-packages (from imageio<3.0,>=2.1.2->moviepy) (7.1.2)\n", + "Installing collected packages: pybullet\n", + "Successfully installed pybullet-3.2.5\n", + "Looking 
in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", + "Collecting flax\n", + " Downloading flax-0.6.2-py3-none-any.whl (189 kB)\n", + "\u001b[K |████████████████████████████████| 189 kB 15.3 MB/s \n", + "\u001b[?25hRequirement already satisfied: jax>=0.3.16 in /usr/local/lib/python3.7/dist-packages (from flax) (0.3.25)\n", + "Collecting optax\n", + " Downloading optax-0.1.4-py3-none-any.whl (154 kB)\n", + "\u001b[K |████████████████████████████████| 154 kB 61.4 MB/s \n", + "\u001b[?25hRequirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from flax) (3.2.2)\n", + "Requirement already satisfied: numpy>=1.12 in /usr/local/lib/python3.7/dist-packages (from flax) (1.21.6)\n", + "Requirement already satisfied: msgpack in /usr/local/lib/python3.7/dist-packages (from flax) (1.0.4)\n", + "Collecting rich>=11.1\n", + " Downloading rich-12.6.0-py3-none-any.whl (237 kB)\n", + "\u001b[K |████████████████████████████████| 237 kB 54.8 MB/s \n", + "\u001b[?25hRequirement already satisfied: typing-extensions>=4.1.1 in /usr/local/lib/python3.7/dist-packages (from flax) (4.1.1)\n", + "Collecting tensorstore\n", + " Downloading tensorstore-0.1.28-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (8.3 MB)\n", + "\u001b[K |████████████████████████████████| 8.3 MB 23.6 MB/s \n", + "\u001b[?25hRequirement already satisfied: PyYAML>=5.4.1 in /usr/local/lib/python3.7/dist-packages (from flax) (6.0)\n", + "Requirement already satisfied: opt-einsum in /usr/local/lib/python3.7/dist-packages (from jax>=0.3.16->flax) (3.3.0)\n", + "Requirement already satisfied: scipy>=1.5 in /usr/local/lib/python3.7/dist-packages (from jax>=0.3.16->flax) (1.7.3)\n", + "Requirement already satisfied: pygments<3.0.0,>=2.6.0 in /usr/local/lib/python3.7/dist-packages (from rich>=11.1->flax) (2.6.1)\n", + "Collecting commonmark<0.10.0,>=0.9.0\n", + " Downloading commonmark-0.9.1-py2.py3-none-any.whl (51 kB)\n", + "\u001b[K |████████████████████████████████| 51 kB 8.4 MB/s \n", + "\u001b[?25hRequirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->flax) (0.11.0)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->flax) (1.4.4)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->flax) (3.0.9)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->flax) (2.8.2)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.1->matplotlib->flax) (1.15.0)\n", + "Requirement already satisfied: jaxlib>=0.1.37 in /usr/local/lib/python3.7/dist-packages (from optax->flax) (0.3.25+cuda11.cudnn805)\n", + "Requirement already satisfied: absl-py>=0.7.1 in /usr/local/lib/python3.7/dist-packages (from optax->flax) (1.3.0)\n", + "Collecting chex>=0.1.5\n", + " Downloading chex-0.1.5-py3-none-any.whl (85 kB)\n", + "\u001b[K |████████████████████████████████| 85 kB 180 kB/s \n", + "\u001b[?25hRequirement already satisfied: toolz>=0.9.0 in /usr/local/lib/python3.7/dist-packages (from chex>=0.1.5->optax->flax) (0.12.0)\n", + "Requirement already satisfied: dm-tree>=0.1.5 in /usr/local/lib/python3.7/dist-packages (from chex>=0.1.5->optax->flax) (0.1.7)\n", + "Installing collected packages: commonmark, chex, tensorstore, rich, optax, flax\n", + "Successfully installed 
chex-0.1.5 commonmark-0.9.1 flax-0.6.2 optax-0.1.4 rich-12.6.0 tensorstore-0.1.28\n", + "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", + "Collecting openai\n", + " Downloading openai-0.25.0.tar.gz (44 kB)\n", + "\u001b[K |████████████████████████████████| 44 kB 2.8 MB/s \n", + "\u001b[?25h Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n", + " Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n", + " Preparing wheel metadata ... \u001b[?25l\u001b[?25hdone\n", + "Collecting pandas-stubs>=1.1.0.11\n", + " Downloading pandas_stubs-1.2.0.62-py3-none-any.whl (163 kB)\n", + "\u001b[K |████████████████████████████████| 163 kB 27.8 MB/s \n", + "\u001b[?25hRequirement already satisfied: openpyxl>=3.0.7 in /usr/local/lib/python3.7/dist-packages (from openai) (3.0.10)\n", + "Requirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from openai) (4.64.1)\n", + "Requirement already satisfied: requests>=2.20 in /usr/local/lib/python3.7/dist-packages (from openai) (2.23.0)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from openai) (4.1.1)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from openai) (1.21.6)\n", + "Requirement already satisfied: pandas>=1.2.3 in /usr/local/lib/python3.7/dist-packages (from openai) (1.3.5)\n", + "Requirement already satisfied: et-xmlfile in /usr/local/lib/python3.7/dist-packages (from openpyxl>=3.0.7->openai) (1.1.0)\n", + "Requirement already satisfied: python-dateutil>=2.7.3 in /usr/local/lib/python3.7/dist-packages (from pandas>=1.2.3->openai) (2.8.2)\n", + "Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.7/dist-packages (from pandas>=1.2.3->openai) (2022.6)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.7.3->pandas>=1.2.3->openai) (1.15.0)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests>=2.20->openai) (3.0.4)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests>=2.20->openai) (2022.9.24)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests>=2.20->openai) (2.10)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests>=2.20->openai) (1.24.3)\n", + "Building wheels for collected packages: openai\n", + " Building wheel for openai (PEP 517) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for openai: filename=openai-0.25.0-py3-none-any.whl size=55880 sha256=9ac239031c8a211e1b34625fcbffba42de4ebe3fb367a7c9db851095c79b361a\n", + " Stored in directory: /root/.cache/pip/wheels/19/de/db/e82770b480ec30fd4a6d67108744b9c52be167c04fcf4af7b5\n", + "Successfully built openai\n", + "Installing collected packages: pandas-stubs, openai\n", + "Successfully installed openai-0.25.0 pandas-stubs-1.2.0.62\n", + "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", + "Requirement already satisfied: easydict in /usr/local/lib/python3.7/dist-packages (1.10)\n", + "Imageio: 'ffmpeg-linux64-v3.3.1' was not found on your computer; downloading it now.\n", + "Try 1. 
Download from https://github.com/imageio/imageio-binaries/raw/master/ffmpeg/ffmpeg-linux64-v3.3.1 (43.8 MB)\n", + "Downloading: 8192/45929032 bytes (0.0%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b3956736/45929032 bytes (8.6%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b8183808/45929032 bytes (17.8%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b12337152/45929032 bytes (26.9%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b16596992/45929032 bytes (36.1%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b20422656/45929032 bytes (44.5%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b24756224/45929032 bytes (53.9%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b29089792/45929032 bytes (63.3%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b33275904/45929032 bytes (72.5%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b37584896/45929032 bytes (81.8%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b41861120/45929032 bytes (91.1%)\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b45929032/45929032 bytes (100.0%)\n", + " Done\n", + "File saved as /root/.imageio/ffmpeg/ffmpeg-linux64-v3.3.1.\n", + "/usr/local/lib/python3.7/dist-packages/gdown/cli.py:125: FutureWarning: Option `--id` was deprecated in version 4.3.1 and will be removed in 5.0. You don't need to pass it anymore to use a file ID.\n", + " category=FutureWarning,\n", + "Downloading...\n", + "From: https://drive.google.com/uc?id=1Cc_fDSBL6QiDvNT4dpfAEbhbALSVoWcc\n", + "To: /content/ur5e.zip\n", + "100% 2.94M/2.94M [00:00<00:00, 137MB/s]\n", + "/usr/local/lib/python3.7/dist-packages/gdown/cli.py:125: FutureWarning: Option `--id` was deprecated in version 4.3.1 and will be removed in 5.0. You don't need to pass it anymore to use a file ID.\n", + " category=FutureWarning,\n", + "Downloading...\n", + "From: https://drive.google.com/uc?id=1yOMEm-Zp_DL3nItG9RozPeJAmeOldekX\n", + "To: /content/robotiq_2f_85.zip\n", + "100% 2.33M/2.33M [00:00<00:00, 59.3MB/s]\n", + "/usr/local/lib/python3.7/dist-packages/gdown/cli.py:125: FutureWarning: Option `--id` was deprecated in version 4.3.1 and will be removed in 5.0. 
You don't need to pass it anymore to use a file ID.\n", + " category=FutureWarning,\n", + "Downloading...\n", + "From: https://drive.google.com/uc?id=1GsqNLhEl9dd4Mc3BM0dX3MibOI1FVWNM\n", + "To: /content/bowl.zip\n", + "100% 181k/181k [00:00<00:00, 85.0MB/s]\n", + "Archive: ur5e.zip\n", + " creating: ur5e/\n", + " creating: ur5e/collision/\n", + " inflating: ur5e/collision/base.stl \n", + " inflating: ur5e/collision/forearm.stl \n", + " inflating: ur5e/collision/shoulder.stl \n", + " inflating: ur5e/collision/upperarm.stl \n", + " inflating: ur5e/collision/wrist1.stl \n", + " inflating: ur5e/collision/wrist2.stl \n", + " inflating: ur5e/collision/wrist3.stl \n", + " inflating: ur5e/ur5e.urdf \n", + " creating: ur5e/visual/\n", + " inflating: ur5e/visual/base.dae \n", + " inflating: ur5e/visual/forearm.dae \n", + " inflating: ur5e/visual/shoulder.dae \n", + " inflating: ur5e/visual/upperarm.dae \n", + " inflating: ur5e/visual/wrist1.dae \n", + " inflating: ur5e/visual/wrist2.dae \n", + " inflating: ur5e/visual/wrist3.dae \n", + "Archive: robotiq_2f_85.zip\n", + " creating: robotiq_2f_85/\n", + " inflating: robotiq_2f_85/README.md \n", + " inflating: robotiq_2f_85/robotiq-2f-base.mtl \n", + " inflating: robotiq_2f_85/robotiq-2f-base.obj \n", + " inflating: robotiq_2f_85/robotiq-2f-base.stl \n", + " inflating: robotiq_2f_85/robotiq-2f-coupler.mtl \n", + " inflating: robotiq_2f_85/robotiq-2f-coupler.obj \n", + " inflating: robotiq_2f_85/robotiq-2f-coupler.stl \n", + " inflating: robotiq_2f_85/robotiq-2f-driver.mtl \n", + " inflating: robotiq_2f_85/robotiq-2f-driver.obj \n", + " inflating: robotiq_2f_85/robotiq-2f-driver.stl \n", + " inflating: robotiq_2f_85/robotiq-2f-follower.mtl \n", + " inflating: robotiq_2f_85/robotiq-2f-follower.obj \n", + " inflating: robotiq_2f_85/robotiq-2f-follower.stl \n", + " inflating: robotiq_2f_85/robotiq-2f-pad.stl \n", + " inflating: robotiq_2f_85/robotiq-2f-spring_link.mtl \n", + " inflating: robotiq_2f_85/robotiq-2f-spring_link.obj \n", + " inflating: robotiq_2f_85/robotiq-2f-spring_link.stl \n", + " inflating: robotiq_2f_85/robotiq_2f_85.urdf \n", + " creating: robotiq_2f_85/textures/\n", + " inflating: robotiq_2f_85/textures/gripper-2f_BaseColor.jpg \n", + " inflating: robotiq_2f_85/textures/gripper-2f_Metallic.jpg \n", + " inflating: robotiq_2f_85/textures/gripper-2f_Normal.jpg \n", + " inflating: robotiq_2f_85/textures/gripper-2f_Roughness.jpg \n", + "Archive: bowl.zip\n", + " creating: bowl/\n", + " inflating: bowl/bowl.urdf \n", + " inflating: bowl/cup.obj \n", + " inflating: bowl/textured-0008192.obj \n", + "Tue Nov 29 10:44:55 2022 \n", + "+-----------------------------------------------------------------------------+\n", + "| NVIDIA-SMI 460.32.03 Driver Version: 460.32.03 CUDA Version: 11.2 |\n", + "|-------------------------------+----------------------+----------------------+\n", + "| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n", + "| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n", + "| | | MIG M. 
|\n", + "|===============================+======================+======================|\n", + "| 0 Tesla T4 Off | 00000000:00:04.0 Off | 0 |\n", + "| N/A 60C P8 10W / 70W | 3MiB / 15109MiB | 0% Default |\n", + "| | | N/A |\n", + "+-------------------------------+----------------------+----------------------+\n", + " \n", + "+-----------------------------------------------------------------------------+\n", + "| Processes: |\n", + "| GPU GI CI PID Type Process name GPU Memory |\n", + "| ID ID Usage |\n", + "|=============================================================================|\n", + "| No running processes found |\n", + "+-----------------------------------------------------------------------------+\n", + "gpu\n" + ] + } + ], + "source": [ + "#@markdown\n", + "\n", + "!pip install ftfy regex tqdm fvcore imageio==2.4.1 imageio-ffmpeg==0.4.5\n", + "!pip install git+https://github.com/openai/CLIP.git\n", + "!pip install -U --no-cache-dir gdown --pre\n", + "!pip install pybullet moviepy\n", + "!pip install flax\n", + "!pip install openai\n", + "!pip install easydict\n", + "# !pip install tensorflow==2.7.0 # If error: UNIMPLEMENTED: DNN library is not found.\n", + "\n", + "import collections\n", + "import datetime\n", + "import os\n", + "import random\n", + "import threading\n", + "import time\n", + "\n", + "import cv2 # Used by ViLD.\n", + "import clip\n", + "from easydict import EasyDict\n", + "import flax\n", + "from flax import linen as nn\n", + "from flax.training import train_state, checkpoints\n", + "import imageio\n", + "import IPython\n", + "import jax\n", + "import jax.numpy as jnp\n", + "from matplotlib import pyplot as plt, transforms, cm\n", + "from moviepy.editor import ImageSequenceClip\n", + "import numpy as np\n", + "import openai\n", + "import optax\n", + "import pickle\n", + "from PIL import Image\n", + "import pybullet\n", + "import pybullet_data\n", + "import tensorflow.compat.v1 as tf\n", + "import torch\n", + "from tqdm import tqdm\n", + "\n", + "os.environ[\"XLA_PYTHON_CLIENT_MEM_FRACTION\"] = \"0.5\"\n", + "\n", + "\n", + "#Download PyBullet assets.\n", + "if not os.path.exists('ur5e/ur5e.urdf'):\n", + " !gdown --id 1Cc_fDSBL6QiDvNT4dpfAEbhbALSVoWcc\n", + " !gdown --id 1yOMEm-Zp_DL3nItG9RozPeJAmeOldekX\n", + " !gdown --id 1GsqNLhEl9dd4Mc3BM0dX3MibOI1FVWNM\n", + " !unzip ur5e.zip\n", + " !unzip robotiq_2f_85.zip\n", + " !unzip bowl.zip\n", + "\n", + "%load_ext tensorboard\n", + "\n", + "# Show useful GPU info.\n", + "!nvidia-smi\n", + "\n", + "# Show if JAX is using GPU.\n", + "from jax.lib import xla_bridge\n", + "print(xla_bridge.get_backend().platform)" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "b33f0439-5569-4e45-982c-4cd3afb52de6", + "metadata": { + "cellView": "form", + "id": "b33f0439-5569-4e45-982c-4cd3afb52de6" + }, + "outputs": [], + "source": [ + "#@markdown Then we set some global constants, such as pick and place objects, colors, workspace bounds.\n", + "\n", + "ALL_COLORS = {\"red\", \"blue\", \"green\"}\n", + "SEEN_COLORS = set.union({\"yellow\", \"brown\", \"gray\", \"cyan\"}, ALL_COLORS)\n", + "UNSEEN_COLORS = set.union({\"orange\", \"purple\", \"pink\", \"white\"}, ALL_COLORS)\n", + "\n", + "COLORS = {\n", + " 'blue': (78/255, 121/255, 167/255, 255/255),\n", + " 'red': (255/255, 87/255, 89/255, 255/255),\n", + " 'green': (89/255, 169/255, 79/255, 255/255),\n", + " 'orange': (242/255, 142/255, 43/255, 255/255),\n", + " 'yellow': (237/255, 201/255, 72/255, 255/255),\n", + " 'purple': (176/255, 122/255, 161/255, 
255/255),\n", + " 'pink': (255/255, 157/255, 167/255, 255/255),\n", + " 'cyan': (118/255, 183/255, 178/255, 255/255),\n", + " 'brown': (156/255, 117/255, 95/255, 255/255),\n", + " 'gray': (186/255, 176/255, 172/255, 255/255),\n", + " 'white': (255/255, 255/255, 255/255, 255/255),\n", + "}\n", + "\n", + "PIXEL_SIZE = 0.00267857\n", + "BOUNDS = np.float32([[-0.3, 0.3], [-0.8, -0.2], [0, 0.15]]) # (X, Y, Z)" + ] + }, + { + "cell_type": "markdown", + "id": "iXF8HJVsCaWT", + "metadata": { + "id": "iXF8HJVsCaWT" + }, + "source": [ + "Next, we will define the gripper and environment classes." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "17cbffac-ae3f-4426-8f73-a45300e61c26", + "metadata": { + "cellView": "form", + "id": "17cbffac-ae3f-4426-8f73-a45300e61c26" + }, + "outputs": [], + "source": [ + "#@markdown **Gripper class:** adds a gripper to the robot and runs a parallel thread to simulate single-actuator behavior.\n", + "\n", + "class Robotiq2F85:\n", + " \"\"\"Gripper handling for Robotiq 2F85.\"\"\"\n", + "\n", + " def __init__(self, robot, tool):\n", + " self.robot = robot\n", + " self.tool = tool\n", + " pos = [0.1339999999999999, -0.49199999999872496, 0.5]\n", + " rot = pybullet.getQuaternionFromEuler([np.pi, 0, np.pi])\n", + " urdf = 'robotiq_2f_85/robotiq_2f_85.urdf'\n", + " self.body = pybullet.loadURDF(urdf, pos, rot)\n", + " self.n_joints = pybullet.getNumJoints(self.body)\n", + " self.activated = False\n", + "\n", + " # Connect gripper base to robot tool.\n", + " pybullet.createConstraint(self.robot, tool, self.body, 0, jointType=pybullet.JOINT_FIXED, jointAxis=[0, 0, 0], parentFramePosition=[0, 0, 0], childFramePosition=[0, 0, -0.07], childFrameOrientation=pybullet.getQuaternionFromEuler([0, 0, np.pi / 2]))\n", + "\n", + " # Set friction coefficients for gripper fingers.\n", + " for i in range(pybullet.getNumJoints(self.body)):\n", + " pybullet.changeDynamics(self.body, i, lateralFriction=10.0, spinningFriction=1.0, rollingFriction=1.0, frictionAnchor=True)\n", + "\n", + " # Start thread to handle additional gripper constraints.\n", + " self.motor_joint = 1\n", + " self.running = True\n", + " self.constraints_thread = threading.Thread(target=self.step)\n", + " self.constraints_thread.daemon = True\n", + " self.constraints_thread.start()\n", + "\n", + " # Control joint positions by enforcing hard contraints on gripper behavior.\n", + " # Set one joint as the open/close motor joint (other joints should mimic).\n", + " def step(self):\n", + " while self.running:\n", + " try:\n", + " currj = [pybullet.getJointState(self.body, i)[0] for i in range(self.n_joints)]\n", + " indj = [6, 3, 8, 5, 10]\n", + " targj = [currj[1], -currj[1], -currj[1], currj[1], currj[1]]\n", + " pybullet.setJointMotorControlArray(self.body, indj, pybullet.POSITION_CONTROL, targj, positionGains=np.ones(5))\n", + " except:\n", + " return\n", + " time.sleep(0.001)\n", + "\n", + " # Close gripper fingers.\n", + " def activate(self):\n", + " pybullet.setJointMotorControl2(self.body, self.motor_joint, pybullet.VELOCITY_CONTROL, targetVelocity=1, force=10)\n", + " self.activated = True\n", + "\n", + " # Open gripper fingers.\n", + " def release(self):\n", + " pybullet.setJointMotorControl2(self.body, self.motor_joint, pybullet.VELOCITY_CONTROL, targetVelocity=-1, force=10)\n", + " self.activated = False\n", + "\n", + " # If activated and object in gripper: check object contact.\n", + " # If activated and nothing in gripper: check gripper contact.\n", + " # If released: check proximity to surface 
(disabled).\n", + " def detect_contact(self):\n", + " obj, _, ray_frac = self.check_proximity()\n", + " if self.activated:\n", + " empty = self.grasp_width() < 0.01\n", + " cbody = self.body if empty else obj\n", + " if obj == self.body or obj == 0:\n", + " return False\n", + " return self.external_contact(cbody)\n", + " # else:\n", + " # return ray_frac < 0.14 or self.external_contact()\n", + "\n", + " # Return if body is in contact with something other than gripper\n", + " def external_contact(self, body=None):\n", + " if body is None:\n", + " body = self.body\n", + " pts = pybullet.getContactPoints(bodyA=body)\n", + " pts = [pt for pt in pts if pt[2] != self.body]\n", + " return len(pts) > 0 # pylint: disable=g-explicit-length-test\n", + "\n", + " def check_grasp(self):\n", + " while self.moving():\n", + " time.sleep(0.001)\n", + " success = self.grasp_width() > 0.01\n", + " return success\n", + "\n", + " def grasp_width(self):\n", + " lpad = np.array(pybullet.getLinkState(self.body, 4)[0])\n", + " rpad = np.array(pybullet.getLinkState(self.body, 9)[0])\n", + " dist = np.linalg.norm(lpad - rpad) - 0.047813\n", + " return dist\n", + "\n", + " def check_proximity(self):\n", + " ee_pos = np.array(pybullet.getLinkState(self.robot, self.tool)[0])\n", + " tool_pos = np.array(pybullet.getLinkState(self.body, 0)[0])\n", + " vec = (tool_pos - ee_pos) / np.linalg.norm((tool_pos - ee_pos))\n", + " ee_targ = ee_pos + vec\n", + " ray_data = pybullet.rayTest(ee_pos, ee_targ)[0]\n", + " obj, link, ray_frac = ray_data[0], ray_data[1], ray_data[2]\n", + " return obj, link, ray_frac" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "6add977c-813a-4557-8f22-ce501e91fa34", + "metadata": { + "cellView": "form", + "id": "6add977c-813a-4557-8f22-ce501e91fa34" + }, + "outputs": [], + "source": [ + "#@markdown **Gym-style environment class:** this initializes a robot overlooking a workspace with objects.\n", + "\n", + "class PickPlaceEnv():\n", + "\n", + " def __init__(self):\n", + " self.dt = 1/480\n", + " self.sim_step = 0\n", + "\n", + " # Configure and start PyBullet.\n", + " # python3 -m pybullet_utils.runServer\n", + " # pybullet.connect(pybullet.SHARED_MEMORY) # pybullet.GUI for local GUI.\n", + " pybullet.connect(pybullet.DIRECT) # pybullet.GUI for local GUI.\n", + " pybullet.configureDebugVisualizer(pybullet.COV_ENABLE_GUI, 0)\n", + " pybullet.setPhysicsEngineParameter(enableFileCaching=0)\n", + " assets_path = os.path.dirname(os.path.abspath(\"\"))\n", + " pybullet.setAdditionalSearchPath(assets_path)\n", + " pybullet.setAdditionalSearchPath(pybullet_data.getDataPath())\n", + " pybullet.setTimeStep(self.dt)\n", + "\n", + " self.home_joints = (np.pi / 2, -np.pi / 2, np.pi / 2, -np.pi / 2, 3 * np.pi / 2, 0) # Joint angles: (J0, J1, J2, J3, J4, J5).\n", + " self.home_ee_euler = (np.pi, 0, np.pi) # (RX, RY, RZ) rotation in Euler angles.\n", + " self.ee_link_id = 9 # Link ID of UR5 end effector.\n", + " self.tip_link_id = 10 # Link ID of gripper finger tips.\n", + " self.gripper = None\n", + "\n", + " def reset(self, config):\n", + " pybullet.resetSimulation(pybullet.RESET_USE_DEFORMABLE_WORLD)\n", + " pybullet.setGravity(0, 0, -9.8)\n", + " self.cache_video = []\n", + "\n", + " # Temporarily disable rendering to load URDFs faster.\n", + " pybullet.configureDebugVisualizer(pybullet.COV_ENABLE_RENDERING, 0)\n", + "\n", + " # Add robot.\n", + " pybullet.loadURDF(\"plane.urdf\", [0, 0, -0.001])\n", + " self.robot_id = pybullet.loadURDF(\"ur5e/ur5e.urdf\", [0, 0, 0], 
flags=pybullet.URDF_USE_MATERIAL_COLORS_FROM_MTL)\n", + " self.ghost_id = pybullet.loadURDF(\"ur5e/ur5e.urdf\", [0, 0, -10]) # For forward kinematics.\n", + " self.joint_ids = [pybullet.getJointInfo(self.robot_id, i) for i in range(pybullet.getNumJoints(self.robot_id))]\n", + " self.joint_ids = [j[0] for j in self.joint_ids if j[2] == pybullet.JOINT_REVOLUTE]\n", + "\n", + " # Move robot to home configuration.\n", + " for i in range(len(self.joint_ids)):\n", + " pybullet.resetJointState(self.robot_id, self.joint_ids[i], self.home_joints[i])\n", + "\n", + " # Add gripper.\n", + " if self.gripper is not None:\n", + " while self.gripper.constraints_thread.is_alive():\n", + " self.constraints_thread_active = False\n", + " self.gripper = Robotiq2F85(self.robot_id, self.ee_link_id)\n", + " self.gripper.release()\n", + "\n", + " # Add workspace.\n", + " plane_shape = pybullet.createCollisionShape(pybullet.GEOM_BOX, halfExtents=[0.3, 0.3, 0.001])\n", + " plane_visual = pybullet.createVisualShape(pybullet.GEOM_BOX, halfExtents=[0.3, 0.3, 0.001])\n", + " plane_id = pybullet.createMultiBody(0, plane_shape, plane_visual, basePosition=[0, -0.5, 0])\n", + " pybullet.changeVisualShape(plane_id, -1, rgbaColor=[0.2, 0.2, 0.2, 1.0])\n", + "\n", + " # Load objects according to config.\n", + " self.config = config\n", + " self.obj_name_to_id = {}\n", + " obj_names = list(self.config['pick']) + list(self.config['place'])\n", + " obj_xyz = np.zeros((0, 3))\n", + " for obj_name in obj_names:\n", + " if ('block' in obj_name) or ('bowl' in obj_name):\n", + "\n", + " # Get random position 15cm+ from other objects.\n", + " while True:\n", + " rand_x = np.random.uniform(BOUNDS[0, 0] + 0.1, BOUNDS[0, 1] - 0.1)\n", + " rand_y = np.random.uniform(BOUNDS[1, 0] + 0.1, BOUNDS[1, 1] - 0.1)\n", + " rand_xyz = np.float32([rand_x, rand_y, 0.03]).reshape(1, 3)\n", + " if len(obj_xyz) == 0:\n", + " obj_xyz = np.concatenate((obj_xyz, rand_xyz), axis=0)\n", + " break\n", + " else:\n", + " nn_dist = np.min(np.linalg.norm(obj_xyz - rand_xyz, axis=1)).squeeze()\n", + " if nn_dist > 0.15:\n", + " obj_xyz = np.concatenate((obj_xyz, rand_xyz), axis=0)\n", + " break\n", + " \n", + " object_color = COLORS[obj_name.split(' ')[0]]\n", + " object_type = obj_name.split(' ')[1]\n", + " object_position = rand_xyz.squeeze()\n", + " if object_type == 'block':\n", + " object_shape = pybullet.createCollisionShape(pybullet.GEOM_BOX, halfExtents=[0.02, 0.02, 0.02])\n", + " object_visual = pybullet.createVisualShape(pybullet.GEOM_BOX, halfExtents=[0.02, 0.02, 0.02])\n", + " object_id = pybullet.createMultiBody(0.01, object_shape, object_visual, basePosition=object_position)\n", + " elif object_type == 'bowl':\n", + " object_position[2] = 0\n", + " object_id = pybullet.loadURDF(\"bowl/bowl.urdf\", object_position, useFixedBase=1)\n", + " pybullet.changeVisualShape(object_id, -1, rgbaColor=object_color)\n", + " self.obj_name_to_id[obj_name] = object_id\n", + "\n", + " # Re-enable rendering.\n", + " pybullet.configureDebugVisualizer(pybullet.COV_ENABLE_RENDERING, 1)\n", + "\n", + " for _ in range(200):\n", + " pybullet.stepSimulation()\n", + " print('Environment reset: done.')\n", + " return self.get_observation()\n", + "\n", + " def servoj(self, joints):\n", + " \"\"\"Move to target joint positions with position control.\"\"\"\n", + " pybullet.setJointMotorControlArray(\n", + " bodyIndex=self.robot_id,\n", + " jointIndices=self.joint_ids,\n", + " controlMode=pybullet.POSITION_CONTROL,\n", + " targetPositions=joints,\n", + " positionGains=[0.01]*6)\n", + 
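"    # Note: with positionGains of 0.01, servoj approaches its target joint\n", + "    # configuration gradually; callers therefore loop movep() together with\n", + "    # step_sim_and_render() until the tip link is within 1 cm of the goal.\n", +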
" \n", + " def movep(self, position):\n", + " \"\"\"Move to target end effector position.\"\"\"\n", + " joints = pybullet.calculateInverseKinematics(\n", + " bodyUniqueId=self.robot_id,\n", + " endEffectorLinkIndex=self.tip_link_id,\n", + " targetPosition=position,\n", + " targetOrientation=pybullet.getQuaternionFromEuler(self.home_ee_euler),\n", + " maxNumIterations=100)\n", + " self.servoj(joints)\n", + "\n", + " def step(self, action=None):\n", + " \"\"\"Do pick and place motion primitive.\"\"\"\n", + " pick_xyz, place_xyz = action['pick'].copy(), action['place'].copy()\n", + "\n", + " # Set fixed primitive z-heights.\n", + " hover_xyz = pick_xyz.copy() + np.float32([0, 0, 0.2])\n", + " pick_xyz[2] -= 0.02\n", + " pick_xyz[2] = max(pick_xyz[2], 0.03)\n", + " place_xyz[2] = 0.15\n", + "\n", + " # Move to object.\n", + " ee_xyz = np.float32(pybullet.getLinkState(self.robot_id, self.tip_link_id)[0])\n", + " while np.linalg.norm(hover_xyz - ee_xyz) > 0.01:\n", + " self.movep(hover_xyz)\n", + " self.step_sim_and_render()\n", + " ee_xyz = np.float32(pybullet.getLinkState(self.robot_id, self.tip_link_id)[0])\n", + " while np.linalg.norm(pick_xyz - ee_xyz) > 0.01:\n", + " self.movep(pick_xyz)\n", + " self.step_sim_and_render()\n", + " ee_xyz = np.float32(pybullet.getLinkState(self.robot_id, self.tip_link_id)[0])\n", + "\n", + " # Pick up object.\n", + " self.gripper.activate()\n", + " for _ in range(240):\n", + " self.step_sim_and_render()\n", + " while np.linalg.norm(hover_xyz - ee_xyz) > 0.01:\n", + " self.movep(hover_xyz)\n", + " self.step_sim_and_render()\n", + " ee_xyz = np.float32(pybullet.getLinkState(self.robot_id, self.tip_link_id)[0])\n", + " \n", + " # Move to place location.\n", + " while np.linalg.norm(place_xyz - ee_xyz) > 0.01:\n", + " self.movep(place_xyz)\n", + " self.step_sim_and_render()\n", + " ee_xyz = np.float32(pybullet.getLinkState(self.robot_id, self.tip_link_id)[0])\n", + "\n", + " # Place down object.\n", + " while (not self.gripper.detect_contact()) and (place_xyz[2] > 0.03):\n", + " place_xyz[2] -= 0.001\n", + " self.movep(place_xyz)\n", + " for _ in range(3):\n", + " self.step_sim_and_render()\n", + " self.gripper.release()\n", + " for _ in range(240):\n", + " self.step_sim_and_render()\n", + " place_xyz[2] = 0.2\n", + " ee_xyz = np.float32(pybullet.getLinkState(self.robot_id, self.tip_link_id)[0])\n", + " while np.linalg.norm(place_xyz - ee_xyz) > 0.01:\n", + " self.movep(place_xyz)\n", + " self.step_sim_and_render()\n", + " ee_xyz = np.float32(pybullet.getLinkState(self.robot_id, self.tip_link_id)[0])\n", + " place_xyz = np.float32([0, -0.5, 0.2])\n", + " while np.linalg.norm(place_xyz - ee_xyz) > 0.01:\n", + " self.movep(place_xyz)\n", + " self.step_sim_and_render()\n", + " ee_xyz = np.float32(pybullet.getLinkState(self.robot_id, self.tip_link_id)[0])\n", + "\n", + " observation = self.get_observation()\n", + " reward = self.get_reward()\n", + " done = False\n", + " info = {}\n", + " return observation, reward, done, info\n", + "\n", + " def step_sim_and_render(self):\n", + " pybullet.stepSimulation()\n", + " self.sim_step += 1\n", + "\n", + " # Render current image at 8 FPS.\n", + " if self.sim_step % (1 / (8 * self.dt)) == 0:\n", + " self.cache_video.append(self.get_camera_image())\n", + "\n", + " def get_camera_image(self):\n", + " image_size = (240, 240)\n", + " intrinsics = (120., 0, 120., 0, 120., 120., 0, 0, 1)\n", + " color, _, _, _, _ = env.render_image(image_size, intrinsics)\n", + " return color\n", + "\n", + " def set_alpha_transparency(self, alpha: 
float) -> None:\n", + " for id in range(20):\n", + " visual_shape_data = pybullet.getVisualShapeData(id)\n", + " for i in range(len(visual_shape_data)):\n", + " object_id, link_index, _, _, _, _, _, rgba_color = visual_shape_data[i]\n", + " rgba_color = list(rgba_color[0:3]) + [alpha]\n", + " pybullet.changeVisualShape(self.robot_id, linkIndex=i, rgbaColor=rgba_color) \n", + " pybullet.changeVisualShape(self.gripper.body, linkIndex=i, rgbaColor=rgba_color)\n", + "\n", + " def get_camera_image_top(self, \n", + " image_size=(240, 240), \n", + " intrinsics=(2000., 0, 2000., 0, 2000., 2000., 0, 0, 1),\n", + " position=(0, -0.5, 5),\n", + " orientation=(0, np.pi, -np.pi / 2),\n", + " zrange=(0.01, 1.),\n", + " set_alpha=True):\n", + " set_alpha and self.set_alpha_transparency(0)\n", + " color, _, _, _, _ = env.render_image_top(image_size, \n", + " intrinsics,\n", + " position,\n", + " orientation,\n", + " zrange)\n", + " set_alpha and self.set_alpha_transparency(1)\n", + " return color\n", + "\n", + " def render_image_top(self, \n", + " image_size=(240, 240), \n", + " intrinsics=(2000., 0, 2000., 0, 2000., 2000., 0, 0, 1),\n", + " position=(0, -0.5, 5),\n", + " orientation=(0, np.pi, -np.pi / 2),\n", + " zrange=(0.01, 1.)):\n", + "\n", + " # Camera parameters.\n", + " orientation = pybullet.getQuaternionFromEuler(orientation)\n", + " noise=True\n", + "\n", + " # OpenGL camera settings.\n", + " lookdir = np.float32([0, 0, 1]).reshape(3, 1)\n", + " updir = np.float32([0, -1, 0]).reshape(3, 1)\n", + " rotation = pybullet.getMatrixFromQuaternion(orientation)\n", + " rotm = np.float32(rotation).reshape(3, 3)\n", + " lookdir = (rotm @ lookdir).reshape(-1)\n", + " updir = (rotm @ updir).reshape(-1)\n", + " lookat = position + lookdir\n", + " focal_len = intrinsics[0]\n", + " znear, zfar = (0.01, 10.)\n", + " viewm = pybullet.computeViewMatrix(position, lookat, updir)\n", + " fovh = (image_size[0] / 2) / focal_len\n", + " fovh = 180 * np.arctan(fovh) * 2 / np.pi\n", + "\n", + " # Notes: 1) FOV is vertical FOV 2) aspect must be float\n", + " aspect_ratio = image_size[1] / image_size[0]\n", + " projm = pybullet.computeProjectionMatrixFOV(fovh, aspect_ratio, znear, zfar)\n", + "\n", + " # Render with OpenGL camera settings.\n", + " _, _, color, depth, segm = pybullet.getCameraImage(\n", + " width=image_size[1],\n", + " height=image_size[0],\n", + " viewMatrix=viewm,\n", + " projectionMatrix=projm,\n", + " shadow=1,\n", + " flags=pybullet.ER_SEGMENTATION_MASK_OBJECT_AND_LINKINDEX,\n", + " renderer=pybullet.ER_BULLET_HARDWARE_OPENGL)\n", + "\n", + " # Get color image.\n", + " color_image_size = (image_size[0], image_size[1], 4)\n", + " color = np.array(color, dtype=np.uint8).reshape(color_image_size)\n", + " color = color[:, :, :3] # remove alpha channel\n", + " if noise:\n", + " color = np.int32(color)\n", + " color += np.int32(np.random.normal(0, 3, color.shape))\n", + " color = np.uint8(np.clip(color, 0, 255))\n", + "\n", + " # Get depth image.\n", + " depth_image_size = (image_size[0], image_size[1])\n", + " zbuffer = np.float32(depth).reshape(depth_image_size)\n", + " depth = (zfar + znear - (2 * zbuffer - 1) * (zfar - znear))\n", + " depth = (2 * znear * zfar) / depth\n", + " if noise:\n", + " depth += np.random.normal(0, 0.003, depth.shape)\n", + "\n", + " intrinsics = np.float32(intrinsics).reshape(3, 3)\n", + " return color, depth, position, orientation, intrinsics\n", + "\n", + " def get_reward(self):\n", + " return 0 # TODO: check did the robot follow text instructions?\n", + "\n", + " def 
get_observation(self):\n", + " observation = {}\n", + "\n", + " # Render current image.\n", + " color, depth, position, orientation, intrinsics = self.render_image()\n", + "\n", + " # Get heightmaps and colormaps.\n", + " points = self.get_pointcloud(depth, intrinsics)\n", + " position = np.float32(position).reshape(3, 1)\n", + " rotation = pybullet.getMatrixFromQuaternion(orientation)\n", + " rotation = np.float32(rotation).reshape(3, 3)\n", + " transform = np.eye(4)\n", + " transform[:3, :] = np.hstack((rotation, position))\n", + " points = self.transform_pointcloud(points, transform)\n", + " heightmap, colormap, xyzmap = self.get_heightmap(points, color, BOUNDS, PIXEL_SIZE)\n", + "\n", + " observation[\"image\"] = colormap\n", + " observation[\"xyzmap\"] = xyzmap\n", + " return observation\n", + "\n", + " def render_image(self, image_size=(720, 720), intrinsics=(360., 0, 360., 0, 360., 360., 0, 0, 1)):\n", + "\n", + " # Camera parameters.\n", + " position = (0, -0.85, 0.4)\n", + " orientation = (np.pi / 4 + np.pi / 48, np.pi, np.pi)\n", + " orientation = pybullet.getQuaternionFromEuler(orientation)\n", + " zrange = (0.01, 10.)\n", + " noise=True\n", + "\n", + " # OpenGL camera settings.\n", + " lookdir = np.float32([0, 0, 1]).reshape(3, 1)\n", + " updir = np.float32([0, -1, 0]).reshape(3, 1)\n", + " rotation = pybullet.getMatrixFromQuaternion(orientation)\n", + " rotm = np.float32(rotation).reshape(3, 3)\n", + " lookdir = (rotm @ lookdir).reshape(-1)\n", + " updir = (rotm @ updir).reshape(-1)\n", + " lookat = position + lookdir\n", + " focal_len = intrinsics[0]\n", + " znear, zfar = (0.01, 10.)\n", + " viewm = pybullet.computeViewMatrix(position, lookat, updir)\n", + " fovh = (image_size[0] / 2) / focal_len\n", + " fovh = 180 * np.arctan(fovh) * 2 / np.pi\n", + "\n", + " # Notes: 1) FOV is vertical FOV 2) aspect must be float\n", + " aspect_ratio = image_size[1] / image_size[0]\n", + " projm = pybullet.computeProjectionMatrixFOV(fovh, aspect_ratio, znear, zfar)\n", + "\n", + " # Render with OpenGL camera settings.\n", + " _, _, color, depth, segm = pybullet.getCameraImage(\n", + " width=image_size[1],\n", + " height=image_size[0],\n", + " viewMatrix=viewm,\n", + " projectionMatrix=projm,\n", + " shadow=1,\n", + " flags=pybullet.ER_SEGMENTATION_MASK_OBJECT_AND_LINKINDEX,\n", + " renderer=pybullet.ER_BULLET_HARDWARE_OPENGL)\n", + "\n", + " # Get color image.\n", + " color_image_size = (image_size[0], image_size[1], 4)\n", + " color = np.array(color, dtype=np.uint8).reshape(color_image_size)\n", + " color = color[:, :, :3] # remove alpha channel\n", + " if noise:\n", + " color = np.int32(color)\n", + " color += np.int32(np.random.normal(0, 3, color.shape))\n", + " color = np.uint8(np.clip(color, 0, 255))\n", + "\n", + " # Get depth image.\n", + " depth_image_size = (image_size[0], image_size[1])\n", + " zbuffer = np.float32(depth).reshape(depth_image_size)\n", + " depth = (zfar + znear - (2 * zbuffer - 1) * (zfar - znear))\n", + " depth = (2 * znear * zfar) / depth\n", + " if noise:\n", + " depth += np.random.normal(0, 0.003, depth.shape)\n", + "\n", + " intrinsics = np.float32(intrinsics).reshape(3, 3)\n", + " return color, depth, position, orientation, intrinsics\n", + "\n", + " def get_pointcloud(self, depth, intrinsics):\n", + " \"\"\"Get 3D pointcloud from perspective depth image.\n", + " Args:\n", + " depth: HxW float array of perspective depth in meters.\n", + " intrinsics: 3x3 float array of camera intrinsics matrix.\n", + " Returns:\n", + " points: HxWx3 float array of 3D points 
in camera coordinates.\n", + " \"\"\"\n", + " height, width = depth.shape\n", + " xlin = np.linspace(0, width - 1, width)\n", + " ylin = np.linspace(0, height - 1, height)\n", + " px, py = np.meshgrid(xlin, ylin)\n", + " px = (px - intrinsics[0, 2]) * (depth / intrinsics[0, 0])\n", + " py = (py - intrinsics[1, 2]) * (depth / intrinsics[1, 1])\n", + " points = np.float32([px, py, depth]).transpose(1, 2, 0)\n", + " return points\n", + "\n", + " def transform_pointcloud(self, points, transform):\n", + " \"\"\"Apply rigid transformation to 3D pointcloud.\n", + " Args:\n", + " points: HxWx3 float array of 3D points in camera coordinates.\n", + " transform: 4x4 float array representing a rigid transformation matrix.\n", + " Returns:\n", + " points: HxWx3 float array of transformed 3D points.\n", + " \"\"\"\n", + " padding = ((0, 0), (0, 0), (0, 1))\n", + " homogen_points = np.pad(points.copy(), padding,\n", + " 'constant', constant_values=1)\n", + " for i in range(3):\n", + " points[Ellipsis, i] = np.sum(transform[i, :] * homogen_points, axis=-1)\n", + " return points\n", + "\n", + " def get_heightmap(self, points, colors, bounds, pixel_size):\n", + " \"\"\"Get top-down (z-axis) orthographic heightmap image from 3D pointcloud.\n", + " Args:\n", + " points: HxWx3 float array of 3D points in world coordinates.\n", + " colors: HxWx3 uint8 array of values in range 0-255 aligned with points.\n", + " bounds: 3x2 float array of values (rows: X,Y,Z; columns: min,max) defining\n", + " region in 3D space to generate heightmap in world coordinates.\n", + " pixel_size: float defining size of each pixel in meters.\n", + " Returns:\n", + " heightmap: HxW float array of height (from lower z-bound) in meters.\n", + " colormap: HxWx3 uint8 array of backprojected color aligned with heightmap.\n", + " xyzmap: HxWx3 float array of XYZ points in world coordinates.\n", + " \"\"\"\n", + " width = int(np.round((bounds[0, 1] - bounds[0, 0]) / pixel_size))\n", + " height = int(np.round((bounds[1, 1] - bounds[1, 0]) / pixel_size))\n", + " heightmap = np.zeros((height, width), dtype=np.float32)\n", + " colormap = np.zeros((height, width, colors.shape[-1]), dtype=np.uint8)\n", + " xyzmap = np.zeros((height, width, 3), dtype=np.float32)\n", + "\n", + " # Filter out 3D points that are outside of the predefined bounds.\n", + " ix = (points[Ellipsis, 0] >= bounds[0, 0]) & (points[Ellipsis, 0] < bounds[0, 1])\n", + " iy = (points[Ellipsis, 1] >= bounds[1, 0]) & (points[Ellipsis, 1] < bounds[1, 1])\n", + " iz = (points[Ellipsis, 2] >= bounds[2, 0]) & (points[Ellipsis, 2] < bounds[2, 1])\n", + " valid = ix & iy & iz\n", + " points = points[valid]\n", + " colors = colors[valid]\n", + "\n", + " # Sort 3D points by z-value, which works with array assignment to simulate\n", + " # z-buffering for rendering the heightmap image.\n", + " iz = np.argsort(points[:, -1])\n", + " points, colors = points[iz], colors[iz]\n", + " px = np.int32(np.floor((points[:, 0] - bounds[0, 0]) / pixel_size))\n", + " py = np.int32(np.floor((points[:, 1] - bounds[1, 0]) / pixel_size))\n", + " px = np.clip(px, 0, width - 1)\n", + " py = np.clip(py, 0, height - 1)\n", + " heightmap[py, px] = points[:, 2] - bounds[2, 0]\n", + " for c in range(colors.shape[-1]):\n", + " colormap[py, px, c] = colors[:, c]\n", + " xyzmap[py, px, c] = points[:, c]\n", + " colormap = colormap[::-1, :, :] # Flip up-down.\n", + " xv, yv = np.meshgrid(np.linspace(BOUNDS[0, 0], BOUNDS[0, 1], height),\n", + " np.linspace(BOUNDS[1, 0], BOUNDS[1, 1], width))\n", + " xyzmap[:, :, 0] = xv\n", 
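+ "    # Note: the surrounding two assignments overwrite the X/Y channels of xyzmap with an\n", + "    # evenly spaced grid spanning BOUNDS, so every heightmap pixel carries a nominal\n", + "    # world (x, y); only the Z channel keeps values measured from the point cloud.\n",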
+ " xyzmap[:, :, 1] = yv\n", + " xyzmap = xyzmap[::-1, :, :] # Flip up-down.\n", + " heightmap = heightmap[::-1, :] # Flip up-down.\n", + " return heightmap, colormap, xyzmap\n", + "\n", + "def xyz_to_pix(position):\n", + " \"\"\"Convert from 3D position to pixel location on heightmap.\"\"\"\n", + " u = int(np.round((BOUNDS[1, 1] - position[1]) / PIXEL_SIZE))\n", + " v = int(np.round((position[0] - BOUNDS[0, 0]) / PIXEL_SIZE))\n", + " return (u, v)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "1206d4cf-9401-4436-b44a-c34e0f8d2569", + "metadata": { + "cellView": "form", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 497 + }, + "id": "1206d4cf-9401-4436-b44a-c34e0f8d2569", + "outputId": "8de8e552-1a9d-4eaf-db6a-db5b91ba26ab" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Environment reset: done.\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "
" + ], + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAADHCAYAAADifRM/AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9ebhlZ1Xn/1nr3efcW7fGVFXmeQQSZZBBbBVw6lYR9RccQKEFW0UcQAW0pW0HFFtb/fVP7aeV1nYCtRvUFhvHaJsIiIAiKBAwJCSpqlQlVal5uPec/a71+2Otd99Tl7pVQAhVyXPf56nknnP2Ofvd717vGr7ru9YWd2dtrI21sTbWxqNr6NmewNpYG2tjbayNT/1YU+5rY22sjbXxKBxryn1trI21sTYehWNNua+NtbE21sajcKwp97WxNtbG2ngUjjXlvjbWxtpYG4/Csabcz/IQkStE5KiIlE/jOY+KyDWrfPYiEXnbw3DOV4vIr36qf3dtfOwQkVtF5FvO9jzONETkKhFxEelW+fwRJzOn21uf7vGIVO4icreInMiFvF9EfkNENpzteX08I+f+xe21u9/r7hvcvX665pDnu+vTdb4850+6+zmvcM7mSMP6zyJyXET2iMgviciWM3znR0XkDZ+uOX46xycrM2fTuJ2NvbXaeEQq9xzPcfcNwGcBTwF+6BP5ssR4JF//2ngUDRF5BfDTwKuAzcDTgSuBW0RkvMp3TunxfrrG2T7/2jjDcPdH3D/gbuCLZ17/DPCW/PvpwN8CB4H3Ac+aOe5W4LXA24ETwHXAi4C7gCPAR4FvzGNflMf9V+AQ8CHgi2Z+azPwP4DdwC7gJ4Ay8/m3Arfn736QMEKvByzPfRT4fuAqwIEO+Hrg71dc6/cCf5R/zwE/C9wL3A/8MrBulTW6Drgt574P+F8znzlwXf69Dfgj4DDwLuDHgbfNHPtY4BZgP/Bh4OtWOd9nA3tWrMH/A/xT/v2jwBtmPjvlfQK+APjnmeNuAd498/qtwFefbRn8FMvzppSHr1vx/gZgL/DNM2v4e8Ab8n59FzABpvn9983I+Y+n/B4B/gLYPvO7Xwl8INf+VuBxM599FvCP+b03Af8L+In87FnATuAH8l6/HjgPeEvO80D+fdmKPfefUrYOA28GtuZnTfa/KWV6H/AfZr67UmY+b0ZmdgAvOsVavhaowGKuyX/N9/8V8G5iP7wb+FcfzxxP8ft/CnzXivfeB9x8ir216n4l9uZz8+/Pze89O19/EfDehyxXZ1uwP8nNcDep3IHLU1B/HLgUeBD4ciIq+ZJ8ff7MTbwXuIlQppvzZj4mP78YuCn/fhHQE8p1RCjeQzOC+b+B1wHrgQtSMF6Sn30tofCfCgihaK9cOfcVAt4BC8Smun7m83cDz8u//wuhiLcCG4H/A/ynVdbod4H/kOswD3zezGezAvg/gTfmdXxGzvtt+dl6YhO9OOf3JGID3rjKOe8EvmTm9ZuAf79yo57uPgHriI25Pdf9/pzTxvzsBLDtbMvgp1ievzRlrTvFZ78J/O7MGk6Br851W8cKBTgj53cCN+QxtwI/lZ/dABzLNR8RDsZHgHH+uwd4eX52M2E8ZpV7T0QYc/nb24DnpuxuzHv+hyvmsitlaz3w+zNycFXK4q/kbz0BWCKNzQqZuZLYG8/PuW0DnrjKet4KfMvM662E4XlhyvHz8/W2M83xFL/9b4G3z7y+kTA2c6fYW6vuV+A1wC/m36/O+/XTM5/9/EOWq7Mt2J/kZribsMoHUxj/WwrHDwCvX3HsnwPfNHMTXzPz2fr8jeeywgMmlPt9gMy8964UkAtTCNfNfPZ84K9nzvny08z9lMo9X78B+OH8+/oU6AXCSBwDrp357ucAH13lPL8F/HdmvKiZz5wwOIVQFo+d+ewnWVbuXw+8dcV3Xwf8yCrn/Ang1/LvjTnfK/P1j7K8Uc90n95KKJanE17nGwkF+AVkJPBo+ge8ANizymc/Bdwys4Z/s+LzYV1n3rsV+KGZ198B/Fn+/R+BN858poRiexbwjPx7VubfxsnKfQLMn+ZanggcWDGXn5p5fWP+RpmR/VlP/10sOzOzMvODwP/+ONfzVk5W7i8E3rXimHeQnv/p5niK314p169tMu8n763T7lfCO29R7Z8B3wL8Xb6+jYwEHsq/RzLm/NXuvsXdr3T373D3E4R1/1oROdj+EaHcxTPf29H+cPdjhAL7dmC3iPyxiDx25thdnqud4x7gkjzPKL/TzvM6woOHiCbu/CSv63cIQwHwDYQXdJzwaheAf5g555/l+6ca308I2LtE5AMi8s2nOOZ8wpPZMfPePTN/Xwl89or1/EbgohmWz1EROToz95tFZI5Qzu9x99nfm/3d092n21hWNrcRm++Z+e+2Va73kTz2AdtXwbAvzs/b2HGKY0419sz8fZyAeCDkd7gn7m75m5fmZytlfuX59rr7YnshIgsi8joRuUdEDgN/A2xZwf5aKV8jIjI701xnx0PZUydd88w8Lj3THEXkl2fk/NXufgT4Y+B5eezzgd8+xTnPtF/fAdwgIhcSBvG3gMtFZDvwNGIdH9J4JCv3U40dhEe4Zebfenf/qZljZgUXd/9zd/8SYhN9iAgR27hURGTm9RWEN7+D8Ny3z5xnk7vfNDOPa1eZo6/yfhu3AOeLyBMJwfmdfH8fAUncNHPOzR5J5Y89ifsed/9Wd78EeAnw30TkuhWH7SXC7MtXXGMbO4DbVqznBnd/qS+zfDa0Obj7B4mN8WWEYfodTj3OdJ9WKvfbeHQr93cQ8nTz7JvJAPsy4K9m3l4pP2eSp5XjPsK4tnMIcf93EfmjlTJ/+clf/5jzvQJ4DPDZ7r6JuGcQjsWpfuMKIlqcNVgfzzjdnlo5Vs7xpGuemceumdennKO7f/uMnP9kfv67wPNF5HMIyPOvTzGH0+7XdNj+gYDA3u/uEyKf8H3Ane7+ia7Px4xHm3J/A/AcEfk3IlJEZF5EniUil53qYBG5UES+SkTWE5vrKJHwbOMC4GUiMhKRrwUeB/yJu+8m4IKfE5FNIqIicq2IPDO/96vAK0XkycnKuU5EmnDdD6zKg3X3KYFb/gyB1d2S7xtheP6LiFyQ879URP7NKtf2tTPXfYAQ+Nlrw4N++QfAj6YHdiOR3GrjLYR38cJcg5GIPFVEHrfa/AmF/nJik79plWPOdJ/+llAYTyPC6Q+QUQSfAo/mXBvufgj4MeAXReRLc52vIuConUTicrVxP3DVJ8D8eiPwbBH5IhEZEcp5iVjzdxDJyO8SkU5Evoq4B6cbGwkldlBEtgI/copjXiAiN4rIAoEn/55/4tTf3wa+WES+Lue2LR2gU42Ve+xPCDn+hvzu1xPQy1s+yTn+CSGPryGICrbygI9zv95GJMWbw3LritcPaTyqlLu77wC+ikhQ7CWs/atY/TqVsJT3EWyQZwIvnfn8nQTuvY/A1r7G3R/Mz/4tkYD6IKE8f4+EFdz9TXn87xCY
+R8SihoiK/9DGaq9cpV5/Q7wxcCb3L2fef8HiOTX32UI/JeEEjzVeCrwzoRM/ojIAZyKf/tdRBi8B/gN4NfbBxmC/msiBL0vj2nJtNXG7xLr+H9X8z7OdJ8SLnsP8IH0aCAUzz3u/sBpzv2IHe7+n4n1+Fkiyf9OYl2+yN2XTvPVZkAfFJH3fBzn+TCB8f8iIdfPIWjFk1zrm4F/R+SiXkAowNOd//8j8l37gL8joIeV4/WEbO0hPN2XnWmep5j3vUQC/hXEXn0vkYA91fh54GtE5ICI/ELu2a/I7z5IQJZfsUI+P+455v34A2KPrhadwpn3622EcfybVV4/pCEnw2trow0ReRGRlPm8sz2XtbE2ztYQkXcCv+zuv37Gg0/9/VuJpOg5W2n6SJjjJzMeVZ772lgba+OhDRF5pohclPDFNwGP59Te+No4x8dahdnaWBtrY3Y8huW6h7sIKHL32Z3S2vhkxsMGy4jIlxLYVwF+dQVjZW2sjUfkWJPrtfFIGQ+Lck+O678QVXA7iSrL5ydVbm2sjUfkWJPrtfFIGg8X5v404CPufldm4P8nwY5YG2vjkTzW5HptPGLGw4W5X8rJFV87CY7yMETk24BvA1i/fv2TH/vY2cLQh2E4J5dVnPqtR+A481UcP36cgwcOnFTZISK4Ow8++CBmRiucFhVuuvFGRqPRSb9hZtx7zz0cPHholdM57gKy2mxm5+n5SnDgxsc9jrnxXH7sPHD/Xu7bvRvEl7+a31AVrr32etavXzjtNc+Ou+++m3379n0qbvUZ5RpOlm0RefL8/DyOI5+UtDn4qou68sg4/yk/Of0PrHbEJ7xHVv1CfrD6JB/CWPmjJ0/ilFM644UtH+A44sQdFE9xDOltd9VPmkd+1WePjz2Hx3fascNfcvLXl99oO0Vy15w8ptMpfd+f8krOWkLV3f870fuEpzzlKf7ud7/rZCEeZMFzYQAEz+sVIa8/F1gALH5Dif8jGK3Hgg/LmToEk3aEIJanVsmjYlN5u8d5rKFxo4c5kUpIMAfNv9t/HWkfD5eFgFdHNG+Zg7hDq0NpX+iB4phITmF2g4QylXZ8Wx8Dkby4LAD/h3/8R978+79Pb4YiuAilK1SHX/v1/8GJoyewYhRXnvmMz+f1v/mbbNy0Ma/VEYF9D+zjxd/8Yt7+1rdRRmN6emTqVKto6ajqqEtcvxe8GIO0tv9ZrF+HUN0oIkDhD9/8Zq656moQZ//+Azz+iU9k83mbkBorbr3hqhR1nv+Cb+Tnf/4XKLmg4o7Lyr0qy6/deerTzlSH86kds7K9sLDgN9xwffT7QOgwzBUTR8SAgpjg4ggFxBF3DFCPqjOvjo5AXEKszeKCVamat98MVNMeCvHNgouDVRDHRcGdYhrCK4ITht07oThUE9QdV0ctKpqUdjoB70EUM1Ax3AuujtQm2w7imBaKC7iBSYijhpOAddBVfGqU0n5dMBfUDdMCHUivGBWpBip0xam94sXjd5BQmFJRE+hKrJFXxEIGe4eCoAa9hvNSKvQYIqAiufcEE4cqYE4nlV5iB/cOao6mcjYKpqBWaTvTzCld6IUqoJbGwBS0hl5xR12xYkheBw7UVPgjR6wDauxjPFSCCSpQ1bAKFKFUwYpz1x2rd2R4uGCZXZxcznsZJ5f6fsxw4iYNxirHsvOWfwigjrvhOC4CYsu1l9J+LX5IU3x91kynslYcQVEEVHCV4fSeG8k1zl0Hb5PYMG1izTLjqINbeMQCUJvynXE+AbEwRpIzFSyEi7wOz+stPjNPz7k5JuAiqLZrSQ9jxj54u7MGWAhvQUALTkFd+Id3vovp4hRXo1SHWvmSZ30JGzdtypWJ84Dw8ld8H29929uhCGZL4BUKdF0XVyGAGtUdtCLVsanjFcxCYYkLXXpApYTxFO35qZ947XB1htPXmobeEYm1VBzRwmWXXhoGqt0nkVzDmfs7Wy8oH+MSPZTxCcs1AJ1QsLjTolgXMqcUBEeLoxrHhEyDe6Wm8igdqMd3ZDD0gqhTLIyAkgpFoVfHxXH6QbYUUBcknZ/4vIaCA6Sv0WzKDJeKWpN2DXn3UKhWC14VB6wqvVS8uUhqaPM0jRBIgJIebG8IYQyKgWrBRKlusffc00BVhIqIIQpdSYeqlvxcURcoQqdOJx104YhgYWjMw8EpDuKGi1MEihuiRhmMYO679M41JbFHEYv91jm4KvHzYVTUHYi9RFGKFzz3rlQNI0w4WjU0NIhQcYrlovd5XhWKpM7RkAE0ZN9cmUr4anj6nvn/ZUN+6vFwKfd3A9eLyNUSDxp4HlElufpoLvIwV4+LbMJsilsoCzcBVQyouWkgnARHYpF8+aK16WJvHrmF95NKAkjBy9vteW5s0J0FBo3pHpvMpYVMDHMXzS+4Dy7PYK+8XafTIIn4rtJCr5DG/LtZhrDhmC9HGC168DZXT48dZjz5Gjsex6SmQXFKqVA6Dh89jDON9RGnKyBdi3Mk1wve+ta38u6//bsQQMLjQwsqRvVc4FQ8nUT05BbGR0RQL7gpSMHQjKaadAo7d++K9UT54zf/n/DUQvfH/e4cw9i0cRPf87KXL6/IjFzXvC/eJm0e7/ryKn8Kxicu14TnHRGcYEQ06CW98FKoFvevb44uoF2BsWIa976nUglXWqTgKqG0UkZRwySUoBqkBg/jJ4JLh4tgGt577IMSjkdqeHGQErIeRxjF+5ArdaBDunCEioczVlzCuIjm9WlcgIQBEZXwsgWsgHsfzqoJnt9R7QbHxCWVWw9uFfpwrETB1MD78LARutzGVkOhYgquuJY43iWNYDpoGl51bzG3ToxiETF5hn8uBSmOa/pENZ3E5uyVEgbTI6JWiQnEvNN4SjNAhksa2IyoIxqLPeuaZloBTYPRR3RdCsgotEQnoRM6E1SUIQw6w5M5HxblniXz30W0cb2daDH6gTN/cWY6QoZc7UUIiiqohhJTJH0f0nWZ8dLEZn9oCBfbRndtKjfaR2gnqc5TuYoMP0VCN8PUmlUe3ps5b24cXz4oTtvibFJp4UhrSZHHivhJRskRWneLIQLQ9rshUIggVXJ+y/FK6PvCYDRcsaoR8VjNtqBGIT0+F254zGfwgm96YZjLPO/b/vptfOs3fyv7D+1HtMPcqFbxiVFdsBIemKLghpuG8Ss6GLmINMIUl+KIxnu997g5Bw8dZtfO+xCHW2/9K04sHmfSL1GnfXgoJowo3HDD9Xkej0092DJPA5X3RCKMNwonWYCHOD5ZuZYU0Aj7HbGKmiFVkEk4J57ud1dCDsQikDUso7fBjw6/wcNzDgsKyAi0BwwpATW07RARQ8iSmlOLZGTUh/F0R02o5gGzeElxTm/FCmoKtRLtVqaYgqCoKK4SASIl7pcknNdbwBkYYqEgQSjSg4JpKLnWFsdF00ALpoJJiXmnEnRrkYvjJa9JS0SwEPG1BuwF4WmHWyBIJ4wqSM1F8XCWEMnotlJdwqCYpGZ3KKP4PQtvP/2YmCs26AsVoWRMKdS4T6G1IwoSzfmHLimDnvJ0y+OeFXEwxXqlr+k
diuNi9BKGUoFqPctY8qnHw1ah6u5/4u43uPu17v7aMx7fJpkurhPr63YS3jLz9/L70sIhAt8OPC4CrGXkd1n5xZElFPFgUJr/3ZKLDR+Jf2KxqdxsUHxtnt4uoE2rrbm0Y2wwSOD50ylYOaNw6Btm2o6NcLaFt9I89MEzy59IPWrerrIpuuaBxW9o6TGtuAkH9z3A7t0PsFTDcxOgmyts3LAhvl/gne9+Ny97xcs5cvwQgtNbjRWSEoa2QpdGAg98UVOkqlp656l8cwNYbiy1gCVA+dAHb+eWv/gzal+Z9IYZFFNUS3htHl7Xt7/0pYzn5+JcQ1gaFy3DOgrVExx4GGi+n6hcQyjBKuF9qoKiuCtSGqZaQ8nWCNtDhGoiU4alVnGpuIbM1cjwYa4ZtYRhreGyhlxqybxM4Psiqfy9ebEdqiFjkQOI9bRIHmEClcglGaHnRUKRi2f05WEwRMNACem8CJgK0RhJ8824V56gVOdO8SnV+9jN5oRd0YSYJH5XIreAWjo/YTxMwkGymETuoYx+1VA10IJ5wQyqW0YdpPHP9c4bE/smsHdr+TbzIYXnA+YOOBQjiQlg4kzd6WmGOFS4pF6IqDu8dTUhcqBOSQPkTjojjmhFsHD+POSnVAnjKJ4oRMlQZ3UZP7faD5hhHl6vkRchmpYSlt1gMNNleCJDz4ZziJRm8MKDgEH5h9Vo3q8OHnYo6UCaWzI1tQZYRxVJ/FdnVi1998xvBQwnsREHtS9hQHzZ81ANBe1psd0tlHie0jN0F2MZXGveqQWeaClYJLYZ8hrXYqRtwrFauW/nLrBKLwWsQ1TZf/gghw4doCNDYRVuuP76Ftew1E948x/+Ibvv28OJE4ss9uGNFIQuXSXvMrKoPfQTzAIvd3pkYtS+UvuYb3FBVBnriOKCmYSHmXCMAX/5f/+Sv/rzW9DAHiKBN9zbJtSR7GsBGUgEXzIsd5oMi431Mf36Pv3DJbBzsZx0CZjMXCP6kA6tAXW4wahCkYIyCvmfgvkI9y7gjLwo11ByauFBo55wSXiZbp6RrUGfEWkJGeoADaAHRAP/z3by0kHzYEwcpQa8Qo0lrl16zs0zNjTls6K4RZJYEgIKBWrQaaCOmka+SuyXiF0oGbGoJORhBtWgN0YtgZrGSc0pVTPad6wm/CdGoc4o2HDMtAXJeIRHwyaudG6xr91QcQqJd6vi6pgKIoqX5klFMhmB6iF/okansQerw0TTQKsNeSNPQ9lrhRLyWSHyfWJh9OlCJjSuVkVwU3rIXF6L4Ryrp3dezpn2AzLEnnETS254ybBKmXVNDdXMrtO+o4O73F4OypnA5qQQQuslFyppTC000mXvouoy5OPanOymuJc1ifsM6yV2SoZjeUT7T+KhkTBzpAyudSj/lD5pyRY8/59ZspynpAANccBgg9qsIonsmW2dTCe875/fi6ii4kgVtMRnRTrMJ3H9qnzhs74wF0t43S/9Er/yK6+L5GY1RkURjNF4ge/+npdRvPAbv/VrFC+Z4BZe85of40/+9E/5u3f+7WDtHOXZz/5y3vLHb2H/gw8OkUZIuSxDS0A/6Vm0CW6KjiRZMsEwePJTP4vPefrTUxZanqHh1s1zn0lguw7Ry9kfXXqYEQ1qhSJKTY+4iIdFkjp4ieaGeE+p4QGX3rCuoj5iKjXut9akA8jgsIajoFSzkHf1iAjN0SLU2jzelHdrAPkAYKToFYrXGeE3ihQ84Yplh9FTSVky2DQ8ZhfMK8XDgeokIZ8W3UqwPSJaFYpZOKMGdBHZ4IZ0krm0SAhbc9BGLUKJ/aJiCVMJ5unRN6cg9WFnhepGNaEzo0pET0NQIZpJY0e0RQdCUQsUoW+Rl+Q6WOxdAe/zetTQGobcHDrCORHpE8qJ71s6eW414SEySk9UoVruy4CKSib+3HRIgusZPJdzRrmH9tUM1cnMtcUiqjKLOTQmi2TybpYpYYAmE6NBArSFxBEvg1WE+NmgfoWhaLTDkni4DLic0egp4oHCa8IqLWsd+OHyJUnwEpff0JkoIhW7Q3Jhwyp7Mhny0uI9aZeeIZ5VVHOdZiKPmTNTMXSAeeLblr85R2XnvfcyqZMIDOjQvrB9+8VgcO+99/CmN/0BkzoNAyeJ4euIV7/61Xznd38ngvOKV708Qui8X6UbcfPNN2Ne0+AkA8KFW269hekDfShxhyIWuDyxuaZWue1vbgMXxutG4IVJnWAmdAoXbL+QCy7cntqwIhRqLmauRCrPFl77cE/O+rBgAdUiMdc+GRfFyIz0IAPu4S2S+KyNkpooJeXDM+fe7kvANUwzaSk6eJWxJiENwdQALSkvtUWvDr3hnQZsZjawNErSDK0rSJ85Gg0lJpnYrV5QrTThEgvjEv6JJjZvVCMcmHSgiji1fUegL1AslLdarFmwXEKRVVdEbTBkeFxbcaNOCoxqoBS1AgXN5KWLJrzkVK+Ia1Ay1VF6TISKoNKHQ5l6xgimjpgP6bu4Zk/PWpadOWWgDIuFo2cCnRMwmYQe8YSLwxg1jrymwyP4VCnFqM0pNXDtaRAypkCgG5Lst9OllM4dWGbwdmPxPMVStKQTvpzkFA9PXqoue3DpFWtm4wfnull6IReoYd6WUIYtJz9pKiKTIdKiAV/+wURbhrymhP304QbmeWiKXWYnEximyeD5SPKNMwZJRd7ey+GNt5Me00yWfOAp57pZvlMIzrlniIvkb6vQm3DXXXfTFWXUjdOTnzJdOs4Hb/8gL/53L+b22z+YSjM8t/HcOl79g6/mpd/+7cENFmXUzTHuRpSuC3aHgBaldCO0xPtdN6KMOl7zw69h08bNwQeuiTsn3qmm/ML/+/O84bd/k/n5dRRRUhuwfjxP6QqveNX3xf0oAB24JBQgQ3JVZtcMZu7B2R0mEUl1BmIlFFMhYAkJ7HSKY7nRxUpgxapQ04sVz3X39NUlZDj0NkLBS3zfMzoTS3kXwUtLAKZy74wqFghlaZh45ElUJbn2kRcp1oLPDJOG+orYB2rBElHRuBxvdPGEWbDw3HEGpprbchJc0kRL0DvFnIIFjt4UaPEkFwRe72Z4DdkvY0l6ZoSF4kalMCEZbdVQg06DQukZ3zZmTyQ3CxWo6uQtwhBMgysfOSulCkPiV0QHhpp7ieg/Ycams9Q9vHhbXrBwegS8BirlQEZoLoqUjNZF8akmvJysp1w0bylZX12+zxnlLpLwCO3/yzF1Q69nqLeApd7RZUWJEIGQDMUE3hRbfjXjmUwi5hdnks5CKJ8EUHDLELYp6eH8OkxMmvIuIMm7FG/CBjOnQWj4el5rS+CedLFZ4JIGq2GHNEvdKI9NKgj8NMRV0YbxpwIIxy0Z/V6RztEuNuONj38imzZtwgRe8MIX8szPfwbv/cf3Qm6QkHvhla96Ba985fcyGhVa4nq4dwk9eWL/jTk30DRxnvOcr+Tm5z6XagSpLzJuiEX4uXPXbqYTYzpZpO8rVWHrpi088WlPZuvW87jssitOXiMy6dV48NYkICZmTlLjziR5n4ahSpe3XNxCSVvQBiUT9moBFVZxXC
vWWBsacEVb8GqGuQy8Z0rmWHBKIOdIeuxVm9F3+mrLTBOP8L6rYbiNhMdSzsU0YTyLJRTPSCEomZ7H9VJp7gRdSwSXmC+NzhD5sSppbADpUmqLoEXyMlrhlkQtBYprF3MWD9Wrqaw96iCUgGatWhjLdN4s96BmxB3FV+lzlTAssekry56aRxTlgQyIJ6zhHRSnWIfXPvH+gKCqGbl6oLkWg/C3nERCbKqoQPGKerCH8BIMGGlTqnhvWA9ODbKCABZJcyuWiXEjYLS2v1YRu0+R+D70kUUuYpEVrs01SGsv4qs4YZ6Wf/kiA9LLytQBS7TBTZC8gaHKNT0SS7J0Ceua3og07z1ZSQCItZTWcMa0x8kkyCKGVM4NQ4+cqaaKksHbl/yNgGgYjFFQJiWYFvgAcehUlk/tJUP7bjn6kVmHNbBytwgxFcWmTp2ErzgSp/Z9GKiiTPoQfu8rKsbc/Jgf++Ef4TXx7ckAACAASURBVOXf8d1pRFu8mKfP/dHCS2nwmbcQlSH0/v5XvZJtmzdn+C7t7uEK2jm9Ge5KRRDrmVuY4+iBI7z0JS9l29aty+udXpVmblvUIkEVIU6uezME54B2d0OsZkWizzgIfcb6iohRxHLjazJqPDx7I2s3EsPVMOhBWYzCoYAkbPlyxcMDtIp6Cc/ZhFo1T5+RcR+hPh7wRBiWmGcvGe7VNKKVNEx1eB03vSBWgo2ThHWvwXZCw8ZaVskKUC0ikQgsmmynI5ZFSLhR+mThmAScSsKq0kXEIDmfYfeHd00tQRc1S+iqC5gLwTxpAZJGDMn8j1O8IgajGvFN7456DbNZKp2GAfVkrETqqsScLeefjBZLMkAvIZukEYhEdObdtDlusQdKMoLEnM4F83DWBoqvE7Cda6IYQXVebZwTmPvRxZ53fOhAhHNeUZUMe4wtC3NctX2MSQITHtVchiBeoQh7Dxn37Z8McIuhaFL1qjpzY+eGy+bDWJfwFkXDhZe07MdPGB+5f2lZTDJZU90pWTn3mMvmGJWgVCmaeHV4GnhQ+G7ftRh4poZldu+hA+ud6oVrzh+xaV1GG5KRhlkKhDFS5SN7TrC0FNibkJhrYEhgcP7mjm2bumRCCaI9bl0YDzEqyp79E/YfqVhdjKrRzD1UIpn7rC/8Qv7pn97LP//je+mtxySYDN1Y2LBwHhTj6MFDbNt+AU/+wufxnjuPhgCmkgjo0NmyueOSLaOEUsMLmk5Jhe4sTY07di9h6vSLhozHkboolrivsGFhPfPzY/bvPUAkqQrj8TyHDh5g08ZNfN4znsV7PnKA6TSuQWYSymaOKnQqPO7KuZnq32A99WpJjT17o21eJypRqys01osLUejS0QoiDAsHM4tXsIAKpAY8IckQUQlOd+1toCnihIIgKIoBL7ZIMvyAyPeEwR+cEAdNOmGmWJLTHdRZKct5cPeARLtimNRQ7hU8mRzVPWieEgaqI+TOk+FVSlAdUaGqIdXDmfOS2HtEwK24X3HwuJ9Weui7QCaTsy6erRPIqEgFrOBMwaJozpPaKZZKtenh9EQiiV1wr0w1WhaIeWSpMSaiyygtgmrsZ6QHGeFNP5GOo0ReqCRsUiSYNV4VsTrkhCIEC+MefQvKcj5PGs3bUdUkbxQg6g3ORBg4J5T70sS4895QHl4LZWQkgYot6yd4vw4XoyvOBeeNKBLZaiGKLzaOhSu2we27lpBaMO9ZrJVDRxtDQblj5zFAuHzbiC0btfmzIMrGOWXjgnL1+SkEOIeOOLv3LTEVpzNFOvjARxcxVx44uBSbpMJ1l61nfi657CjrimDSg1e68YgLNofrsmP/lH1HJ9yxc2ngvLcNB8KJyZSjE+cxF63D3RgpjDW5seRx0iOd8sChKTv2RiTiraJF4veyYQ69OxvmlUu3NAglFKlKhyo89jE38sTHP55de/fwxt99I0ePHKOa0wGf+aQncee/fIgtl23mZT/0cywtGROfcPHWjm2bC46wtFQ5cLSyeNy4e9ITZYLhZd2+Y5F+oK45U1PO3xIJuhe85BX84mtfzUgLU3p8ajzhpsezY9cODsh+nvCkp6Lj83jJS76NK665mqWpcOdhod9/NIylJByXntPF581BdSbFmZxQNPt7aOKaThjqsznKtDLpQE0bLIvWHpERRuCuZn3QXHtHRk4t2Raj9riXYM24YLWLhCkV9S56lTRuLTWw2ar4KDBz60ok9gtor6GsTKjeRSl+lxFUTTZKKhG1SDo2PNOz2NdKtEOgj1wCJsG/F4mirGQ/hQ6yqPTsM9nY2gi4Uy3htAo1iQrqkTxx9aDKjhsCk1gqEVV02uN9h5SArySZaCXoJIHReRfrrY54n4ncMBpeBe8sK0yDLjyEm0UG2mQjYARk1JL1vgyl2hShhEOVGHr0lirRhkQrKiWoywhFgzUsohSBPplHyhTzVhrsQ1EW7lgH0of8KhbrZi16kNNB7ueGcl8/rzztseuoCHfsmrJUDWpgx4snet5/79H0GJzzNo0SQzMwpSvKjZePKSrcdPk6WiOppUXnyGKlGztaJbFFR6zwgXuPUtWptaDmrF8QNiyU2Fzq2LRn6iVhFOfaywuj0SiExWDb+hJCkxBEbyBSGJdCVyp33lc5vNjTFWPfwRGu0X9jLAWVPgy2C6qVxSXl0u3C+rl1HJkY3kcCQC14z7EfM4PugaZedF7hvn3CtLchTwEM0JYnXHFssXLHoaWMFINX6x10ZUSnwvx4jptuuJYXvuAb+PXf+C2OL56gKtz+/vdRRvO86Ht+hquuvYnRKLyuBw5P2HMoKhiX+gmHjgX7RbVw0eYRF54Xwvb4q+eZVsd6RaTiKmzf0FFEuP/y85lbN4f0fXhinXP7h9+Plg28+Ht/lqc87al0c1sxhHsPemCmxdm+MOKai+bxRrmr0BVhy+aOpUkoimmN8F9UkFrYs3+JPUcmHFs6u5577UrQHbOJlkg0nwplDJhnEzXQccBe3gqJVNDqA+zVCaCxtgEfAjMsoaClArUb4EzPGgor2TiuxTVKKCGxSGBKJDUt+dTijnmXnOxQ5gWP3mHVkRLUwdS7YQQkoQzvhsgjcoOZgwrMNKmGkU8oZCGcBu9fyWrWGo3JJDFykQaleihwidYFIk7nzrQqNZlGRafRSqNKJKQbbVCTTJuwYSJEWImIVKwDseCnTzNhnWxQtygUi/5KhkhJWiNEcjRyExUbPG4vZPWvkbR/hr5tyX5qvym90aszpgML4+MQtM/egs2TeDuS0PVpYMdzQrmbGcdOVCjKtRcWLto6R7GCd+GUVjUOHXZ2758ms8USewuv9aN7T+AIh48aDx71zKY5Xadcd+mYYiEYJiB1wlUXzqU30I5VLjt/xLgEEnnf/hMcPCaUWrHi7HjAYDQJL3rqFI0+N7UIk37K7n0TFtYXrt4+hzhcsG3E+bVLzzrpY9nZzbKfhxRh28I89x+ecuCYcehQj3cRXpt1aGMdZ9Ve4KIR6i2scxbmPfsOhSctGoanOPTaR/jpcOTwHsiGTdMuaINFhdF4nL874jNveiLPefY+3
vSmPwB1Lr38cr7uhd/Jk572eMpIue7COao5O/cGphr5jxHbN5fsdQGqwoEliyhHjXsemHJiMY7ftGnE0RNBj9x2zWfxtc9/Iffvuos6rXzkrrvYcffdvPR7X87znveVXLltnKyBMEY79k2Z1ihcObY4jQIZDcaFjODw3sqHd0yZZhMmJDbvuINLto/YMK/LntBZGo5l9LSc4JVMRBrB4mhFaaEPNRP5BB6fzBExxWsYf9cOsywqEg2WmEaYbxpsEzeQYqhlSb5Hd82hWE81yvElCsrcHTELmqLWrOSsmDudl6gSdacUAlIRWy6kUhnoypZQEgBTQ2WE1BqefrKatZXcE2w1xRPuDI45kjmwPJf0yWl3oYjT08NEo54jq3aFiuT8qmsYIYnrLVT62qFS4vpN0OJYneDSBaQrAtpnsRxIF0luTxhMBDy7c4oXTJyJGYXcr6m1NWF+mbakZ0T1ZgGloU41DZ2AxT22SAqLhyHwjsg71JKpwITcRJEujKHKIwBzFxHGc4r0TlFhcTGLEKbpeaiwZUPH1g1h+b3Ept93uKZAOlLA1dkwt8wZB2fHngmCM3FYmgb9qhe45uIxW+YVMvl5vLdQIsD2DSO2bHCwUSYmwxua65RxFzfvw7uXOHIiuLkb1o8wd+7eO4kikjy/acsNhCV3hVqdE4vOYy+bo5+fcsl5Hd7CtnSeLClUeMW8pEAK0U1JGY+UO3cf5dhSYIi1K3QWmfwqDaIR3Cv3fOBfgk8cFVQ4EfZKZ2gZgRaOHTvOW299O0UFr8azPv8ZfNaNV3LFBXOIOItLEU4ujCJcvnvflH1He1ScjfMhQpbFXIUoElEqC+uyt40KS5PYMccO7edzP/vJ1P6J7N+3jyNHjrJ+vMDX3/yVnLeh49ikpxWtmXjQ1xLslSyeueP+CUePVVSNHmFOlU3rAvKwzFF0qkwyEXmWUZll9pAGDFiDMI14QizhpEc+pEUeWXE68Jqjxywlo0dVS1wWjD56xGXFdSinaThA1vDqhF8aldJTWYYGx0wYWFpSkvCRtcNSgD4MOVksJA7Wg45D+WnNBHA6Te6gJfeCRYIxSQGSCWBrBHdxevI+S4Mqgz4q2d7XS1YsV6hSKCRDpvHZNQsDaaU9lsnVGrx2F6Q41KDfihhuPeJRLxHtiwWrUbQk7tGLKXMNEUmVWLeM6IVI6Lr0EYm3YrqESCPPEiy1KlBab6nQ52HIMlHsJgG7lvBExJa/18om8YCUlNbIcOiDcspxTij340vOez+6CC6sGxWuuNApNbwak0JXYOMF42WGizqXbx9z+fmkHGWizdIL8BCmXQ9MOboUoejeI8ZiXaJWAVc+et8SVbMFrZMVr1Fc0FenJEXLgWu2L7BuBAtzxqXbO6rAdRfNYVLpWo8aAVoFnXqGkoXqjVGrHDxe2XFwiY/et8SHdi0iu/J6vHGXDWUUbVRbAxtfFliViEC2ri88/TEbGUlW7+VeqxmqRg2XgBl/fM8cDyaNWbNSMRrdKV0RRlo4MjnO3oP7AOOyy69kbn6OD+44wZ3HDkeDqhYVUDE6RCrqAYldvLXDgQs3zbF1ITeQteIZyUR4iGdvlVvvvAfBWTx+nD177ufv3/MeXvnKH2Sp28SeQ0vs2t9z+Fgk8XQkMPWsUpUBH67Zl94QihRGY+GCLSMUp1dHRVk3Vq48b4wJLIzPsuuuilrNjozRx76mkqPlXTLZV0cyJBRt4JNrslgC8zYqHdEYTawGdzt7gxuKWB9RaoLHce+TndWHsLgExKDqjXYePHURqheEiLpcoie8WVaR5u8FzFHCCcmW2y6RePXe0ZSWVpPiHkldkYjgHEO9pNLvKdnwuOuj7N806zQsf2+kA+PFvQZJoGU4vRXoJebv4eE7TmnQEcZINGs+wtO33GJaBSnR4MsSLqGxeFCKGrVGnQCQsEw4UFgU46k3flb66dm8DjLBSs32DMF60tQ73mVvesl8mC/n0VoEpEb4dZ0P+i5yHGEYVhvnhHJfmFOecOV8YMpSWDdqlp0IjxwePDYNTnAlS7/Ai7J+nTBXwkLet3/C/YctPRhnadEw6ZNRomyd6yKpI+CWfNz0aoOVopyYOkdPhBd83UXrWJhXFsYScuTG/qM9y61lC+qTyLKLJ9WwhCdFZMxRYfNCYceDPQ8cmjDte7ZuGAcjAgnWST6UIk+SBRlRZedaghWUGKgrHJ8Yuw9OWTduwsPgibWYRbo4/bROI/uO4zYF1ez5AVY92iyMlL7Gprju+uvo5sZsWBA2bhwHzOEONkayhbBbl56mcvBopYpw5MQJOlWu3t6xbUOBCvuPxzqEjDpmx4YN0tfKvn17wY3PfeYz2b4xKkfWj6Mvdi2g1fjQzgknMrzesM4ZaWCoxR2TORotc//RPjaIKa49h1TZf3hCR+HE5Oxi7ngmCRs/24OGV4unko4iG8QyKahkS0gk1w9rDDKnyAghZJQGaeQDNNwrUoSOQpWoimzkgaLBeXeItgAUzCZhfFpOwI3i0+CU4xjTpDk61Ts0nQ5JSqCL49VBosWCVM+cbQ81kveWfPLiAVM6Cpp0RcKBaw3lqiqWDcIEp1YdohvRYNORRXCx6wXryDXNIqBKKEfCKLVWHDX3LG7Z3kHTQcjcQwLsAyTWhXG1zOPFSmcuwOIc0RmkINonrB/w71BSpQ1GThKIRK6iNRMMcnKrJB+uKBOyCTdlNO5WIofVGEGPhN4yJ5aM99y1OMBH5r78hBQEOih94Ifnbxlz4eYuvIElWBhLxjjOJdvnuPj8MAZUSbpj+N+a2emArhQ88MqpB5TwkT1TFichbUeXImy+476lEHohb1UkfVqyo3h0k1fJsJYM4TRu0bqiXHfpHFvmhSu2dVy+rUsPK9NaHpCTVBJiaP5LeB5RGNSEIzwId9h/vPL3H1lkcRLJWXcPbq9E6bdkUk0c7j/UJ6MhkjGjTrHqWB+Ko22RMcpEekqnbN96Hp/zhKvYsmF9bPLsN45a4sF1gA18GkIvMzqpCpgYx5cqPcq+g8aew4t0R+/EpqFAjhw9ytvf/g4+8ynP4CN717PjyFGkUfFE8BLJwurBMnCHuVLC0HpQ/K67bMxcWeaEl+QNu8Rmq32hiLEwdxY9dycSdKZDV0BP2mJJqmLgzWHI1cIbBDIazLWPWJ/WddOJZCCukQzEB9YUDYrK9hPR/kToLWCXyDkGyyW8v+W5miY5UEKmNIucAnOOlhaB9NRwojyKagJC97S1kj2MEgPtsrMils1wI4o06QN5SkgvlHMfPH8NBaulPa8h4KbihFOWvWNEMi9XPQ1UtilIaLJmn5umrIONU/E+k9ie0aWC9ASrqDZWerhq0pS/OK1rbPPWvQqUMIbNOSvE95txjsc2xX2JB6FkhJW9fYpCLwm94QNrpo3oWhnFT6DB9nGH7vROyzmh3IFl7rL2bF03YmFcMOC89R1XXpAcbqIjoXQRxtDHzQ2FEAmnofN6CUGclEoxoleGRoh4tJ+y/wjsP9yzY99SeJbTMLxOdmpLG+rEU4Nq8tglqVnqkb0WE2qXOH96
Jh3CjVfMccn2MV0+dSczPgO+qpYt0ZLPXNvccx1ELEOXmp5XhHQAmxbGFD1B9eicV1WiWZNE1j+ez9HAkOh0F82JQgPPj7IxF9E87N49u2Ozm+ETZ8O6zWzaeB5OzRC2GZYI7a0URjUUOxo5C8+quyoaHSAFLj1/BCpcdp6z1Dsf+Ocp+44ZxxcX2bd/H8emR3jStTexbmFj8J1FQ+Gh+Xy3HtVgfZgZ+w7FHFRCwezZP6GX2ICdFh532VzIgUy5YHOwKGrrE3EWR6Q7wiA2vJfs6+3e5z2KexM5m9arfIp4cMJFC5rc5krAra1q2bPPi3rAgb0QXmE+2EIJ7FqnkZwPDzIUi0vIXcm8T95MPHF11VA6UdMRLPzYcyFfFaFoODrVQxGV7DIqSDw6zrtkn3jkjyRgkoJm24go+tFsKe3pI5MNtCKKj/0dMWwW/FSlddxMvY2QcEY85SUiA7JQrI+KcyOwzKBCR390r45kY7SaeQatHt1gu0jCRnAtWHL/XSzgotz3pBFq9Qs1cfWSIYp4PIHKJPIltcm6xVrUvH8OEeEpkZD16PyJDDW/dK70nP5hHeeEch/lnWktdfteYAyIc+DIlIPHFqkKJQt1gg5X0sMJ6pKlRfTEu8yiiIJR/FQtwk2XzjMaCVDovHL+hsL2DRsipCKVdm3ZfsW7KerRWL9HIgR2ZyJKR1AUG/RnwKR39h/p2bV/wgOHjAcOL8X8tGaIa0H7qtlJUSKhYt6jms/SxLNPWkP8Q0Dc85mLVbhs+5jPuGIhhcOTfhaMGAJpjKhHnbfuHHP4gSZyHvRRGaNdh3axaf/mL25BvLJ5YRNXXHNNnNeM9+9YZNLDxJxRU+4OaOQ1RIS+MQkgsdeg6NEocnlf+8Uj7H9wLyJw7NgR7r7rLsxGfNm//gqecNU6XDyT1bExq0exUivkCEPUp6eVhtDBNTDqSDImFimKo3QsVy6e1eGhrKMbdLJAwi2naCqFCjRjnw3m1OM6DGGUDrznU42CfthyHLHWRmttO7jrA5RpmbA0avSgGYANy2KlTOJSB6dRiESf44Myn13P5IBEItKc4krteswKXhztM5pAhucBe9IaNeERSYeX6gnFJJfcPHI7rZeKeyrFEjmFmolHbVW3yRwqhtcSiUp1ICjVUWjbqISaHQcCVnGXodWG5p5SzScrGZn0rUMrAMmGbaIx15pOiRVDazyfVaULpR7pCDrPfEQN42ktT5etCrJOMvIdavHclXxICIAUySdDGS4dFZt17k85zgnlPu2jaqz1FDqy1HN8Et5ga7wgOgJZwkVwK6gtJT0o8DzNGxVhmXPJ9o6RK1s3Fy7aFFWRmjTK+VKY26JRbenQuk4mEgKpyPG54alBkciJ8vig2wnLbQaEex+YsPPBCQdPOH0t7D7YZ6fJPpJXAB4bAO8hnz1KbYkyG7jO0le806ESFOujDNuj/S3Ak65dz2gULAbPxkSaXlXyqDCpjLPYwZJyZiWfnjRS5kah/vo6YVorW9cvcNlFF3LRJReAOjddMc/dDyyx69CUB/f3QBdPgpEZpEDyYc8elXcRVirWx6Y2jRQJi/sQh0lfWVyasOOeHWyYW4CNF/O+ncfoRLnh4o7SdaxfVzh/QzwqzTOJli4eDH5dtmudNMVllBJ0tDCMQSZtz/Q8ayPtXOcFuuRPU+glbWEWHRXNHi2lmzFFyZrSeAiEeCTvHc+e66GopCGGIq1xYCqxqNWIDG3FzelcMc8oTzwLfeLYJKMEY8Mj6aoG8bDpBgsF5GIZrSkjRPqIkMzBSjpakjBbF8pUJ3jpkD6pjt6n5xztBMTSm9UaVZxIJHNtGSDx9niq1u7ba9TOpQ4UfEjUtWfwSGsk6ETLC49pFo0cm9XMwQ0RU3C1rA7IaDpPmlGPgPR4aYyi4Om3ZwdUoHShT8IW5FpbaQ8aQMyH5Lom+8WUWF+DUoVa6qCb4tkPEdWodLRuVNEKf3XZPieUO+SNGZWk+uRiEewAtXyuYuJYI4mWpGaGlMiCz5WgVN141Xrm1di0XuPReTURMMnS+8TxvE+vt3OkLxlRSRZrpPOpMTMhHhcWqivbtjpMp3DoROX2+xY5dCy/KHl8yYROIBvhKWhsqPAQpmDR39pU6bSHmo9i65aTL24aX0wtEQ23JA1hHWAdTXZFIZgVAtikUqdLWegUuGfxSCCXGhBL0eRWq2RvC+H6x14XN6UoV184zyVbx+w/1nP7ziWOLwXXWrvAOVtCE+nT8MZOaw8bb6Xj3eI+TAo+mbJ4/DgfuuMOXvgdr6FaJrjoef8OEJkw0sKGeTh/65hLtkTPnA2jMCRCuHrRFTDCfvp84pbHU4Q6msJKhXOWR3QXTOXoCTuIRe8V7zPcjkSxpD9cG4bt2QtGs7CJwNXFNCm0kVw0ibiweHvkoFCJVh5ChpbSMO9IzA3FPNK8yWVZjfVsGjKMUNFM/HcSUYMI0FNFuGQrIPHkIS/pBCR+H9jzCFdl3wGjnxo1XXbvwashHfTZklcL0ZvGwbWnZm+cwKwjSaptvjo4v6gEBNprMHEiQmhFRsIURy2rSL1jKtBJePya+qE93EQ0EXcVVKIaWKpnYpkkOwS8Gvch9r+2iAroE1qBUM5aAdF81GtFpi3RrrQ2DxlbxQO3ifyUp2yEI5vKPJk0cpqo9JxQ7hFu5qJ6tuzUEGDJ6sig/EbZcF8dxs5jLppDVNiyoFy4aQ4vNaGNXNFsqB+tDDpI7Nvd4gn0NVkuAq0ZUDzENxS5ZLhqHhl3s4q6U1F275+wc9+EvUdC5Qe/uAcpeCaavCVqam5GITvWpUTGJBNT1uF8nk+GqZbPwc0eLNHYpmf3YecmG8VT3zNZViU8GQgvGnXu3bGDu3beS7QFSzw1TxmtjwWn0HUjqjgTyyckNQqeRT/t+RFcsmXMhVvGfHTPIjv2Tjk8if4YsdY97TkyWiK/4ZIKTBxZOhB4YoUjxw7zoX+5g6uvvpHrbnwKHdGrx7VEtsNh0hsHj1X2nZjyoXsFlcI1F45REbZuhAs2zgdOm/QzSVxdPB4KQUYp2qz0WR5dF0o78iYJYlVJFkkwQEon2TCs1WnEfXSyAM4AiTqGOMriuhtM6YIUw6aSve/CG8fAOoE++oKLxm9EXZwhXSglJ6qX1bIRlpDRtCNdOADu8bg3MpchHkZ2vOD85+/bzvr5tEeWPd8pQU8uHt6/GD/wcwe4c1dGGSaZ3M2kp5B1K7rcydFKuldxfdR09rKNhlpWpZIV3RK1J+0pT/GYQCcS05ItiSMyKUQhljAKCGVIDLeqANLKRTgUbQYKxiiyYBl5Jf5FkfD8IWDVlrAdCqGkDg6gWlAlhUikY9EorOTDgHpPWFmiaKu6RH6rZk2PhHM1+8zlj5G7h0WaP8GxaZ3yBU/YMMAWfQelktWo0RVxugTHlyo7Dy6y90BgUjsfCNL/Tno+6OGherJKIkGYSrcY0pfk+zp4wB41Czei1CaTFY0KRVL4gOGhAQJb1hUef/UC2zd2bFkfvbipPaIdzgTVUVC5KpEU8RDkXloSKwUbQnm6Z2V
iuE1R8SfhSdQQACU3jPSIwgP7jHd9eILpBM8H93rbpNkmVqpw5MHFnEM0UPKx4N4hpvTxRATGc4WbX/wqTvzyz7Bh4xin8rYPHmM08qCLEcYDoPWQXuqdbetH3HjFOsZdRlQSeGAUoVSgQ6pRi3LvR3ez62hP78bidIr7lK//mi/nuV9wbXh6WajWmTN16Cw5CSrh0UQvRNyUcenj+hw+vPMEi1PhwLFphvFJs3NDPPj4hnLkxOmLPR72YTVqKErrGlqDGZEPlRFRvI9Vrl1SH1N+JMvlqSCjdUi/GLBg1mu4JN00w/32m0GbIzFlR7UEb96jOjP8nw6z9v0sqGnYck3FmzBMEaE2TzMVNZ4P4W50PoLzncBGUCIzYWha6bKlggr0rlS3eCC4NapxwjGRJIioJCmZ1RypPUUVSpdGMCJhIRCqXgGRLOSr8ZQwSWNZk+YI4fxmrkOsRESjgddEXUc+NATwqVGLM4cGlJYtm0l2nGk0kkBS/gxaaa03XCf1iZVCnURxVUkDXYdozSgaCeOSD7uJqSbdNDPpDZ6VPquCz3XPXUXYsG5MreGNzFPpHawGHZHeOHzc2LsYYWbQGYVjfR9Pbwk9FY8j8z6fpmS0LtEqTrVMWpJCUyOEt/Y4MTLkTeinQTm9Rt8KVeGGy+a59LwRpRSkGGOXYAV4a3c7Tk59iQdhWj7lhWYgFOkE7Wv2jShtNrHpCKgklLnj44CL3LOEXEZIgRMbe5buP8FSy2F7CwAAIABJREFUDa9GfYotamJ3lrAPLC6RvToKrsIIYW5e6UYd4zKiFGXqC2y/6nq++0d+iRN73kMv8yxZx2RiQ3EFNXIiUqBYPFx5/7Ep/3KfcvnWEReeF3ht6fJB0B5P9XEvmMDll17CSIT3vff9bNu2jZ/+6Z9l08YNkHmLiCp6vHTMC7gaWIl8RVeS1kpi6MGPrsANly+gLux6cEItzsHDU3bsSwjOKmo9Lt1QrHJWhpMJPE9lI+GFlYh5wvxEMvj/Z+7dYm3Nsvuu3xjz+9ba++xzq+upS9+73b4k3U4H24CxSNtIISREKDzkCQmkSAFeEPBCXvKCkMhrQDxgJFAUhBR4CKAEgaPcAIkoVi7Gsbttt7u6q7pu51Sd276u9X1zDB7+Y65dRu5qfAmnVqtVl1Nnn73Xmt+YY/xvQ8utG+nJdPslbr7yo4LegFwXOL6N7S9KeRE8/fY/KB6mbPs5skqMyeJQWK0BqXPeTZ+V7uNecf/BFIIfyDFjZMFqVtB+x2aroa9wkCz+vJiN1RpTxiEzvWdCU4iWH9Rf1URlVDiZOKwWmoilFJYh0cpAxeq0NsbqmsoLurHS548YBEY9tTggcq13RuaN5OGjXdGzR1JdckJUcUUTbM6K+V3TD0ofy07GREyCaWJVY2ZeJH60WiVQUkWjJrEGbb3mvzr4WOeHGk8NLJp60gTnREjWbUMimaksmo/B2+ETUtzTtALrweMAW3nn8cqDRwuZTeaacVA9oWtk0aahpge4+wH/itLA+mTkUpkersOVSxwswhG1g91DhbeCjsIoBQMQyZ2Nc+/uzA+/dsTcAFy8fFLdji6aw67fumyGjbgirQualt5ZGBqqwNEG3K+uHmXWDHMDpRBZh642jeduTEzNWWoFUWDYFBX7WyP1HGgjfG1S98bak02ixdSu9y/aEcaMHc+cfOZfQHOrCqnCo4JojchgGnBZFwdw/9GOD57uufWe8cOvHfPSrQ1zS7rNhC1Yisi+c/sud2/d4guf+yzpivMFmV9a06ibpVUfDxs1nuZqhHe8awl00KE1GcaKYH39JS11+PTdxpc/JQz5197esds19kseuqBn8jIOKpE1JTeVykLfu5vIb6tAsfn517lx70eIo5vsry7ZXTzW10kjllPA8KlxcutFbn/lTzKncf7OP2T34ZtE7CCN1kIywSZZoGjzqLqYpfqogF4rDbqrqPbF8GliAEiZKL4X7Q1ITNCByaehCqTbc6pJ2cuspVukH4LHsjm0OBC6dpjMmvJUepCzICLzWR+bdWwqieRH4dO1Jl7k3m6WKqg9C1ZsRVx2opy0bp3oBflEL2OSvlXrlbtj5Wzl+nrzAeVKISnPQokXvExSY7pf1VNK2uhjmm0EXXtwvVWcRpQSyEnWcvqmJjJhm3irCcKHNPS6BVWH808xfsDMvgOc1klYM/MnzOx54K8AnwO+A/zpzHz0cV/n4ir5+79xwYPHXYqLkhrJqJDlOqwDAuXGREW/XHpQffqoDxFljJhq56g+mM46Bi4RRBUTkIXbDYnfdkq++MoRL92euXmjRmXsQOw2wFerMRC58ULKX421+u+GqWdCD6/Ye2PYhyUy0KXipQ75LURJ10ovrwNuBvdPd+xWKQR8SAZXDvxBALFfefj+PxHWG1IQubc6+IHZyn63wzaXtNoP6z7rT20JI/QolRPj2Virm7Ii9aiH7PQCfvE3znnpzsJn7828cDOZZ12W1lOqCW/MR3Iht26srXL7GaveFMA0oKBh5BLcWZkeXqRSKGXTChuW/EHX0dGkJ/arnzmGCK5Wflcmpt+vsw3UoVWn6940oRbkWIZ07OgFbn36a+T2BqdXp8STJ/RIZlfWeQskR3Sw/cLZo/cB4+j4FtvP/SSbez/C5bu/Sn/4HRGvJlmfpOI672lFuKIIg6juUBPjWNwNkb3gETlSDaevWdOwTr9UI0m62JYWRf6XXwGigrIkA7VwBZutjtO1YXCAfsVNZSA4roxP2twkiM7HfzskkWmVD6/npMe1gY+KvrY5iF5njDL31a7mtFaacq3SA0UveJmjumksKW+vjFqUaaneQd1tWVxXTTg1VnbTuR+RkpEw1fdtQ8OYyDE1jkkmOTW8i68jpo9AokOGOiTCSEnzMcfu96Nz/9nM/OAj//zngL+ZmX/BzP5c/fN/9HFfYO3JB0/2JTsSMdKL5U7TN5llv54oUhIUUJWKEZjTSj6smNChohw3qvVWXXqjt8G218HxDl0d9NEGXntx4suv3WDjelN7Cic06Z5Kqy4Mbi62vdlCuuCPIX4Pgpa1bKMn2Rpj65NhmHeykomETyI8tQqWyE00kgJpKz3HGj3Js7r3MrmUndqd2J3x7jf/LucP35IOOK06gs7F5Tm75YLLy1vgp8yPHnNzhZN7P1rLDERExihGpk446vtr1TuM6VBwsI7YB09WHpzuee6o8fl7W157aa60wYLJQhNW1yYK1oKMtM5sU/EIGpuhy8uQ1D7NARdIwtr7MGnVQQ9j4zozkV7LIoytp3wUz+hs6yNVR5sJRMenWryQWiW3ffkLTHc+z+VywfLwXTIdr7E7mn62sKiCLbxXK9ec3eUT9sue7eaYW1/6Gc6/3bj64FsHwtBXWJsepGGQUVESDCGDndfGoZomwtFWJWBKqaKa1utZUHEDalBYnJwpCaEKYVS++kSRplCS4SxXaenHR4eAoB2antcRvBLdpWZLTZIHHfw4fD62VZX7l7zu5Bvsaw0eWHXtrdb69cqT0SXXDlr+rgKvrd7C+dPxSeCZG9hazcgg8c
vyaKZJKc21YIusCw3oQWsjopeKGq7nq8d1vEOTOTJbokz4FYtJ+TOArzJwZunf4xnAMv8a8PX6+78E/B1+wAMgwsMPGlOpW7xcdspYcMRMZ1mVcaeltLstjIW5cK3rWz/rIF3nNAw0SzhfljSp58o0BV969YjXX3BObmxEbLeKTQ11icOFpt8WZX4Ya8sSCH1wNsZTBHGkQ3O8C2JJxpos9Hu8uoMB94QKn1mpb4C+T95+710+fPCA/+wv/kUePl1Jh3mzZbdbcVv5+p/8M3zxR3+S9eKcq8fvkClterMs+qYuxSW4ygu22y1+bKyn98mXfoyR6JfDBl3/jzrsPh5u0zhN4byeySpgmMzg0bnx5LuXvPFg4Uc+NfPSrW01rxW7HDLzZMW65uTS2/hwW2ZlQqU+J09gYupdJ8NgMRPx2qiHvY0AxPoxxs873uffl9fv+GwD6tpNF1E30z7QbKQ7Nz7/z9OOnuf0ybtE75hNhdsG7o6zoW0VVbv0hb4sZOyJdCbXKJ+xZ7+srI87J5/9KRK4vP8tvJZKe8UeFB1TJKT+N/LTU+vCakOWyFc31dk1BB9lroRr78EI6OpTKxJR0Gcn6KszOaxm1+fO0HkqTXraitkRmSF3bCQ5SUxA6vm1gua0T9WZerC65JKZBku/FhIMuEajvZ7LQJNi4Z4+hvw4PK0ln+UQXaEUVqm2piIve8gZrOfa1eAYWK/LqqTKnh2rnKkBoiQmRVIfmp+6l0C8W5qmn9TXE4fXNImaGlIl/0NaiHAdmTMlq/1+r99rcU/gF0yz0X+ZmT8P3MvMd+vX3wPu/aAvYlXSzYxlyCHJijGVDtSL4FyAFuVis7rxAJ+HCSrJJWipg5JuNaoZljNMsskrTqlDdp67fcRnX2p89qWNSvXaRX6uXissdZlEry4gteC5pVX4mDIsPApf74bNdUBrMoOoTmFsc8nqTFNZLY2SPEqnTlJaXfjWG2/w1//Hv8Zf/st/iYhON32VPXK+DUXN//Y//Bf8O3/+v1EWRRb2WAqiNH3PN05ukH1lu93y3O273Lh5l+n2q6xZSgjGaCop1ggr8zSirSr8tR+zl5NSJiaZK4w2hAQ8Olv5x98OPvVi8vl7W7YtSoYmx64XzJRl3hgLkyuIXIWljCpMUVikTClzmc5ycZrDksrZt74eoiZGDvfvEnP/fTnbcE2a5tA97yfMg81zL8PxTU4fvkealGJLJXtmOtNyxt1P/WHayS12p/dpyxVX54/Iy4ew38ON2+SN53V/dSOWPbv9KSdf+mnMgssHb3C9YKOgQKuzGFleN6PXIhfD5R8hJRqIzhombLjgRGvF2YRwFGfFW1P0RwHYXpOddPXVTPVhaFNlm5iVGok8EYfiHzY8ePQMVnc2GDaguDJbRUPdeK0nNMojUmKKJfb1vciX0SfAUxlVJNmMtWz/zfV7whptQFjVyDUrDwUcTF5Zz7AVmSyTYQOaFHh0zGYiOx69DGB6tkWG5uHMi0fwSgCVM9ZWJGZoes96Ji2KZBXLStaF9HFn+/da3H8mM982s5eBv2Fm3/zoL2Zm2gDF/l8vM/uzwJ8FeOne69y87TRX9+UppjtG1CvQY1asQBEcCscStufl/os+0VvXG2GVp1IKC085NjXKNLInZ2fJl1475u7tiU1rPDwdsqY4dJjpDrEXTk7jEBxkdeDMDpilICC56nzEkRpExRToDIsoVs2pDiARgZi95F+hbA+SD9//Hv/hv/8f8N03vnOI02VWBzz1jvWJebPhs1/6Kvurp/znf/7f4Gd/9o+wPTIpRWo8h8Z2dra+pd044WSzYTtvsRkevfNNPnz8i/zwT/wrHN28I5Kt6/1PRGxZBmFT7Ye97gLTk7BkDiNyI32z5eF9agn3n+w5v1r43Mszt29uOL8sOIouhQhGM439RHUkdSEekgxcTtyhQmt95KJ3tH5M0rdsgbWOdZfZxAX7PauzPc9zEeHOyKOnwXT8AtvXfpyzx4+0N1h9I95FuG9yx8krP8T25i3moxM2R5/l8ul9IMjpiFh39Ee/ycmrXwYmLp4+xID14oKrtXPrS/8ieXnG7vQDQNh4mIvPCRSfvFbh9lKGZDL2dI4usrmimt2lCPOKSZDEh0qc6DCVqqTaY+0WsNKzg0/JIctmQJeFeSsITmqSGGe8vvZUGnySIn8T747c+XOxZ9Us1ZnJrjyXATPRxAngyNVMffvFJw1tnZlpwxr6bb2IUoZYuo8/IysOhfoZmrrrUMNoLhdwhhPTkOaqVpGCmM29InwD1pWcpjr7jXXqgmpQtPZCoUCm+mRe7McPONa/p+KemW/XX++b2V8Ffgp438xezcx3zexV4P73+b0/D/w8wNe+9rX8+o/dFjZcRS/RyCmnqGkvQFJtcEV7Vd6CL422CaJDmMZGrxHGSzYEIlt7JKe75M13rzg9h19/ZwfvrkCU1T2xKVQYUrd0mEKfMozZSgpmGpNMF3xh0sZcOHDmCNRCg+kE3r1I3lr+O1yCkYoBNWBVcRyY31//b/8r3nn3TdrGWGsbvK+GTUabG20zQzPe/s6vsN+f01f45q/8I776z/yhGhYlNU2rSmdKT+zu9DZxZMZydcXl+UP+0Oe2vPjSCYPEsoLELBoTiqzNMl6YS89MDIKOulBHZwcctgYlUVPK1R4+OO1843sXAIcIGqrrovTGAy7ATJECJnGRIUkmJsihNS2cPhTOggw6yiLKDM6vfudayN+vs33jxo0s50xBewkNjr/wNfa7HbGsmjIGdODG8dFd2u07zDduYpOidZs78/amBGPHK5HB9t4P0zcTsTtj2h7T+woZxBrsr3Ycf/oPsP/m3y2ZLYf2M0KSw2Vg35M2DqR1mXkYkkHpbJpVp7PUGzzyzKmAM4xddub0InOLHwlnRT+zd3E/PTQ16KN1SZnr1rCCo1Suh71fGnmLdiiM3bRtKbNjOcm01Q3W0HKL1FSfxbkN663HgEJcMsbKahjvja2SPpIiwayarQMUWdPBjC5DR8//arUP1fuBX9J/XrAg482/hmL0s+pCyRbMBV9lDB09cnkbQgjG18oBtR2Upd/39bsu7mZ2Anhmntbf/1HgPwb+Z+DfBP5C/fV/+sFfTQcg6aRPtR7RWMOVFrjqw9aA1Mt9yGGRLdteSzgkrSOipHzqWMf48+Hjhe98uOfB0866dprp3vYRyZo6ZBGVN4G6cUvBM62nYJgaS6V60a2aLsJjdDUjb2XsbPSSOZIKMbOK71SmTUJ2zBT7OxdRYiae4aVPfRnWPe+++S2N2HMrklUj9ca3rLljv+hgvfv+O/x4//HC5UpE5dBMuvrEyUmQzZOzpzw6e8LNm3eureoUX1BdzMEN6mDWSWaNn9Vd9dnILvcuTPRV0coaVdWRtehEaxxvO1965YhPvXDEr7+75/2HV+wug2xK3ss19HtLBd5TKmp9PfTZlu49XfHFWBmBUOzr2psiJqaV1r0ezGdztjX1qCHoGEzG5u5r+NFLXD16Q5MbwrTbzRO8G8cvvEg7usHcZswnWoiEnqcNfus5vC84M
PkxYcHp5WP6siOylm4n7C9OuXnnU8w37rI/faQPyvT+WCtsmlIghWSCNjT4Gag0KCXUvIrbHJBjNR8FqjjLZfDv/aePMJ+I3it0S52nstVdRPkKl5fi15LEpmSdUPKjDetPLRHvhW9nahm2Cw+3UQlWJUn2ujjSkMQ0l4MunFjEcVk9t4sc7o7L8eTVXaOGwAq3HzEW2Gha9P17wTUydokHyMY1v0dWMqcaGtxL4kg51lXXRnatjnOpkQyGSlANTBLN8V4T7bgQUtNBNL8Oifs+r99L534P+KslpJ+A/y4z/1cz+0XgvzezPwN8F/jTP/hL6fZ3a3QEnbhXlnI4aQs9m7CqbmCt5hQ5PCVJ1IcdSxzc/WGK3Ty7Cr759iUPHi+sXaE9JmqesV6LQ4ePDu7Q8QqnKaWMLiBjujYb1ajl5UYTmVKEqSe1UU0pbgVB5NjlMRLvgGalLK9ClKYOPwze/e43GXsqxyKAWI1pdtrUiL5yte71rXtivYlz0DHAcy0so8weNDyNZV149PgR+93KZtqoUw8pH/wgfh7HpwqozIOaQHL4AYbCR/xGGwd8YMfR6RMizurrbdz4yuszn3upcf/hnl97b60FCrots8ZdryLkWaNoJXIGVBysXJW9H5r+axdf2MGT8KzOdtb/vbq+5sbJ61/h4vQDMvvh59scn3DywuvM8xHz0bEKjOtGWztYL4UVE3ObtELOGtZXbNfpuzN6djbHz+n8W7Ls9xy/+hXW8/+DcJdxL6vTcSnPclz2fSXNa7+A11mCPmdBmoblVP9evFMekh2N0/MkYs/IKbcafYd+n1adajXGJKWkSanfQpNsMCAiNRW91CZuguoCEbWUs5sxlM3KZlkryylilYIsFAdidZw1actvMI5FjmbCqEUdDrR6JvW9SighEeSQ9/to+Qs6Hpi/l0IuSvjT698rosTqQGgdZbMtQT80gll/ZgvBT5llToykW2MiMJuuV2d+zOt3Xdwz89vAj/82//5D4F/6nX49MdXKax+kxYg+FZq3iEB1ZE6aFA7loVvSywAgD47u9zWCdx/v+NY7O55eBVMVxxbCiXsF/QhfHGOVH9CBkZEdGVUwSi1TuRF6Lq0yTgo8T3X8gyAOTATpSDfMqC5zxmIBKqi3dikaIt4yg/PTD3n47lsiu7KCj5rJPj5LFrJE0pc9vhfe2cIPZiwqbGqMp5kwz87cJozO6eOnnJ6ecjzfYN5umaZZHdXAZkt+kFBLvoe/ItUZ9yKeS1etB7a03LFKVZBRxGuFfRWp5CUlu3XcuPnKEdvjhbfvL9x/sihMyhvNNE73j6gCnCKvvcKyMmrE12cxPn3lrpQu/HfWuP++nm0BGfqecMjudD9md/6OiMTq+ubtLdrcmI5v4Daz2sq1/KdgymagbaOltuhEX6A1ujXs8XchO+3uq2Cwxp5br/8o52//MnHx+NBspGnUjDbCwur6zg4+SS4Y+jBbxSt7GUik7Eomn1hCP10Pra9z4hC73Xy9zmlxI2Mls1XhB2qxTFTxt/EMRK8oAsEXcnO24reiLgHASx446Xqyrp9lSk32il+ojHQLWnFYWnMXSujsMmuZe8Vh6hEWgSzFnl76BKeE1WoaL8VNhjp+wYZ6hqUIkgoGtI+A4oGyTFvEuFzX6v7y0M8LXpRaqdfUMXXBUd2VS+MJ8QOq9yfDoUrJHJEMz0lkVKlHI1utvEo5vmYF/rQ0tGu0pFHon/fd+PBx5zce7Hj8aGUypHYJqruW3HJChdCb3GQK7B84eoX5ZxWVKGmXCZHGpECwemB9FFSKM6hCh4P1+aC3H6LfzF6mq/payIFJFeH05NHDh7z9zrcQgSNsu5VEzCuXJtP1nngyA4tbOf80Tk4WCpuat0Aj0um9sz/bcbnfM9sWt+SHPv9FTm6eKFPaFUUqF58fdn5SDwuEDmrIFBVWSgp0iQy4MUtOJ6esLjvJyOQmtNDiirTkM89vePnmzJOzlX/yzo7znWSuUQqTofwJOV1Y6XhoNI1JD9vgQbB6QIade5hGnsFL11JdlJm07bY+44FlN3y9wOcJa8ei7GtqdKD3lXQ/dIy417MyuApjOjnm6Oom+3gFrh5h/hkyV2zdsy47rVy0qGjwSuE0rcNrHykqUZyTl4lHaaKCMQaYkCM4rKADdf1Z8JLD5AfDla0KKisihsGj4slQAmfBlx1tdjLTYg8K5vdB3mJ4L86iuJ6x2EZ/hL6PQLe5AhaHWWw0+FVDsgLymhaMjJ3DPjp106S1mulS8yGxrKYvE60RFETgffxMeXC9jpntMI2uiuSI7nR3mgnPF2qjy61FJXuWCkcGsar9wydRRLF5wDJYtd/+9Yko7kLGBRmwKi53YWSOUEoSdRHSfauULJG4OVOZegC+9V7w9sM9T84WybDM6LbqeEayNsmNIkT+aMO5yTlauFk2K3ddvflV3JJJI6JX4U77iKGj4gGsWoDicVox+APztNq4Y1GruZyyM+fBkKELoPGZz/0QX/3az/EP/ve/pl93GPEAh2mgxrVqyWgGn/7i5zg5OpYbd3GwzsaaMPW+soSx3++JpXPz1g0a8NyLd7B5IvehFEBHTkpa5aPr8V6hOqrSd7RUdxfG2pI5YSTqmNeCktCUkYO0tvFAysVnZvQOm6nx0nPJzz53k7c+2PP2g4X7F4umnWyVjBd4Ga0wKzldZSmayCe6CGzMlB/+MQ/AP/2XjcYMA45e+3GWtavzdiUYPve5rzIf32QzHx3yQ2S1ka2+s35kGXIZ4Vxj/9Rm7PgO9rwzHz2HLcF0fMT+6lTnrW3w7fPk6Yf6LOlENlplrquAe/W0krEK+4bVJ01+Dtm6Ml4qAqIXGdtsQG2GuZ7NIYlkykMM/yBV3Wv1HSIsRY638rnI26I8G3WsVvlEki/XFGaa7KyaGxK6Ka9lqp8xZ2hrwaCHQC8XZ9QEXdqIErdx2ajeHBZQW15zZzlKjKoViTL1lcl3cLw6lJy04KkVoJRDXuholt9lKO1S03J66KyT9CaT0oz2yGodY9Qwl/Xnfjw084ko7gnCBEPKmLXS6cCU0dKrK6hGerDX7qJtrvbBw/PON97cc7buZWwyaCEJk1nTNiVPWhcJl2aEqXPXqDeIR+FxpCsO1K0UTCogvXS0addFowAadY1UzoQbU+iDLoQYuouBL6xgZI6LHS/jQ9HgXuOdtawPfUwRqUmBSYoEg0NYfHUmP/S5L4LBbBN5ZMy2LcmanJ2rLXiD2WfJIUlu332eocHVd5y0w8M99n8iDNCHo09dzDB6TQnMScaEImlBOuqSlIW0yUKnmla4MQmGa1GElQrXZ+9tuHfXeXwx8Wtv7XlyoYjheqPrL126eCoCl5QU0vW+u+Xhc3uWLzcrOKpDLCznT1FqoAKt2vEdNtsjrXOLYaZxYt2DN5xGNLuGJMzGWwBTMucGO7rLdDSTuYE1WHY7PBb6unLz9a+xf/CG1scVhJmheFx1owVANJ3T8gcBWYox09n1qIlCsR1pSnaE6npD5iyjJIXd9GdVEdMqKsrQI16tV6y0CRUlXc9O
mKTBXjxPM6r5q0+z/BfKVx9FsZ5Hm/FVCjdD0IZyXFYq0xt8PUCEpELbJldXry+vhsZQjHTnWs6qHagp6GrS+WrrKqmpgxfsO3wx4ijUKQ7Y1FHjb2sXV5hANimHatuWQW0663Um1NaWgVWf2ccc7U9EcdfVWyqSEMlp3umrimDFk+unCa2QlqsrefPBwpsf7Hl0prwKbTYx1lqOPXkQzVnWYHKEc/YqyFWMm48lGLo0elR2DNL7BtT3cGBH9c18pMDr9leZt7TKNE+sGfT699UlUXsfM8ZFNdLgCpMWoAqDsBxEjNXlVhOF8OdSDVU7nJFM7kzTRiaPNj7kmb7uOSjXJ2c7H7HsVu69co/XX/s05Rk/OP0ii8MIIyfhrs0aGZ2cE19lsopJmGehaQcp5/j29SPUA+vqQqbiTno5fb2s2lYTW6ydeXLu3W68/Adm3vpgx5v3V55c9NJCw/ivE2nDrE1Sz9QUF12yz2f9SpKIYG6jcCjMzVN8zOXj79Ge/zSzbTlc0l0k/eRyRDupILBoggoq4dBwzBq2cbZ2m7UHmXt8NnJ1bN3DZtKzwfUmr9UcH85LXORzBXy1aPS5PlPTgguNRSPnZaD+pWBpmtaygsalWkJja8iQZxFgc5GqsuqvDtYr6aYZU+pCyGIrrYoozUoNBznHwTDlpZUXX1mfs8VARMgie22IAHKQtjIcjWjvpNbkYaVuCSw3RK5F+LYi+NWMRVNnTnYsSr1Tz7+Vp2I2WBEUZRHihwIJABIiO1TejiHeTmrH4OA7iOuGJlHtatUAZdb+h086LKPCpDd/dQn3PdDmc3PWrhFG+md102fnwbsPF3793UvGxpg5dceFjYjcUrHWG75PmKMX1KIcZzfUnWLav4hVXG1pgPHD+HOQFTYVMkOHpmIipOt2p3t1BwnZe2GUtUfRDsgfmUZrVu7RhnLLRYLaIL0AjNrTGKR3bfBxDjI2m8bEoD9zahvm7QyxYRpQE4lvZr3dVTytJ5sbG155/RUtOQ4KHrL+tzbSAAAgAElEQVTiEAzvkpXSpd7I7FrEs1cHGM0qrlUcyBrgcxDaSVYPm3Tu4372Ila7eY32FUtQmSKynqtj7IUBf/qlY158LvmVb1/y4dnK0kcDLyt+4mWwGcWw8ruLi3mmL0ullJI1VSgFVd2cs1xccLV5TJ7cYd4eYaBuzwV3SXWkz6XZclj40cqMF6xY1g7R6IfUyUzwNpNLx7vaXyEMtVrFKjLXail0ZZ5k/X1Wd9wq8CtLBTUIRoa8dzUVqcqU6WTlp6gwtzDCnVZk8DrDWM5TfDFzxiE6IvGDX6HuOv1aCwXktayzWlElphTH3qxqgSDEbpDNyHWo0Er542Cu8xFpzCEHq41I4tRydSsSWXE3fpAzChvRuJFuOus2vByhiwzDtDi2tk5VkahwQS++D0xQ5cRh2Y46+PgtiMtY6XnYFuVZc8UnHJYxUHfrxoTXFvUJz3XE3tNppXHufOPtHR8+6Ty9igNUQRl1wij3nQ6lxk/h9a1Im7DQ13JX92TBiJuVs1SHiNSYKAtyRSLgIqvqwYQaD1P6Yc9kzRAk5J0sfbN34cWHzAvxf5J+hun3lyJFUi0RbmMjjdVlNTp8DxR5bKB1faYMbIdpKuJ3dlgVAdvrAQU9QDdPTrhc9ty6dYsvfP4L6F20g2onivysO7DMKh1fTRwFjQXJIf2AGWr8lPJSG9qH1PMgn2SM6aUTK75Eb4k6y/ZRwhA/XBBHk/NTXzri/fPkrQc73n+00vsMFsypLs/MSqXU1FsOA8+zfNXoGaH3dzPfYrd/LKIZsHmriSaCoJy2hwZADbAWP6ysdKY+QdOFZhasfWXZX5C7CzrGpm2B1CLszcTpg7eKxxEEOXoPgVlUs6MDKRGTnrhURCRTd8KTNfyA8Y7zUgpblZlVCo5GwUbWUf6+grNieDqoPKYonbrZQVc+8uMVOqTnywruyV4a9VKAVeK1JtjQ9+PNiOxlUprqouQQsdCK5NfGNj2vvRrKsKGOUetvhcX3Sk09qLDMyaWK6yhBNNzWOseN6FpETlZTWLJQPRITiXwFUaIMq+5fS1LKXe/CBbIZ9MNmCMYGMq1M/P6vT0Rxr5NGEEzZD1rvqOjOAE4vFh48Xvj193ZqxKOKYZ/w1hkpaW55bVDowrJagrUOrjejHz7DEHHbtMA46XrjTLd6b23woAIEzQSpFJQi16fGZ0bIUMEtvdWiXytIadIt3noiNEPmHlzfH4m2xZAs5kz14LPXCXYfUsoqjG0SxDQ1pq6hAUtlbmwm5lmxAas5skeLvMpmmM2swGuvvsprr36aO3fvCpc2vfee6lZyikKg1GE0JpiqZU6N0RHKwvGBhjQVVIeKaS6CbKgGutHmcqBWyFLUXhq9K43ucloOklr4qknrHRP3bnWev3XM5Xnwxv1L3nqYrGtdGrlAulyymYdc/Wf2SrTrtzq+5ewBm3v32J/bQSkBME8T3YMpNX4rsnoGU4OQ0XXWbCLnVhZ2mb7W3QUXTx+xe3qfzGB78gLTZlNJprC8/40qQJWCKPNGOVcr38iyFsZbFSjAgm6yv1PCAPktmiZiS2yvs99rI5RXPvGYLOC6EJOKzs2Sr+riqjgt85q8KbZd8RTejbW63MZU0cBRz/IiCHSM0VZQTDq+UlLpUuBVrO8ipUBxSwWXHvJpBIlEKNwty8TUHZ1fqwysLjHEEGAoVnvASUnYos8JJ3MvA1W9L1Y69UBcQjJiimEKxUFns+KrqgbUs03JKxuQa2K+fuzR+2QUd67lX8rsBlYtNTjrwbff3vHwcUgeV+aZIanywYSnl7rFwGvXaQNfx6g3FZl5PVKZuY75OCCp+1CHsDbglG1+Cunv7TCOCZi2bNVVa/SKDs17dRdNYVghltt9IWzWKDzy3kMSzXR1ShPGHEOxMBFNZW+Qg2GK/PQV5XkEpCcvv/wy/+6f/bd5+eV7vPbay0zTDcxWIks/0IM3vvsGL7/6Cjdu3MANbt26y43jrTqm7DiTAsSGdHA4QVEh6JYKKuuUHTpotqGbLuZDurTHoTNTlo+XrNKgXS8atxyj5+jiR0c4Gnr9uxGvCs7ahLM2n7h5w/jKF27yqRdX3nhvx/2nC/suA9nIwBlOzGf2KripfMpcPfwOm3s/quKUHZomDp9m3L14c4Oh7XZFYQxHY6sYZGcl10ZfF64un7J7/IBcrojJaF4r9lIuagupcyxhqU46M3Txle9iVOA04cHyCTSldzJgShWXHhS/1LGNCXtOaren6TutkSBdEdwDwiMnKdPqvNjYRNSz5IqacqyJB1OhrbNVJifFf6tlHqoWbTPS9idLpym6kt4l2ZRXxMvS79cFcwgkovYQR8V4Z53oOvPdDaLjrpWP3fp1To0ekANIYkjZ5VXIbaWmtEHYXvti3JC6K9WJ64IqDq5ScfUVg67boJqocSK+/+uTUdyrmLSD/RbOO7zzwRXf+t6OtcwMzTmw8NoqoPEfHGZtDffujOTBcaDSo9h+qmCNlWNF5iUatcablSkNtweG0hSjQ04FiRj
YUmNTBmvhXz2z0ghLrmnUAgJBK0sY5r0kYcJelUJQUaBit0jX+sD04NOvf4qLr/w0v/pP/k/BH6WRTRe+OAVsTm7wM//sT/OjX/kxPvfZzxZxJE52KvIpzXj5tXvl7rTqeEz254JHVluF8+K1dSfp4aXYHN2W2H8zOX2TToteUr8VslVnr8wNcQy98j+oy7s6mKbkzVHwtYRlFarYi0T0xComUH3URFanryLl3L3Z+MNf3PLoYsP3Puy8eX9XSiJpn59haQcQ6WiomIfRtidMmyt2V2dMa2IsTNOs0X9yPJIllsNiBx+qjh6VeNnLbyABAsueXDvrxWOmdoXf+QwGtE2D/Sl93RG5ELmpjrfMMr2W1FgU/Kfz17xh7EuG2KpQq+DLr6bLIQy8R+HjCaZc+CArx1+/tZWqy/MaBtLI6sWjrbTWyJx16XRNkOalFOMa2lNEaDtk0OBZi9DVTGRYEb9ouxJTEdji4qTnF0x7PbVQHJdpsuAjBkl0hkWQossiEXzcBZ8O1VtrqhMyORVEDPp+rPRtw/RovRpFK+5D72m1mxVTHlVA8jpnJvR8JKUo/BjM8ZNR3KuoBWLQzy87v/TtM07PFyw2xNzJaLKSj1uwoBvZnENjD4WdG8LP+sB3NXb11bUs27u202RUiro69qYSVpGiMQSM1Qmtmha7k5teYUvC6qxu7oYpV2N8qlUYD9uiShLoGb9lOhiWeS8sseEHyeUf/Imvc+/uET/zz/0Y3/veW/zyL38Dnye8NTat8a//qT/FZrPlR77wBV6/9yqNJsXKVDI2c4iVdOnce1KuOjn4BrTEqk5NTYGWUmMrg+DN6jSiOgozdZ6RXkTaoG017vYy7ZDCU8cOyAzlyEjxoVAoqYGSAWDaKD81yVnvCh9zSUnd9HnLXBXqjHvjzgk8fzJzPBlvP1o4PV1hLPB+hq+auchZ5N3V2/+Y+bkvsVw+Id3ZX5xztVzS93vm+YhMjZvNNrR5qyLXAusiwrvJ2Ty5Ec2Y2g18e0Q7Xzh58Q/SXATf9uguZ7/5f5H7S/BtOaEVM+CWchinuCrPdv1rrBX3kOoYC1psJiJVc0AV4AyYJuHiHpC9mhth4SNeQYOBoFEbQW/1hJk1TQWmvP6pTIZROLib9iRouJ5UpD11zgfuXVEerbwViTpgKzFEZDLZJCiISg+1XvANdfmouy4/atWFaxJc04y07D3lBZBZCrBk6eIPWlZsgvl1BEYU2mACrIysOqE8LU00pV6qsD59z43cpmCY0M9/0Mbrrf2+r09EcddBgA/OVt58uPDme5eYa+NQkJJ0ldbb3VSoUnnGS/PKetaUlXSwicOyXFNHuGatdUsZkXItbKvpjUvUdHrCWCGm+0M50kNfX2ERwvbl8RceWFko0QO8UiRtaG+H4MrwlPliSWXn6IlzCOm8uxs9Gl4Tw+n938DikufuPs/zz7/Az/2Rn2Oaj9gcb2huXJxdMW9n3nr7bV79zOtsjo7wWeOdJWRP+tRonYIGCufMrLWA6qiK8Cez0h5bSDWAJFtyyOYh48PWRrdgilCet630+rqZg6iuCaKgpZWo91fuU5+EseuBqY1XFDpmgE0QXdBKd0YOyRhp1yi1R5bZp9QfX351y6de3vLWgx2/8e7V/+/n+be8MstBrL9fw+HRu9y59wdYtlvWZaXvFs7efRMCnvYd25MTfDpme3KXaTurKegyM8V+R2cvCKvdZD66webGHTa7p+S9L+Pbm5IJtw25XnH+vX/E0M6O91gyYD8Uqx7lgSj1kvKR9LnIkDrKfBZ2n2pQwqQPz15y1iBtljad6yiPJMsJWhPHgNsyoQk2jMLLm4/OXGqqOTv0VpvZsqYGFbrmsKaUZJZqXCShXEtdovMg0ZtgpFydkfSnJRxSAXm6lmCUIihBF1bTGUyGJFtal6kNhZGMaDFb/b5V571yYQI9I7o9NH92ViZv9Y5KVDE4RMU+G8l6WMohvKphrqmIKDNWfHzT8oko7mtP/t43z3h42VkCptaIyGuHlmb7gi7QA2/CE+nVKTuANpcciHaKwW9WkI+64bCodVvozylMd44q4pZQ2L+bkzEkkLU1xlTERnhZaVCoRaal7JQcMMfolb0eKujeuI4TLYMFBd+EDn8Cse7ZnV+pww3YHG3FFRcxkzjzkRGxslrnyaNHvPDCi3p2XD3WyLzIpmNlfVI37+ompBBz6CvZyvAjDSmUqJCWtZVeGveR6T3hpC8sNoxYfm1conTcmYXHtFomnOIsTEQX5Szs2cCr5zJThkm9x4brAm7CVi3VsYYhE1NUBylRIM3geHJ+6N7MZ16Y+U/mZ9i5mx2WKqs4BPurp+wevsXxS5/n7L03gWR/dcrkM9mvuDhd2NwwpnbGOjWm7S3Cg/X8lOXijF3fYTTWo5U590zzTW4+9yq7y1PCO1M0vE188I2/XSafsdymZK6oiEwmu/vGrEjfKvxFKlrlmUfBQumJt4RlwsOJWZBL9HKR0qrLLzWZFc5Mq+G8ld6j/gxLWkQN3YoBiWi6nKZkXgXXRXamaoKiS05pKXXRVNBdtqR15dxYdQd1Feg5jiCYq0MTj6BpuZKMD4apkiGT0Bxj0b9rDdfApOXzqSJLN63FW6se+ESuURiAGhlaXnMsPZma0fvgmQoia02nt+Idxn4IXJc7ttaWMpHSZJBt+tiZ9BlLCfQ6vwrun3fW1EMAGrlppgS4KjgRrUYlKxcp2HCReuFfUY1wogzoymfoRY4YHPJGRlc9mWSFq8uosHrUHs+U69RXsmsjUPS6SK1UEGO6QF0v5f6zA/ysmdaKWBzyNZGq6MBSOm/KZVnSt/3lQy4ef5dXXnuNaYJcV5arHf1qZXe152q3Y11UDDfzll/9tV+vuizIQ2SMsrsJ0wKQ1Cq0MozSU1DLmrrEpoRWcJEj6CZSuGCECna5xVj7QiKM2K3ondAeSBlHS++JJJlWub2eJTmthnYYa4Ypw+sMREXGqsgfKEVdliEvQ3aug8uCiosOPfBubI83HG2e9TGvSFcgJqmRzt76JeLJO5w89wo0QWm9X+nsnL5PPHlTl3RXOFisCxdPn3B++iH7p4/ZnT1m2Z2xO3/CcnXKur8Q4dcb25Nb9LP7LE/eJmyQi4LI3OUmNaQ0yyaZq1Fr4kYTVIVXqEpR5TEVppz0pvCrKNv8gNJGCIukue0a6hMFUti9Gh1Pl+ackmdGYqYpjnDELzZacVbj0Hpp80GyV1oWR1TjanOlmta+BEeQo5WbVpMHwt/NPmIs0rRuJjmn18UWPmtxRysIszvRZeQaCaS4IOCM0qrb8DWoiexhWl/YHNwVRR1VD0jMrlM5lR4rlZtIilAzOWVdkka2mUPG+/c9dZ+Il5yV1stRSt2taTCVqYJrgnIQQgaV3qhRxlzdfDfdzIsF3SbhsqHCnZTapAqPF/lnPZhD/9y6MxUbH8gI0yajtepiq7tmssOEUE35ITArWpMT1uJa3WOhbqaKrckGqyTFdagSUH59Si54cvsGt27exlxdT5jRY6HHiqVI5slge7Rluz1S7goDoj
ImRzLQesCjCYeXcmFsaZIMztClFpVxHS0wVmGr5syq+HSTXr+1Vg8/6rwtRfi0mnpKFzNl4JNVl5KHqSznoedOplgE+1unZ5cfbCiJQvORttDXiG+tOrgihvssGKc8CQzDTUF2z/rVikkcTmOjc/btv0c/e4sbd19k44qRbhO0my8TPmHrFbk5Zt3vWc6fEudnSoH0lenifawb83zEcvWE/e6SlWR7pML+6Jt/A4+FSGM1KYi0Sq9Ge4sy/dTUaAAz9FVOyoLxkiYJn2f5QQzLzowwdYtWEc96v8cUa2akyVxFmdwyqoM1LbV36i5IQZ3qtQZ5qu/NnYpvaCzr2LMrWGiEscknkfWMSWiwktIwZjVLVZjNytE7pS5DOhOpTVNuRfiGzuhHVD8CBKrpKDVw71oDqePmBb02pmEh143E2B4XTY1MhOnvKed56H0cih4bqEH5PWwo/LqKhVyqEpZ8nFzgE1LcjSFDlDtODDGRZDeyDr7yXLTnMTOFt9G0H9JVNMTLKcXOi73XeBhFLgqbGx9iDieY2UHOSP06JjhikELEKmYeBVS1LLKn3kX91uuvYWoH1Pl6FtRYf2164Lz+u+7jFvfqroyHb/4yL955kak1vG1om42+j8lqzJNrzsw5mmd67/zmG78p8saDlp3IEAm81riaQTOjRZkoTAfWKcKhCcwceRiJoBOrlVND0jfSHxuNCgwAitWvg1xPa0X2FmcSGnFBZhNLFZLVZlofaicOskpvgq28JVkqIQ7Qm+zZPmCmj3QyllTezbBpP6uX4DBpzvW9WClULJOLt/5v/OI9brx0D59msIbZhE+3WJaF5ewRfb9jXRcWWzGfOTp+ju3LX4TNzLK/RBDCxNHxHfbn7/PhL/8Cse+Hi9NyZU1NpAraakRMWlTRvZRJxTO5zl806FM9c8WfSPAK3Y3FukLbMoqQtCqo9VytvTrvmgowTW8VxSH/SmnEvSCKLpWJnKJrXerixrIHU0PneWjvQ+dMmfFWT4+heDvBJdpHnNd8gakpwhJr0qJrtBhNo9y0+pl0pt2lSvKSA9ukCXaywGc9E2ZUWqeMaJXhqqTLgn19NIC9oFJUqKEKcaKf9eD7MZxFf+aAM72mjURejo85eZ8IzB24/mGy6aHNoNnESsfWMW4B0fWXhLlJBqkuV0VgOE3HRnHS6T7RVmFY3UtbWtGwBId0vjT9nrVVl5OCWLrL1NARFuxWBavGWK8bNC1K7WJ1mLwKXy2Wdkkf3eR6kytQCpBDWbJC0w2in7HvM5NBo7Odt5o4spXkC6Ib6yZ5dP9D2tGG892+tiyBLiQZhdxTGmCa3HWmohwZtEkXwBQFDFlpkOPaVCLbc9Y9rC7roIcvrPag0UayNOGl6oqoh8trBA1rFcA05FxTdW2F1dqQzgnSil5wWhe22R2Ntm2M7FJbeJlJ0gwmpA1+hpC72nQnUo5FTW11nRpkdi6++4vc/GzjzotfZnfxmN3FIwi4Ws5YnuyriYBpc5M2IxNMOZQ8Dd8eMU3H7B69xdmv/y1yXWiRB+EBNjOhsDtS+UB7gia5ySE33Do6O6UasV55m6FmxCtj3FdBon2CzJUWtVvI0F5QVbqS+Y6mTOY1EKk7gbiASvkUNeUFTaxYzGBrdfCKrVarIV5I6ZWJr3J2emn6c+Qf1aVGajfpzMiGVzNjIRnwZEZ38T+WQ70ml3SvRmKoaNIbHlH/jXYreG2AU/6B/C7a3CTTXoZjfZGqzPXMOE74omdmdIbhIkzrvCaId7JWkFMr9VhFKFDGqY85eZ+I4n7r2Pn6V0+ut6qYV4+oaN7DvBc+fmqgOu7qEDNWKOyMgb2RsKK8djOG2cILD2uFI2uvoTo/TyqZTTe5+MEa/02Fr2WNk8goIdX1Wh3HVB+SvjmrjGiryYO1womqEKqDT2HiCE8M1L08+Y0THn34IZO/x8XFBZvjY3yeS06pC3AJo1/umTaNXIM33rngnbN3dMvngEdUfFcP3CaaNfqyaqSruIWIZLvdsuy0rHaz3TB542y3Y26NuU36Wi7Mu/RfUHJKeUbK2BLVQVkd8MZhg5NXZ2+1XWeoXzKkufbCHmVpLMkYcDCPWcrjsJSMs6AGXJBNgad0DNLxFpxejoP1bF7iWVxBUJTayLSJJ8vIdf7df8D0wRvc/vxPcvT8Z7hcnpCZ5LKWoWYqdVFBh/Mxc9tgrZG55+ybv8D+8ftEBYplU0Z6O0x3KlqCvYPmI6wuWZERz9NZyz0KyvyRec0P+UzDjU3BllYafi+Bg1QypTQbKw4NTa5QijElovrBu18yv5LNera69PXnx35IhPWfW21pG1HUZrOiG0yad1P/Q+asDpeSN1pNkIaiMoYw4mBkQtOGGyyG+QKMkLKJ6JoIBNZOZYisuL/hIs9g7eILwvTuBiL5VcjVhRsBvRbsNEknLWpaLvVYmAxeSqathqdfGz4H0PD9Xp+I4t7cuH006WEtqCJTQVnqlqnYWYUt9UzwSSKMqdbTxbaWUHfSG7YUIXcUhdMa2MQ04D2n1Btlt+5K2gjTzR/VKo61XJKNaZCyCJo3+qoblOZlN25VcCZNAlU7c3FlW+P40VoSQ31A6u51EJQgmbUjNPiTf+KP8Tf/zt9if77w3HPPM08bjrYbcKOtRk7G1l0jcZ95vD/hwl7g8kzLM5zOmrU1alW307xrDC7883rJRccvtf1nxZhOd9RTQG9dnaA5xyd3St+r5MyeixQrOfK2r6MG8mC+KPzbG+SKYhRUmIfqyG2VbC0dT2V0aJJSx+UGQ8KW1qEXbpnG2LggZ19NPl0j7CF7/xm+RNTJGwB6UCv5W2N6SBFydXqf5Zf+FzYvfJr59ovceOXLrG1LLgXYbSZYO9Zm1t0TLj/4JhcP75Onb1a3DVKUCJJoNGh5SBxs8JFOmqoMKckfzuqdqfshQmIdZ99WdfggUYPuXX02haiuaYI8sOrkVdSjXJgWFT/htVoPI7NhLGqmXJOcpT7rXk2YVR6NkfTZaEPb3jX9RE165pSKqiTG3WnaXaaJOV3yW5s0te9L5DxlZdlQBj69P31aGW2RnNYwTI5prS5cGKBmZDBNRq5qAgfvJV6r3q9VF7v+OK/3Qqqw4c+xMcJbVob9EBIoEC4MCSME+jKG59/u9Yko7gJps1QkCsEie91c1bUNFt4RxFFbgjKla1/pTIsTyhdjY0E0oX2jWGq7SR7G+CjpU0t1mYdOIddqTlX8PcpE4EMqVkKXyqDxSnE0s9KeroSX7rUXkWIqtsOBNhaJmK1kijzsVp1QaYTv3LzJn/ijf5wHHz7g22++iWWyu7xgmjbMm4loxp4jesLD9RX2U8OWIo9Ksjnybmg2kj6kmW5GGxnYxRVogPCD3FCbp6Ky4AUh7E5PmeYZP57BtasVSoSodqnUF5I4+iydvHaJxCGzxqLpM8dwk+4dq8+nDrRVB28MLTKE93qIK5MnagKqcKpeXhr3SaN0F276LF9e2nLt2KzBMwt6i4JsCgqLZlw+fZvLD9/i8t1v1D4DmWG0JVzcRV8D+mU93SqaVrEQy
n4Tpusjg+nQoOhiTy/Zn6vAa/pyVpM7uxn4Bo5fmvG2YV2N5d2L8m4Ud1UXvNELT7bR4GOmz71Z/XlYyZiLCI1yWg8ZrX10Ci4+p3d6jFAvO8AegpHatbrH1ASO2I7Vk+ZlpiqUSDsWGlHKHrms9byO5ndEikxLU0YU1TQUUG42IZFbhY+lgSs6OS1r85pUNqO5kVrGa1F4TRpugmUPf3YrA2NBW2XgaqVk000nD80hHK//4FP9iSjuiYkkTNdtWqOIRo9GL8yr1a5GGzklDuC0NQ7dhiABFX4fxcp122XLIuWriK/1gLk2U5rV3RzXKhDLAiBcSYMKnqfSJwsmGiaf0soq5VEaWio8q6ciB3oIe7UKRROMoO7CuiR8TK6OhYnNkfP6p1/ntU+9wtPTcx7c/4AHTzrvPe2wwi6Oa2VYygo+mPnCwN1VvDXujeJQ4WD182UDhqY40Pc7mTDLkfWTYOl0D/qyw3PPZprY0TjabpHpRQYUyxVrToYRvWA1m1Rom+FrVqxAdVSjCyl1wmrq/tJLj2xUl689lEzX3IVgHX0GORL1TN3++AHz2YLuKuhmmjTTDpkuabrIzWsdQ6L3vOti67srrheRJ3T5P9zRz2p2CMEzxDnQGl7rIrUwggO3IpYQzGaF1FnoQqaw4HmCtnL7qzeYbjfalGw/vYWuS/jiu7Ng0q3z+O8/xc87uRg9BmSjn1XIh3AURfvKYet9UjevoUWBuGXg0+J2LwOVSPIelLILEa2m92+4RKM63wFz9mZk10ROi0JN7fD5O8lQ11hdplnNwVD5pDWiAvS6WUlYg+YboQKp6GIzo5fWLIqIHoY6rwkyK04lGY1jPe8lsZyEEWNpdDo+eUE99TOYvmtawYrVxLglOTvt8N/89q9PRHE343oxAK0OHeqQY2BvXK/nahSDD5mN8LU0s0LqW7k7BzFEjZGR0lDPaLFyumkjvJXuNQvPraUhWnFW718k2CpM3KsLj1pbZlk0jxMm45DGSbt2W45x0qGvIsgEUcxa+3d4I2oHpVPyTemJzeD2rdvcvn2b9Z0db58/ZZ5n5kz6coU1Y7naFS7t+t5DGKiwypJ0RgV3VXdtHvT9VN2TiZQe30tAi9RiB0JkW1feTKyd3SpDWN9dadzv0LYzU5v0ec6NaSxEKG3yyMQP9PmOi28awWGpzP6oTJEpa19ncS2ZDdZeBHFNHZGHfs/xUvaANV0Sz7i0M+CXaKp8WdncI79bG4OsOsxG1n7NQzqqyiOW2jAm+K+zRqOVtFbnbJLZK135/KtSPdtgccAbE/4AACAASURBVGUvpZtWWY6IAffk6FNH3P3KCX4c7PcLFw8uJSn+pSf45AfTX3pwcnLCS3/sFrbAk1/ecfmdS2yHcmhsSC2roFqTFn1k0Uxgiy6UtTpV2lSNQRbfVbG9+RFfilnhqcZaEklDwYE9GgFMmeRUkQrllbDDe/dROOoazslZzYZgoiEVlexYCS5F6C9SH2F+EAMMfqyNr9etlDL1wDcXx1aCBAvtlI06r/mR+m2pCa21ZLGJjRfGvopbCFef1fBDVHKvuvj9Xp+I4g510xdHJ2dzLYOOHOAVg2Ab11VxoEBZjTtKkwPc4oA1pukBV9+dLAUPuFehro7WitDRCFeESBk4aqMlI5XOqhtJm/AQK59VgLyw46RMToliDqTn1CVCMeUGEbUcoNQMo9PQQ70cNO6KEF3xcLbbY3IK5g6b6QQtVZBkdL3cEZm131Kd91CQaDrRGjyX8PdwaHRZogexS020ZnWUtbIwAmwSpi/tbSvyyjBPGZuWRZPWsmGtwr6dj1liD31lnjf4YRWfKYvdGsl6KHSqWE60gm6qyxqwzIHPRdCZtoSURMz16xF5gAme1asG8YpGXiu0LlmLXHVvrNWtMUg0ZKxbGviq4kIFXWX2GttBCyeyCFNNm+bVjSKFCVZxTBQ0g87y8EG0E+fki0dsvjBxfnrG+mgRph6DyJZNP2K0L3D+7gWZxvHtiTtfO+b2D2/54G+dsZ5pt2obC677pIu8+AZFTyiWYzKDCMaKycpgFURKlBnIqomr2JGsgphcp6RKgiUlZQrX1tIdQV6K+kVnRF3LIYtmQDaCUjTVJ4a1VYaj0JIVy3Jqm+S4kiVOghWjpKAhOMx6XdKmAL7RpKgDR9BzjDlVxOvBYZ/a7dBC+fQx6Uy0gVIY2JKHoD9+ADTziSjuCayZzNWFtTRWVzl1a/rQXNgeFVQlxngFZqiDYROF6dW2eKuGu27TtVSwmRVTYB2YiSXK5KA3e2hxw6SrxQxvQqytg0/lhB3SMJ+JUDc5FmeL4OM6W14TGFCHoQGBLOCuLhsqLAsFfRESfrFWR9OkBhB1GVALA7LiSKc2Y2bMN4+wvhIe7JYdrJ3onZZJThsyFub5iO12o244YKyaztyxrJ1pndivO4wA06pCSh9cOIMuRKusxhFxnLAgl0ffLUzTSmdLX54e4oP7uhDU8oRpViKi7dldXqnwT8qudNtK808pBFyXjI2AMaueuC6iyTTd6Y6UHI7qkJ7Va1zTABONJUrS5632FaiRcLS0unc1FDEFTRKK6vQDW/sBZpLLVHCjHJaGgmGXgaIVnl6R01arOcp9ambML0+88PU77E6vePrOU02JljonVJ6TwdGdE/ZPrkRWB8AK1rg8XViuOsfPbXn5X77Dg198At/TrxGdYF/Beg2vaUQXb5G1bqxUkSdwb/SWB9OaA6V8qD+3wwi8y3pvm/4shhZ8LT7HkjbC//JatIAVBFQQoWWyRjJN2v6WK9BGoJ7I5gHdytEqiagmkaymTcXcwuufZdiyCKZawmKTohGyTyi/IwU7LuCuAMOeISHApPelVa1Yo4hkH4SsHODxAxyqP7C4m9l/DfyrwP3M/IP1754H/grwOeA7wJ/OzEcmj/1fBP44cAH8W5n5D/+/PABDrmymG8trjAqSTaKcFusH6Y8X8UNhdJIyNX0xc6IHOdVzNXYuAlYLag/dBKHkwMqZVodSk0EVIIParCcMLrtS/sbGcrkpha1Zm4rz0mURbVi6+chBs49E6talUfrKrCybShGtvYtV0LszWhcrfLDnCiYZWyHN6gwaGBM3tk6fVHxJ08o0RC4pJMnBOsmsh8yP2U4a1eftRCzBPi6Yt8dM2eiR7NdLCGexrr2qTsWpChucrba9o4ja7HI46p5o+hGaivO6D9Z+qXPw/zD3prGWZdd932+tfc5979XUA5vdLZEUSZGUaFETJVqJLUNjEtuKYzsDHGdABMOOAzgJgiAfHCNIYhgBEsQJ/C2BDURxFCkeY0cyNNCiZJGKJVt0S5ZEmZQ4SGx2s9lTVXVVvemevdfKh//atxoOu0nbMrsv0GT3q1fv3XvOPnuv9Z9WwjY2cmziD3yvrsWS3e5Yqh+vYDgThOPVrUdVdYSILFoZpKobeq3WNnAIolO3GIdu0EvOp3sdjBR30xhF0A/NDq0sEVxpm15DqG1qviO0rpchqDlFlnsmfZUhZ2DK4s+k5cryyMLD336Vuy/eZX+6Sc1Rfoxuhrtz4/Fr
qqovB301xkspY9AV6cAhyUj2t/fkQ/Dw77jCrZ+94PKpXrh5PUdRunlXBEBIz3hQlGicXuUaoY7r8JyjWQg5ZM3XcO1CWkuxYCn1CjakeMmEnfOWf+NxbSyXm2AeBp5Nqq2Iw+b4wgducXlnynYTL/hmwme9SNI5cIRywLsFbithGzaEwxuNvqGxissoSEWYvpdyaJT6RiP/1P1Hdc4sQhWsZL6JpJIRChzUU+4HHv3Vanf/ItbmXwR+zz/2tf8S+MnMfBfwk/XfAL8XeFf988eB//WL+Pn1RoSXH0hSDBvS6GrSuqr2rBqzazVWnrtIk6x8YzlPCz6pq9BMVbC5SByLUrNkELaDyleWqkO1j3U/wEAjy52ZFXUQ2ugGSYQqLmXPDMkoI4neNeMydbqPIsb0XAQRnS3VrWTfhDWn5Is5IMoNOwIiBpkbSZA96QwiKx0wqFFkWQ/SHFqQ1b4nrd1PAcxi50dVLTmybOeTowhyBXNnWRtHRzdYvAn6WXecXLnO1WtXuba7wnp8VL1tExFcmeMcIKjKqjE9xNlGSRm18H1q5msj1pT5xHzR/R2dbQzOzu9x9/QeZ6d3uLw8Y9ufcjHO6X1fG9AF+4sLYmxcXJ5x0fdcbuf07fI1XtvJ3MWmVnspVCmjvL3mlfGiIsXCGF0R1V5/10tmK0dlFgymyjpdVVzODHGTmqg3E0lf2HVGGZls49rXLtx94R77uxci6UaZdc4SnoIryzHX33SNB996nfX6wnplxe8tLBcNe9rgRTh+YIe1ZMvB5Z1L+mXyxt9+g/U6eFsY5oKGXM83lQfVXPEBel5H/bnUIrFpXQgnjzLCVVyGtfsIbV3XRGmZWVlKozpKZ3D86MLxo44/snD8hiOuvOGI9RHn6CFn9+jK0Rt2HL1xhy2LpL6pQ1TPTKsGtcyMdZjWcXrY/EmFBYrEV1wGi8qcDOXiHNWeJiNUYiul1nEl58ywQJdKbXZWStrUfZ2u/emonbEmr/b6gpV7Zn7IzN72j335DwDfUf/+fwA/DfzJ+vr3p6yLf8/MHjSzL8vMZ77Q7xF8ItzWhvAv3SSRFVOLm+gEVGtIbWiSbgl/G4f2H0rOByJs3GsIRlVzadrYbCMq41lPnTOWEFkVkmOGl0ohdPA8++Ln+E/+0/+cHF1tXpvVu9pDhQINMheU2y5b8zIqQ8a8sH1HA3WVbpkl0ZqYXGQc3rsOLmMLp2eXnXvWO1adRJqqcboOt8LZc+KJU09b5i2b4zsKxtO9EPzSSoIoJZaUL29+x2/jX/23/zMww5aVo1xYd0e6gWns7UzdR7+saA/DTQN/3dqh2BimARwVFK79PY02KkbCWskjNxor4cIYI52x7YkUvqso1OoOJMiUkadLeTVQ4uFrt7Y1i1YwokmWa1lqDA2UsTR8iFUJa0yns8g3uSoVNz0ldbov2Sm8WPCLAueoTlaFyUF8m11ZSGY88E0PkFc7+1sVcxiKzV0X58abj2jvOuLooZX1yGnrwo03XWNp55x/3Sbec28cPXDE7qpxeXrJ2c1LqcQu9vSHdrzhX3mI5370efy8oJVEkJ2F3OYu/4Il7K1I4mw1kSiwkBFQCukhySsTd5fUcXWraBIEn6SVoq1grDAsgq3XfIGCxoh2n2crgNNSh14uyRIV8TvzqFqpzBKIkkOalGEqRgxHa3m4uorl8GwpegRK8YW6ZentVQilweIoJbLEBllQbXMp6kSg1qFeh/ncK1/t9U+LuT/2skX9OeCx+vc3AZ952fc9VV/7/z0AZvbHUQXEm9/ylgPZFxUnq028CCCL0rdXjR6myICqqL3JYblUmmOmoRROseReVmAqo0LVQ2XJtIoYDVXLVqtxqYkrUbvROqziTJPegh/50R/jc5/7LMag+Qwng1xEaNmww4QWWhlYcmKA1VQLcyGtsigsyC5Z4/GyAI1sHetJHyU5a9qkZ6ZM1tCL5jCnKkS0+nybtpZS6hwyWZa8r85BcrjpACURXxAya5lNjXLCcJ4aG898+td509vfJTcgakVxVR679SptGFs7InLDIrm8vKxjWbglVgoOuaBg76XNl9ZJ0P0c0LBUd4Yq4EIoNAtHWOgYveCuhcg4dIG6d4eT/jVZ2+u6MkoN4siyr8088CY4TNn3RdrFFATUwV2BVEzlVSvN/4JyYeZnLBjEoDYCVFS4sYUAHA9YrjWO39G49eQZNqoSnh3BSWO9ccR6fcfuuGG+MIa0I7uHjmhXFqIHvjTaztWhngXLblF6ZSaXd0658uBVbrzjOrd/5VReOdOBo0MdrcPQxrXU58kUYWqWst2TB+iCyo+RQV2x2yO9ZI7a2GVG1KbpNhUus8YujBo9H1jWtbMi3qNkqF7CAxV0o0m5puWXFR9wWICHXKZMGR591DPJfbUOVg74iDqI9RRkxEFXr2+tOIriq3wITiuKAl8kwe4RVcxwgK5e6fXPTKhmZpq9miDnFf/eXwD+AsA3vve9B9m6GOYkmqYveW0wIjNq8Ve2sxyMiiYNs7L7TkggJJ2vvO/IAb1ck4u2mhITSBGAmHsvom54SaOKIO1NGG/34JmnX+CnP/jBqpqXGcEuBj6iorRKQ28FKw1pYeXRmZIrbeRWMlZ3BfJ/+zfv+KO//4bIGQcbzv/8g6f8wsckOfRxvz1zyxqCnfUACbsLC5Lyq9cDZSOwZvSpADJ9vkoGUZWdSXSEw5vXTZmba3Lv7ov86q/8XR572ztZEDmSkVhhDa1wonVxkqNS4CRb35gGnCQJr+k7PiQ3y+pMbJqV6kAqI1amaR5ua0TrZRTRN3p1cG66TzkMlmQpK/o/6e7+W7m2r1y5kjZkyY8AVm1aIlEVBKfCsAnua1MlorXrA8bLW/Qa9EwdYlo6VsagGUfdtd2nQc/ChrUO3vA7b3D+0hk5gsRpZpw8ckIsjas3dhzf2OHNsTY5EyMWYzFn2RVf5I4vcH4PttPg8qULfLfgRwuLG2N0Tr5u5e4nDc6kUFMMlDZWYua/l88hEuJSxj83xhZ4yXmTVkFZvbwZVnCsVOYeIvTFBas4KuCR3jd8NfowST9DhREmrNusYZXf7rXRQ1fXyEbLRd3SYdRfkmKCsVbwUuXR5wTBW7JFwjLwXqM6rVU0sOSgLYzw0u65iq+0xBlV7KkAsCnXrpweMwX1RQ6sSYFVwPvnfX0xmPvnez1rZl8GUP//XH39aeAtL/u+N9fXXvVlIM3tGIX5GstopcNdCmcrPFL8I/dbqqmvlZ1ZHq7BmOlpoTZ/+jNFNGoTnvidDMK6UD2TjWQMYeFaj4aPgY1gO9/zZ/67P8OnP/2b+O4Y3Om+8uC7v5MH3/0vcfXx9wIN2x2VicEhRQBmbV76wI3mXsOzRZDkCMIXvufbrnH9anLl2Lm2M66cmIKf0rC9DpKZ/07NsFQbL1mj1bgzdUB9XjDCnc3rWrbC1nNRB4M2+dKZHbbD1B9Uda+P84s/+yN8+EN/qww0ejPqiObfq9hSVOT00VWVFVQ1Y14t+2EalFrOGcpU/134p7o4qg0dRYqXVHV2BCYMd9Lh+vh
1cH6BCucfe/2Wrm2o+7K4IITUpknFBeigS5bpYMzaKGzCU6oks6AcXeOhwDjqsMxOWkkdbX5eQY73h1kEx28+wh5ZOb+tNZERtB0cPXDMQ19+leuPndCuLiy7hu/8ILdspfz21vCjhu8WKXpGMnrSe7J/2mirOok437Merxx9xap756YY6IJP5hBtcft6DmPIuMcI5fmngEmrwCxzaeOnpNhKPZOtyNeI8gF4FVeFFIa6gTJAkzQpjAoWspnJ0HWvMvXUtDwCKnuq7klD3Uy6usrYrPYg+S/SpUFvVgPsmVCc7qswe9f35VADUJBE5iyx9Aj2eh5yVMc2qhv1IbVOpKC2Vylc/mk39x8Gvrf+/XuBH3rZ1/8D0+tfBF76YvB2gFGa87REYf4UXJF0uz+kQ5ksJSsMgChCRZvmUh+p6FlFgy7C2a30sg3hWdm8zEIK4TIDb9qAvCpH+SaK0HLj53/+wzz59LP4w1/J8Tu/C3v8m2iPfQO37t7jpTunnNk14tFvYveV38HyyFdj65VD3Ov9h3fcN+uwHLS2WdBNVgrcUooCaSYlF9OEeamp0oxoq1zpCdVylAGsVD6F4Y7Z1s+yMRRX7G2vyoVaJmVual6kDdWV1NJrAd4HH//lD3Hn9jP6um9M/b5VFZkhtdIwqXzUgcoAEi3xGMy5kpEwWkAqn8fcS+GUVTXVQ+JSkJiFFnulCN6Xx3ZdP/MDx6sAsX+i3f23fG1rwyiux+QpaMitSSQtBr2JCI8DWo5ciiE3Zqt4CihexUVVrD7NfFUVH4L0pHjB6+f0xrIY251T2hDH4w67B66yXmksRyu+M1ZH/XyFmmWUbj4PC4FgHtbBcqSI4HxxkJu62wwYp52H3vMgQW3auRHZClYchKlqXRbwRVJk8Usu9ZqPCscr/MFMVX4WxESD0WCrznHGFJBsVYSrSp+5/zCiCog6/HRzwFHUiRJQ9b1b+QkUNZ30VOGnC+BkdE1Qqy7YbNSQJ90zc68BJCD9ukTpsSRmg27qot2Ks3NxbJ6asdDIqsx1TWOdeLsOSkNc1qu9vhgp5F9CBNMjZvYU8N8C/wPwV83sjwKfBv5QffuPIqnYJ5Bc7I98oZ9/+D3V+YuEEDlqtYO7Q26hNEPjwCrLLWb19wxiiNQwYe6DVJvIdDjqH+VMqXLM4ZVOaIyQccrdaaHs8CmxzIT9eeenfuU5Lo6/EppzevsmRrIPlztujRrmbJxd3GV3/ctZvbE9/zHGEMnndIa7IhVShpbW7KBuSeCJj93hs88ZNTEBwnnxRdR2Lwg3txSkYsEcbGC1IM1aeQS8oBhVKnKbCrLIREriITJJC5vSDzepfMzhkE3NIXkqM/nc05/iM7/+EW5883erK2kisAQd60Dx7uTFGWkd75q1msX4Ky1QVYhXNdYc6cDnRnXALueBrnuuzB/pthdmhk6tgsI1o2AKs3jFyv1Lsrar4xCXcp/WGC7VRVg7+B+s5AO4SG0rp+TITkrED0R9Vt33yHEf7iMLwhL/nh3Gqiovw8ijxthr9qY3x3ZwdGPBd4oakLJsLcUKxVOVQsSb/BouCM0MluOVkweP6ftk//CGXxh+IrfoNgZH1rCW9A2WdGhdQ85pLJYEgzGGNO8Y5otITJMXoGZTsEZXZK5KWOHrBy5Cz6/crgmb1pPRNAfA7gfapaGoEpvkax46hKTjUx5cUKlgHMB0oGD678bLvBO1p4hLCgZLwUfVqRZfgDsjwfZWWU+Sh4hoV1eOFa+3BbZIaNGayeRYYxDpOmDCEipR9JVeX4xa5t95hT/67s/zvQn8x1/oZ77CL1Jd7kUg1I1pIWjFKqhfu7Mr892LlJwxn8U+pAkrVO56HipZR9NTzFFV100/rtypFGPuYYyK7cy+KBMmg7/wNz/Ehz78Cweio/pqlspYHoNyE0Jb9J4eePv7uPnSZxh5F4tB9zrp0/Am559tdj/d0uD0fOP2bZSFkzrRt/Rygso1FzGzXLSoMCPmBoKzjKGOr1Q5pLI3rKJDrWRbvpqUJaGKIqOmGSnKT0l9TdJT78Zowm8zk595///J13zztwOLCGRr8pyMwLxjq9dAZIhddSylqsmlV3b/rPRrrmRtKlEErRJHCrIySsIalVEjvLMVL6JsDpdQdgjq28+q8zVc2zK5ZFXwTcoeS7wci4Ja5to3DYIxKYAk6mjS/UtQokq5gvScPGj8m6EYWaQo8SXxcpa2NXnwG4+588xLDJOO/vpDJ5w8fMzuaGFZTEXViPtVuheM6FRCa6l2THBSO144NmddVvaPdZo5fQwuT0tD8gbj2lc07v2mpMKWVZBUXHBWZWsNfGiCmkcSuQqznhkyVrkzPtVdWYo5JJpba2BLBuy0jsw7ym8RRzUMFh+ErYhXNRGTM6piQgGzW6EeLazgWW22BoKXhgDAwKqgqQ3B+xRKyiVeKjDl1jd6zWEm1IVEKQKFuKvj1cxnHcAx9fupUL2sPcazET5etSn9p4VlfstfqmRU8TUGmYNeyoEeJuS4ACkRjalNP1VdRy1ERxdCBZ7TouZ1Ts13bRB5sHUPyRZD2lsb889KdrmI3PtLH3iCv/n+n9bIM0J4J6UzTmN37SFOrt0o4idZ2g5s0dDi7JU/LazQMNrEVxhE058lZV2u2F+ZpdTNjGFYcRE2rHA/lOyYNce0eeGyc4A4hbgraTNSJBrD5rPLCD1U1gJl0QuqakXqNaS3X2LgDdrIMlPBxekdPvx3fkidjmmmKT50L20lRuP45ArHD1zH/RiKjFOV6EVCBSNhUb8v1U8Tub3QGNW9ZH22DNMQ5A10Ja3gKXE2FjoQDtNvFvsnhNx/61+q/iTpc8vKUk9ApCG15suvWWKMwaSCrIQCUQWQRx6KkbBJPkOGhAF2KGjqgIwkLNhfbmyXQyMPq9tZl4btKE4oK3nSiXpPLYzIVh7v4glCMNjSjPXqyu7hlasPHnHywLGKmiWL+3LaNdn0l8p9ykPnYtDH9ORBDtpItrbQlqqow2k5PzeSDE8Hchn5WILoQeuTnC3TIE3x3j2rS1TwnxX+2C3I2BRCSKqdz0pdHHKMZlIHjOMun42ECqXJr4NK0QZSQ/lQGqeFYgZGhRVm5cG0SsikFCRe8kcjaFOMEVl+mYIYfRFUqxOqClL4Qtv36yR+QK2JskEalkOnmUkzbmZVEVKSqUFrhbNMcLWS6dJ3pG04csNlK2MDqijnUAGY0IbRPOlZgbipZD11TkY3eP7WPf7OzzzB/vICMNaCKdbjK1x96BGwHScPPAo22N/8LGnO5dk9MmF79mPE5bk2M7+/aCIrCnfnUjQ0K6OSMUbK8BReYWcDotdkd8Euc4hCes5yTg9oBGYDZ63du3KrQ/buKUnzajC9pvFMVQ4HXF7yrtm6el33bFT+jsNIfu2XP8RXf+O38uAbHtcA36hKq7T9NKN152jZcbZdSiMcHUrG6U0Kj5GC26LeTDO737kVoRtjQmXzvVjRGFlqkBJcGpVPpO7n1Uinf+6vrI0XY0+UgqK4JbND9o+QGXV96gxlW885YMNScSZbls
qojDVREc1Is2emDmByHIyCquL++xghbfn57QvuvHDGyRuO4OhI7t+aquJrQiiAl6QCvQpWMx0Q04/R3AX5VAyEvCRai2ENmn7nik9IWhUy9Xl7CJJJ4ctbiBjFgl7FmKJu9fxHqtKPVZsmLYlRxZ7XlrAFz3/wTm28nTk6EoR2arKU9oftNObpVgWfkiHlrRMUNnwS4Todwpo6XbKmMxUs1MEWZWBZk1xU91lrAefwWWwqvCobKaeQoXl15TPv3dStgyTQ1VFMg+YrvV4Xm7uYay2kRIH7PjRZyYaTq0g5Md6VZzJKBuW6SMqBKL1wanGZGWwa2NwCzCrjW/AhoGERwjfrYXFnhDbhmZh39/Zdnn7u+XqzTlsWrt64zrXH3smyLmSvxDtrXH30K+jbnouzu7gv3Lv5WT2cNeVJLXrBKJYaKmIBvSldLo0P/PxKLiJXtqGKoSc1aqxgvlRexSQe16hJ8i7NepLYJgyquQxUkjs6bl3DjCp5yKGI2FGyvLzvBKwKYZvVNUq2s1p0z332M/zIX/pzfO+f+O/ZvLKuiZJYKjPHM+jR1U3Ry8laMsVS+sABCaiteNA7qrpmPjZ1nxNGNv0e1320LDw3vWz+RnY9NF9QEPzP9VWVYxag0Bxjk0Jkynwdsle8Rt7vNKyMOmNCU1318yEcL+0+SW9W3VAcXKrUqo5iTXwRdt5CWJVZIy6C7XSjNWfXGh3Z3bMm/qQlOdeoK8BxMUg0SYrNFBY3BCkuq7G3oLXG0dpkzkrBqn1TLILkf133eZjIUUsR+Kb8miyuRM9VVmWnPeCQ8Jr6hDVsQB29VWw4xp1fv1CR5p3sXgNGVgYbQuUrYMx1GnpadcpFUqPDsHLodE1r8AeECpNc6i43oJfksYqJpq4atEfJlDZ9roIUh0uiydKICAbBjlIEQcE/SdSMBq11rQH6xIY//+t1Acvo4S79L2q9c92JNCDwXh+g/rxL/aeBvyBpERWbmlb4WO1OraIMBMczp42PFNvesDLNiMyMauuysk/Ixl/5/u+DizvsFilTlvWIKw+/haUIoFaLIiKJTUqQuLzg3vOfIi5vk2R1fQFRBiJFMd7H/yp6wDy5tGTb4Pyis/VkPzhkZk/oxpH8ysNoNG38hR9qYTrZGsPbAY6S2UmzG3uqdYbiG9KobGC15TWyUMeJH0gkjceTPAwDWnDz2Sf56D/6+2CNZk3wCbLApztpC9u4LOJQw1PmxEQNlTFVSD47sSwytDoEVOmmTUfxoLkeACXzUQ+4NjdJMxFW+err/0vwslIFzUxudVlhyvNuwyXNa4A5S3NYtJESWpvrMsjWq+JV+bfkPOSLb6pixJqWloUrVZVUJYqx7I5ZTnSdAmO5vrJcW3DmkA11TNFEWLt5xexmrVFliQYNTDzNdr7n/MVz7j1/zulzF/QzzQt1kzv0/M5eeS5dnavhymlB9xfvhSWXgSjbAb46eEKM4g5UlnuZAK28Di6ZG0F1QWG0vZV/IpWDv2jNkb2mUmlday5qmSWrsog5OMXQPlIS42FW90r1/DCNz3MbWBVKJmZCe1kcGFlIMeDcVwAAIABJREFUK5FIYOv0wgia1TgHOaxbxSykNM1IPZFa50i4oTBBDjLhV3q9Pip3ilQhGKOxVBqbBuaKUM0lqAFaZUeGCS7IYSfsTvK9sq9nECkjQTj4GMWyN41yawWN6PimLTL+jDpsrBmf/PWP8Qu/+GEuXrzDcuUh/I1fxcm1R8CFAV9uwWrO6e3n2LYLtv0Z+5duSqN8ca8wNVUkhpVOtwlJMZFKDBkawsEjWagYWHcWlR46OCxw91qwCi6KJoa/jXL5ETRX2x69HgAv92vICRpmB6jFsp5fLxtUuf4S4a9GQHSyJttIUVBS1dSAkP045Vef+Ene/FXv5ehoVW5GTnORgPR1PWKMoKfITsFwyJHsqWlVQzLJVnGvaRpHyITohtIUqYpN981qUIvVueRYKABujkZ8VUnBl+JlUnZEhafpf2ZURPFAZciLglqi9vfpYMwwObIRuS9z5Hzoi8MIx6wLGiMPsCYjyWacP3NBW4y9gWNsN8+xh09g3dE2py8UnNkxvPwUwYhgLdhvkoVjGHYRXNy+5N7zZ2yng6NrRxy/8YjWmuDFO8n+mU66BpBQkF4Pico1H9nBksWGCqDqxjwaw3Vwt9oyNXO1IAmTK1lR23nYAMVThAqbUEdXgdZCHAl1MJEVLVI6edeMgsQ0d9ZUi+eioil7xZhY1obPYRaExCBV7dSoz1Fa/ihnqlVXvtYSmNLsNjkk5Uvod7qUNmUtr8EfipG1IWHEgT98lab0dbG5QylkTB8yKEebDcyVIuhphbd2naSFOw7EVrsLX5WkqDb6sNLJojazIGVa4WlhdROTXAa9GqY2hONd9s6Pv/9vc/OFmwxf4OJF+OwvcWHO+b1nIYST++6I/elLgFrZNpJ28Sx29ym6DZpLf9tODI4c28PDv+sG7Wpy+Vzw0hNnwun2ZZkunK4OfHEQ8yYH5RwNXZsRtEqqm1hzHwUFLLA7WWmrDs8czuAIOxv0/cBywX3Qmbk5WqSOrtVClhRxvtSeZzdsUTwprok5v/GRf8Dtzz3FY299p667oWrVFXi1ricsu2Dtg8vL8+IKtGG0TDZPEameVekWOOvyNky8EkuZOKqzmpbwNFjXhoVSMG0k6/Fa2tHXEpbhINMkVHWqgiy4qQ5AmVi0br1pO7IRhf2utaGoMHAry3vWEz7kbLZmpC8EHRujEggrNjqMs091du9ZMLsge004WhrL8YKZBr/TmvDrrOo0lTgZWoA6pEpa2Ufn4qULttPBeCFZr7bC1jRj2DvYGAQKQLOu2cSJkj47ieVCY7CBxk7q9KO7oJWGcm/mEvfZxWVThjsFzcxLmtNTIRd4VieoHBpV4TOEx+pQyCrbGyoustc8h9FwBn0sLNXlZhq+qfBJm8KEkmRbF2RTjnEsSsIqVdNCsrWsbiQln45y5RZHpUM6EFMmmIkqbCTt177gVJTBq9Qtr4vNvTpCxvCqNAt7d9dJN8Dd2UaUEkJVY5Te1otocZuXxKsydVX8No0hoSpxmlzgvvFmnr5VnVg4f/Ov/g3+xl/7G9RALiDI7Zz9078ID78DyyPwpF+e01Jkn58+q4n1F88TmSyuBX/y5Qv+SLK+sXF+b8/p3duMlyBZ2H097G6sbC8O/MWFy6fPGV044NzjFO+1MOdVztRLJkHcjIxRAIpyLpaHnMe+50GuPHIkqeVQNXH2mxc88yMvYGPQa5Go5ffCtBUhPFIDP2A6pwR7uBej714KhGRE8rf+r/+JP/hH/iRf9tjbpYygqXNKyesyZWtnSfrmUuB4k2RMEdcasECKXFKpWBG+Ve1TxjIAlFPuBSs5K8dXjiWlRIekH7nMPK/hy0D5R13XX8PGqcgMK8dtyRorrrqNUQowdVm2Isii2n9Z2Kc6JCGl6BhDph0WXQM9/+pO93cH10+O2cdKb5sgsxhSp6DCwPZD+vil8GI3mqWqVNBc4iLXxyWMofsXN418d60PjN31Y27+w
j0AFpOvwy2hDWWaRxQqqWeyZZG2LapqFc82VTHZ2wED14OeigQZQat5CNqsi3MojitMa3VKUaXdFw+TC+AajNF8ZhkhaGA4zba6LwXbRPknXN20j6xuIg/QZtWWlQHv+JidcEGqBa8SSbdkMelBvaLIB3PAUB6ylyxRZEpKZaVnqQq4V3m9LjZ3QxKhEbO1qqosRSaNXcxuq2KAp1Jg4o1AVhgQvf6+yQmJYV1i2mxGHxpGUNwIgPI6ukMz5f635MVbN/nABz4gOdJkr6OExnaOPffrahtFXav6tcS2PZjIT2+QO7j6vhP22yURQb+ndnKEYKbMwViTy3sb7eqO5VHj+tfe4HM/dlck2hh4c7Y0ltyrU3Fdm4iUGaOu1WFzNln3udpYrqmdiyEuAk+uvvWIZef0Cx0GxlYLST8j4eB4nNB6TB1xHXSKO3VaE9TlDnduPsPHf+nv8si//HZlUAPUxKH06WMwjndXWI9gXFwqFrnuRVhJLcutF7XJW7WkeJmabEpVo+ScqjqDjWCH2Y6gptEfyIHX7qWyoDicoXyiSvHHYhJ2le8T+mxbGwXP63NmaENdmINLVEUbVQSksRURKQt+he+hizvSiFsb48Xk+I0r917YQwZ3nz9jf7oR++T4xoq1BkcLxyd2UKoNS3oPcp/kCJme2sp6xdldNcbmPPjt13SQZtLWhi1w+mvn1TKq66R3Dnd2citVmKmoCylyUodca1l+FQpWtJJ12izTq9ArhURPrMkYqPXrwrRNbtvSWd2f2WCSPmsIiboBo5cxcmGhuuAcB+La8mVQ+HzuPEtqXBLSkZPgw2jFHRmLD/o0ODWHTVBrc0hqL0kORZTgZSVo9qF7PVzwlPVGFx31iq/XBaGq87sicKlxXF1tfx9Dp59E7rohqY0zrE44V+Xqh9amK16cyoVoS91Ibey6+V03xSGi1ezgSmpL+JkPfohPfuLjqgAiS47VyaY4zuACz0vIC3x/gfUL2O/1aVIYqO0Sf5dxsT8jCGK9r85x00YXLpTIbFFaYw4dCG8/UkVmi1Iew8imeZReMaC4WkJnOtXuH4pWFYvy3V3ZJZQaJkalCzZGDrYEehmABEwKpcym4dulflEsc9TDVVKXIj5J6Xqf+OAPsz+7ixyxeoB12BQ2PJRCubByfHKNttvV/aekgJUAagU/pUFh/AeHm1WnFfpvRQo7Ww8uL844v7xDYPRxUBe+pi9Lg5ni12ApskxogLZopo8hAYKlJIxRh9OCuqiJxyovvLwbBe80XAd4j9JVH7Z/PMVp3f3Ve5w8dMyyE1x1eeuC81sXnD53wUufucu9m2dsty7ol/3AVUQPzi+Sy3uDi7PO/t5gRLI72XHt4ROuPngFvyKZbbixXNtx+x+dsz9VVRvIub0V/KSU1Kp20WCPbrPbVrG3hEF14JVATcvGWIS5WkOHfV0/CyPbQpCHqGkFkk0Xu56X9LJppGYBK7pAUtpchqSbCGKJeo5Wq759LQ7DIZbae9DepIBDaCYc31MHowh/HbRbtoOhLrtgyGzS/3ubO3VWqFsyYqno8Lq2BEvFFediLCXJfqXX62RzF/nDYvhObQdNORQ6tcVCaC5pseoCbmRwiFEtLaWqWMgQpic8rcgTkiULVY4mU8Cg2irh8o3k7tld/rfv+z4yk+6uVrGJ7nAaLWoSjWA1WbNNJ7JUZsHJddh9zYId58FUYqNCszxhdXwtk5LpIVe2hBF9cPXdK8sVx9ooE0vUIhfjPoCWxpyHObWzsxWPWV2X6UO/qIlcM+cNv/NG2Zu1INW8i7xK1+/UI+HFTfAy+Ao5SO0+FC4ewBj7S37m/T9A5IRPSiWgvEhxB0gdQIB7o3kTb1D5QtPgZITkma7NP+oH2JB13XhZvoZr6EFmMkbn8vQmZ2e3iek2fC1frutQPDgR6kitDqsBOoyZ1Z4dSLh2gM2qwEnAQwRh3P8FHkWYe1XAJQlW15XSgmNszwf3fuWSK49cwYdBqINrq7G/3dnf3dguO3GucDJGY5wN+q17nD53j9Pnzjm/ec7pC/e4vHOugegPHJFD0ErbLVy8eMntn7l7WJe5BfuYeHcZfJjrR3lFDQmetlAlru68TG0I1gmG5iQPQXYtRYCm90rZTMghs14I5vHWaSMxWw4ksyKBVfylGyyTo0vMhzbmqGx4H4SL7G37ej7xQ47MKHliGvhAeDwcvAW5GKPyjyQHSWUnuRFN8RI9UCFSEeDeDI9FvIvJRXsYNzk2zVeYmPyrWFRfF7BMIj3vzjt9Kk2Ng/28h2RjEyZwUxvjpryUqHzj9MMupp9bOeXpaJNMGK3aK3RTp5xO1SDst85f/2t/nbM7Z4BXIt2i1EOVtMTQwmypFjHKJn14kFiwR504HthmVQHpM62tgcMjX/kg5sadT93l7Dc6V75mhzJhBFNcffMVbu8uyXNj5kdDbRKAuxZfgB7SqbxxK2WFKofbT51hcVZ51gty5Cb9uSJgN0FYKjLuD/iQFbzcnzEIW/T5pqWwME1xFNQJJYz4Nz76BJ/9zV/mLe/4WvkTzNRBGAJSbB4IyW45YmlH9KPOLoP95SWjD8w2kkVQWSrPw0YQrsiCZnXQVw66RccMjtYjcOW6974x+sWXZA2/6svE/0SlXDaT0mLOMGhAbwoHIwTPmKsTsqoMtbKzahGjRUMyQkEcYTVpLIycGwI18GKUrNE0NPv805dce+cNtgePOH/pAuvB5kHgjFt6Bu0NR8S24bvO/u4l+1uD/fkliRN7uHKy0vdBnF0whqCDdm3l6PrKUz9zk6iJXFb6+Gw6lHOxOnRmwaCNLkkVL2i951xXlZroFQ9glFmPFExq0/egDi+91aGfdJzMTfuFVeEyi7KW0B33rKA+n5oE9A1jjkfQ/TArw52gWXMNAqJFRQw4I+egIB3a1gxGhZwRNTc2MYbWb9eBv9iQH8RFYOOG28YoZ/nkmtwDMculFLMiql7h9brY3A0t+FE2W5pBp3BaaXo1byxKUiTyoyjS2vsOtwVQjoNywXtlRSSJuoE2oshAvVom0RR/++Enfp7v//7vL1WKKgFGyIyx2uHvCN03iK4QpLZTCFcky3W4/jXH3HnmktIKkDa48sh1rj98wnp9RzsS7veGB054+GuDPjae/9hN1nXHOIZ7z1wQl3IMpi2qdMtZFDZnaJoMKUvdbEKyyFSVbJ6Ms4392Xnh1rrdETB+ZZCh6Uiq8rUxC0sUROYFGzE3DvO61knLTsZCtMr3oTToDe7deoEf+cE/xx/7U39eYWG2CFeWz0MVXR3UYygYri0OQrXINjRIIuLwsGRWvocLohgDHfap6APlYyeXI2imCUNHxyfctwS9di+rbilNzsvMxJYiiEP4snd9bp+QAdX1VNi/W/kVwmXxt8F24FpU+AhVLsgnjCxTkJVE1mvj2O4kz/74Szz83Q9iq3H64hlLJssVRMQuia0L2eHsYnDx0p5+sbG7sWP7rDamdUm2e5Xtk4EfLex2znM/cZP9p7uKJdQNZ4RgRcQJTFOQ13qVXJlKIpUaKsjKNcqCBrNQ
Gis1GTUToGTPgNmMWpuzhAdpjTDBncIxA4YkiNHKNOZeRrGsHWXBe8eX6qiH01wDrs1LZBCwTpx96ABbqtso3xITSU4PVfXMyUx5UG6mD5oL97cs4cTQBC1Hnx1zuhtLxYYYeQihe91j7oKLRVwoanfI2GA10qLmBSpLZqjyOUwTKllTVUOUMkD4eZEt6GQVvDHub1ABbkEHPJzY9vzAD/yAFtxC6cMnHFC/Jyjnmci/5gvOCl3uP/OVtJLf2ZCTzODq4zd4wzsf4srjJ7SjwhIt2R05y0lj8cbxupJ3Gu2ocfmZjcu7vTBtmZusZpx6l4tPChctlip7yHR6GX4ULKONt6GHXT9Q3QvFW7iXVakyebQJqGpYApbSY2eMMlto2EZasA6KtKr0SUT4Xt67xRM/+6MKOKuMDA4zaQVPLYAvMoKIzx2M6LLo1+fy4TXMpNQNTNOp9MfWVAAYQBj7vmc7v+Ty/Jyz01MuL855rVF3iRoGrSIbLNDgmDRYmqpqB4uDBUbEIRo443W46oepfR8DoB26LsE6uvYqG4Z4Hyo22rJaS4gYbOedmz99m+PjY248egW/ssBaaowYvPT0Gae3L7i8eclgsFxdOL7WOHnnMctjC+Nyq+5Zo/h21094/v894/ZHz2VSGynuS0lhh2rTg3LPShxotur9JVhXgZBNpZjig6WeawUzGlnDO7SF6/lUp+p6AERetiwEO5gadIOXxTBASZYgYDey+J4SDKx+8INYE8wy96T57IoAL94k5awn7aBkmSP2iNrUfdXeNJQQKWHAjCbQfjbHD7g1jWcFOqbJcCRNODBTzvlqmOPrYnOvS0CGxtVFbcheuRo2sSWz+nfIrW6YVE0VKuSFIeueqearRb+KqLDJsLkweUo7G9n5uQ9/mE996jcBEZ7jZXBKNkWRKjqvHJaNYu8T8yC949bx3rl4tqaopLG7tvDwW6+z25VEr/BmZTZLr75cPeaRr3uEx3/HQxz7ykufOBXJUtWycHSRL8NKN9yEb3gpD2SI03shO3kK/TQwWxhWQw+y5kM2SlMrUi5nfkeGNNMmPiNC5pssvDK8lAhDZO6omEA36sHTAbT1wcd/6UOc3Xu+eJAA21SymUbpEUFGDUGe6yB1mIfLAdgrf9vIgun6RIDUBXSEsQ7pvj00HFwzRvf0bV/V2Gv3GswKHRUjpV3GGtlVL+Zs5z2wXItMRgFyJtLfSgxgEaoGkxrcQGHx2gAiTXwLWYqjgoGoFE7tTow7wQs/cQt7ceHBL3uQ4xsn5LLgNPr5BWc3L+jnlyytsa6NfmEHNzWt4Wvj5KFj8tR54Sdvcfcjp8wn1m2pwmsc3KXpYDvtqWY6ZCJ7OdODnskwI6ILyFitZjygdFcCUiYjeQamYqpXDAVgvbLhtVZnVAlRG3RKQsqUQxcBOhIs1wOpL0hMRUSOZL9B9KyBOhWsN9d+GIvXLlZ7VKs6ak6LUfbPVqS6YJ6WU6teBzRgq7g9Zc00FhvFOxRcVQWY4ZWH9cqv1wUso5a+sKZozKPcqnWP6QtObXCi/puqcJeyO6zS46JwuUJ3hunkp+vB6VAGCEELUyh/cbbx/h97P2fnZ6owbCj6lzLzYHqQysEp7Xm5T5nJddUS7o3+vLF7247L0z27Kyux74yjFVJccSCSzAzcg8t7l1zcueD42hG3f/4MP60OJEexSmr9WpSWP0v+N5yo2bHWpKPVEOKF/WmHfxj4slQlJ8w1IuGiKkhHTFbJOdIcYy01Q5a+HCBEQlOVqPEyBZMeHtFbVVAYPP0bn+SpT32Ud3/dt4q4spXwTb6DcMIWQTpLFLyycnL1Qfq2cRmdNfal+Kg2Pu2+pLBknYGeCndV8sRUoeiNDiuzz2v5Sj3Mc2amJJFgvdcz6wdnsIUxfCg4bWbTp7rAUQ84LjI9ZsSGuzYvqoosWSU5NdOK6/VImg1dTzMVFneNmz93l91njrjxTUdcfesVLm5fYGH0/V5EH8ghuSS7E1n2d8creWk8//ducfmrGzEkTWpBjbBTmFeWYcetyMTqwjpBs3pmMhlLRVxkYKzqKEu7Hh7kuD9bd5pUpkeFlApNsLyctVOvaCQs0pJnBey1OtwMiIrSEE2ayr5hEaRV204sJpw9ZP23VH68M5+BqHGH2sYzZuhdVLKl5jRPDHlUQujGIizfgSG41UfQvWNojjPNaHMJ18bRTUWaT/znFV6vj829dJ8xpua3l9ZTpKEfSjWRFEGwhAjCzGBYqUpGaWpT1dwomVSEgbeZoDrvNRnC4CH55Cc/wQd/+qdoTcacDGnvaV4DLQbW2oEYbMW051AFThTcMZxcnPNPXHD1+IijN+24+vhV2rqqu0DEjJLw9ECOkVze23P+wjnnn7rk9MnzA9nmSDOfuehhGUnuOm5yhi6eRA0mUNVem0E6bgM2l7Qqo8YNIomY2p1y9GleqvbDqhgTqjZH7kkXoUPVIfGyWIPhOiDaJDatrOTJz/3EX+Zd7/nWslWD26qY1Cz+YLmPpU95nNnCbtkR2zH7i1NGL0wffU9pgzSZfg5VKGegpcKbCDvY119rWAaULqohF9q4h4mAkFlL+T5hrqouhY+HNTpgniy9Di3Fbqqlb8WRpO4lgPKDtMFkSQIJZcbARk8NRbFG4fIQmZw/ecHF0xdcf9cV/MS5+tsWjh89JvcyTbGIxG4758V/cM75dslLHz/FzwWd0YAWdFdMb8+hTm+SiyWhVdCGojMEs6CHclDdsu5VznMrKHWJvmlW62E1r6E2UnliKIBOh4SnH2CPqDRRjXVMWlrFjCsVNiqTKCaBOSGstJotYRVnYYfOV38WB8VKFh8Ien68iVNSxy1vA4veo1D1fvA+yGxchyGCkKPV89US9olHI7KryGqjEI9XXtuvj81dRUaNrdKAgahckIbw3FZVe9SwjfBRpKpMHVRGiRqgqvglJVEZObriLTyI7ExTk5E8+ZnP8D/+2T9Lm7ktkaRHXdxxcLxJfgceiyCGjSJ58jDOLpqqADPj3j+6YHe6cn6lc7ZuXHtghy0Ly5E+dr9MLm7vwZyXPn2HbQ/5G4PsC80GY3PCtmrXK4nY6sAqHW23hHJ6jhS2LfilM2iHfJdIp0fJwLyRfV/ET5mgSndtOVUO+hw5q/bS/+q3q6KIgFZW8mxyILrPnGttwbdvPsNP/vCf5zv+tT/G6kstbLXMblXi1y3KoftntqhKKlgxPJS/U1Bb1M9uofuCKcAtbFQEbplktBtyCO16jV4ZGu/oJqiJXtOtFnV8rfDjlkieizEqCrhVWx+u5yBCkd4HchJdN+bXcqrJRK9irmHz6F6pC9NUsKysFgq3J4J7HzsnPLj3UdemSrUZLRnRFI99VptsGumugwvDQuqNKNwe9F4DK6hVG2OBh1CihSzCVLG6Ksy8DpT06tqmpJcubqs+szcrXi2KuJbowur9WT03JOVgLafrIki0KZgIiS2056RTTlK5m63SHlWbbYpjQB3CTJc1Q7EBNQKrXB26L1nSVUoTn0mOHdkG0QvSjKzOpNCEUC5NFvvsixOejC7
y2ZR3XDvd53+9Pjb3gmCEvAzcxIJ706mfhSdO4hQoCESY+HT1zcpv1Gg54KAlNquBIKOVs6weiEh+4v0/zmee/DTgh3wVpbrpBustztM1iDlUwb0epNqEJixSGxDA/smNmxcvceWrd8S9C8YQwegrWGuc3zontlINfBbG8wo6y9Jh2ZBEjJVqyyV9nBWDro3adqsGwl2KA2pEHuZEVdSO4AxrLskbqqiX4hMOluZMQUKTqppkbOH+08p9gABsJW3TpuQTO5dm/5f//vt577f+Ph599M1gQSyl4BiCEUbF+IpU1KOfQO9D9nqnNuskvJQMUJVUYHO6cbW3tJIZ5v0N/rV8LWZlOlKCqBL/BGJNh3AW0egWjFxoyEafCv3RVpGCCD0rkXBMma/VgOeYUnJtChjOOEBsksIKuspFEBDhsG4QS3WLpZLaJhhWhpwqejY5dQQ9pOAdZgw33PdcAKRjaQdir0/+r2Jx2ywQFhUEVkWeWbmTsZrqVVALRuX9FgSThwTK7PU8MqEbQYo5N9SpoGllyBvllTXBwZppXP1htRMibFO8VFXIhxGW9SxY5f1EHaYtg16eBZsYeRYvV3xLZNKb0lyxcfCQUNdsJKxNiESSLKZOYA57Nyp5dp69r7TufovW7z/jy5iDC3KF6GonR1ZlxjyFZ10OjAoNs8pDNrCa2+Xu9ExJjCIgG6PkSFEb+zDd0M89+wx/5S//tfr9JmihhopP9r1ZjeszEVVOENXCNa8t1CVeoqpG88SykRuMZ+Hei5e0NxntBuTxdJZdwi0jLqE/VVVNVS3heqjIhi2LZGNDDxEVOUBKemm51GFSuHNJsoaVdrcpYpRQRZ+28l2//QatJXfvXvBLnxqcX3KQWekBEQ66pNyISm3UdVHYUQqqEsZUeDFahbuyjadwZsvk7/zQ/84f+o/+G6LIWIYqFUEqIIW/CXesgzyqCrQwFldsQ4RwGFdgvyJbs0KoQqdDZJu7RDldv/Qr+uWvbFFQVun7ZzWQRqIceg3f0DbopYef3MYY2rTcJlpQpr/Cli2mukTfZKFcmHSlO0YYa8rgVAkSlUCpNW2jDkkrk0+4uqhF8s2s9Uj5TmbDFXMNYFOUpYOqG/jKyE2T0JaEcJZeJGeIM3AvaHCua1A131rN1VXHE/UesKiYbmeYJoNlwVLh+lxWGLoON7AuPgPvdQ+GsobCirtSwvLAWClYJOVUb1nZOYbc7S7NulkdRvcx4MLvqwOuPSfbpoLUdcjhoSKr1D1pgck4U9BcSZxrfciSox2v1ddidWxL+st4+Vd6vU42d4QL+6xArB7MLlxZO1QRprpYaamp6aksczk7g9b0gFsUgGCmBVX2b1XtVhjk4Ae+/wfpfSu1SCkzqtQwF04d6TLOQGU+z0znQbLWwzVDk9R1zHB9iTkbdNiehDgGa6P8F8m4Z/XeKtysKiApLGANlzbawX1fuPOKDxf2H4NcSi5xkIihjSI7YY0lkwevN77lPTu+6q3G05+9x9sev4QFLs+Dxx5J3vz4Mb07//dPXnDztDbnUA6HGdDLWGP1HutiCkZpSglEczttcx1AAdDI3HjuMx/lNz7yYd72nveBWSXAikyK6MIovd0vT4ojqDJV1Z7snYf3MPXdoxZ4ZOV0HGRqUWqp1/aVdcKIc6lDKMt01lqZtJT5Pl2q0OSy9JmFpFpRYg4NMO8m0t0XqUjUUbbyKejQT1NXtk3Mt6pgYbtZhwpFJFqJF5y0XjBGjb5I4eEthdFLqmO0mNdXu7tmLYgTs3RyEdQ0G8zCFGAYg1HP5FLVdhN3RnEyYfRWn1EgI70C8Ka5zabEsjwPdaaLYB3GWAaIbfVtAAAgAElEQVSexkMPrHhL/t3ffYWlwdYhRmd0Y1k1a/X528GP/9zG5WVyuqnIPESaTNjJKD08hSWqwzEr7qq08ORg5KKuAj3r0Q1asoSmqAmBC1ooG3+OA/RIbEgw4mqrydC4Ph91J5fi0l5l3b1ONvdymdqQzCtbVX3yBfeEHdDGPKHjvsrI82Bd1kmnSlFJdKr6bSpBXFW3mzMs+fivfZwnnniiQryqSyiHn2cWxlYTybPMw6lYWpvwmnWirMYUXBJpgoPSyMUkP0RY2zgveWbByFN/PFUFIEhlcR0oYUGa9O7eVrWiFJY4D8HK67YhQrWj92YmPPwtjzr/1nft2MYZ9+4GD15Lbt69OBBxb7jR6NspgfEn/tAxH/41+NVPJU8+s2lTqU1UVY2qNcXOvsz+bKPyaErylpCZmCs+YH9+xkd+6ad401d/Hbt1kY63WnhYS4Nf159RWKhhfdXnH47bhMt03y3AerXM7gVVlRV/oapjXdPX8jU5pPQEm/irH0hur4nrWgpW2HPKjNgKm97AVlWGjoinGInbjLWFtHaQsyuOI5SHl1WYWOJskKvwYbNJY4hIFJ6muQGjINI6CHBBDtJuKx7Aqr3Qhrpq2lYNj8mS77Q9VWVmzQ0osZtJ4aVfn8UVqMNRvpPalFZRvTUdGbNRxZw65lYRbCq6xOeQlbMD7FbjO9634zu+eeX8vHNxuXHz9iU9Ocgdtw5HzfnKNx/xX/z7R9w7O+YffHTwEz93rvXjWXvOxPpLdIDitiXPTjAXOlBBZi+PFDYr53FQG3uUSi0P8RDzoMBkzIIs5ZQX9i6s39xkxDzoPz//63WyuetGWl/KVxAspjmiWgjlJnWEow0TTJCpAJ7CvoRnVehVlbBai3YIZjLUqp/ePeW/+q//NC88/RzNkj4zTcxoxYrrKNCDNKySKWdLTWVwpyFf3GCYMG6vJ0ZxCCkbP6USqDauBtxo3wl/WXSCrM2B3Xc0FIYZkYJXUoOgh+uhM7IeRhk8fIZOWfAVjzp/8DtWbt0+VzZ3BpgmyJvr8cAGlxvYYjx/a+Pr37Xy297h3HnpGj/4Y+ec7UtSh9cFlRHMDbnzUCVOPVjTFYkrl97M2FrysSd+livXHuK7/sB/qO8NPxBOWZAXCe4L5ODoaKUvxv5yj+UoCQIlZ1On0AqDHBYsIfXUhO10BuS8iK/Za5QPITGW4iuibcqad1WlMsI4PeNQWEQzjWEEliUPOSbNjO5ZFWSRPm7YFsLDSzJKQ4qvEBfEMrvZrCwjoKrSHEazIFdh1lmH9zTyiMDXQbowJP2rPzOSiCE3dm18dXeIJvlu5FKQz4RNBF1ErSerrlVnQmE8zO+blXJqXN6SNZFr9gyCbRW/q6+1Bt/2LTu+5T0LHp1PPn2PsY9DkYVRpjFd04uePPXsnqUF16463/2+a3zLe1Z++IMX/NInu/aEQO7deodeRjs7XIoyQY15WAajSGzTo12IhNQ3SUE6te9YyinvIP6rScIcHUYkS0VNDA2TVgf4Kq9X/9Mv1Us7XxEzOr23icd5MmqTHNQDPiGDwrzNUxEA5eSUO0zkkibRFJyCgoDS4AM/8ePceuF5zaNctGlODGvqh9PVInrpZWMEnVkp3s94MNuADZmhChYpYFLKkAAPWrlV0+RKNHQay2U5H6RFrXGkoJ+Du6EeQpxsfqjSMJTLbSLpnF759o
O3vXHh3/z2lbu3z+lRC6JJtY+DddOgjNTfr/HZXNzr0IOvfFPn3/t9J0oxTIhhbFUXyrGqzgFDSiOTuSaQekb8RFVSpUv/zY89wYufe7KULlEI3Gx8s+Sq+4r6NY585erJVY6v3cCOT7T5F4TRrMisXIoQ3MDjoAJqoUA3XmOdu7s03Q5lyml6v56QQcdFRCOS0V2BUaq2oziiVCfmL4uGczQcJkFDD+p2ZEJbFFhTgyImJqnRbrUvuuINssr93lyheEMmuXSnezsc6O46TKN+zwxjiZYQwVLDb0KLXOrIpq7S6bLnVz65on2LVEBHjJ45I2iV5IjWXagLHBbaE7Iks3UllM9Tc+eWBDf+ha/f8Z3v23Hn3iWfe+6Cfhm6xr2ikAeE6TMVAiQYcw937gVPPnvK1vf84d+78rVv93oPqi41N4EDTFYt6OEQxChnsbHU8yH9xpwrob+c+uBlElRxJWqpTJKlHPQofsLFUXmq+381pQx8EZu7mX2fmT1nZh952df+tJk9bWb/sP75npf92Z8ys0+Y2a+Z2e/+Qj+/7p9ONqsxeCn5VGUmaiNxMF8q/1qEWqPGWAE9y4maRUggTCtJvMniriWw8PyLt/jxv/0BtoHkjtFKfzzwkvQdbog6aVoZjlo5IW1Ux1AOymBWITqpRswb3Qr3ndGdImSGBb2chqSxDdhi0+mftc2lCC4r+CML75CeXTfWGDLDRCdY6OYk0sF/w1etZO4JF/bcUlLSpRlXjhbWnaCmabJpi8gqDCyDe+fG2x8b/PF//QonVyEbRB2OPZPOfRJJiZgaiOzVXSo3SffFU0aWWy98lk985OfplXKocXs6SN1C8Ex1ISx2eHiaGUfHK7vlhBxODB02YR1vA7NDm1PhZxzUDq9UuH8p1jbUJpAqNHJIJli2OO7byAV9RUTBLVH3t/LIqQCCIeWNVSie09gs8ViwUEQzoAHUCbZU8WCmbmYSgeHqGGlg7cAzRdlHMxU/vEYvpdqOKBNbDmbaBdPQF3XgetnkdQSbstPnpgfQvNzHxT+YEl299CgtB8sIfMTBvasQPNQNJpJhuipns1kACVbK0fj2bz7iO9/rfOape5xfar7C7Ahe2q+84ytOePtbr/DQQ8dcf/CIF24bn31OU6uiDESx79y9t/HSSxvf+/tP+Pp3VkwCgkwCHboWkq1maeatSZe+T4UZTnNVYxaD5bBt9rK1qUItRuoAqqhsySzVTVkpyrIEJNr47VWb0i+mcv+LwO/5PF//c5n5jfXPjwKY2dcAfxh4T/2d/8VsZrK+8mtWEQuLTiQXQSqVe73/MLz0oiT0YsXxBQ4En6qM4fOTaYeeVmypLJJ/+Isf5qO/9hEW7wrpN9XnMRrT3TrlZzpZaySZ2WFwtrDQ2RbXIWSQrSAdtzIURQ0LKOIUILtkfug0n25TQiRPM72nYRVLjCopSfyyNLX6SFRLJ01+SQzdaSt823sXbt3ZBAu4aXhJM97y+I73ff113vTlJzz5goi7dYUHr62AVEZLg0wR2u9+W/Jt36Cqo9X1MRrNtISmBM5QlWrVUojY1HwTfCp9jSf+7v/Dtj+vytOqEouSLoYw4aQWeRlR0CFn1pTH3YRTGiKjbJZB3JekuM/a7rVb20DBqFlVtJfqRRkyZlM2GzVEQhCDqnnNEvBaJ56DTE1GmiP6vK5b+p6sISDqnHRTbAi2iLk2KfmkK7tBiYPycmRmZcGjrtMUmpUkblt1zMLeK8BZ31sGvpwphgd+wSqfvS5DzPuctDLZGU5YI5o27GHIfNgg1pJ2mjoOUylDATHaI4pQn2qab/2GI37X1xmfe/GSniJu0+B0Dx/+VeehGyuPP36Fxx+9ype9ccfa9NOefM742V82LuOIB66trE2BgheXgzsvbfzh71n52nevigLW5ZfT2hJbStWeQC5gkjKK+NKfuUUlyOo9Q1TjU0FyrsC/tvx/zL1ZsO7pVd73W+v9f/tMPUotCQlJlsFCRtgYzBA8BOPgXMRD4ZRvEjsVV26cCyeVVPkmd6mkisSuJK6UKeMh4MQk2MQ2NmayrAEEQiCQQIBoiQYJCXULqaVWD+f0Gfb+/u9auXie9ztNBR1kY9T9qVp9ep9z9v6G913Ds57nWc6aBBiO1hzBc4dWGpQj6++wcu/uHwee/rwOMXwL8D3dfd7dHwU+DHz95/U3j8EFTcdRH2TgwZMqjbAydEeeL6rShccF6UGsqt6sPpn+iws9zW8PPvBLH+Bvfdu3sZYQR7WYIKUKZtrO12eFPUxbqqJRydI0O3aWrLAiXNS/9HYokL8FXgI8h9otMdhSXQKyRsgAhpwsx3ZmPNODN/OVFxdW9gLC3u7OItIHDSWaKUewm7d3BYMpIc+lbeOr3/QAX/Z77uc1j1zlD77xfr7lG+/jvK7x4AMP8Ptef5UH70syB5laA3Kckxt3dr75j9zHl7w6yJZkXp7YrvJiPR+31wuOctLOUzbSxbx94zo/+N1/nTs3rpN7ui0bVh0mNdp7OsG8If/dsMJPnzkOhi/0IEo2RsTJx73vsWLvC3a2CXdDggwEW+DtSmhAf8KM1Spme4i+tBSBqr1tqYqNTEeIcpsyw7BBofYK+w71UkCChW4iDWgWUMbk5SlTZ0WFFl4I7O9ThVkhGKgtbioHdi27CPndBCa0e6MSrYH4gD4Y8zZ+1C2ygvyWIHalsSL0611ujjNtbh1w5uJmllS/lLry7OR1rzrwZ/9Y8pmnb5/2vGYU22HwZa+7wl/8Mw/wNW++zNkZXLu/efnDl3jFy67wdV9xhW/5k5f45q+7xGseusobXn2VRx65wuFMsN7zd4rr13f+iz9/iSuXbJuwmnTCGckmJfbSmaWZ4bKdmKj6Zqhrmx3eP6EuWeQ4+9CYEh1R2itb2u/asYs1FEL69Rw+d+n+O8Hc/6uI+EW3tg/7a18MPP6CP/OEv/b/e0TEX4mI90XE+5767FOnNqtjIxxYCNjDH3ZAlCmEuTKXLnDNOgkEFAwU/AfGyRw8j/vOD/zAD3Dj+nVV+/5w8jCF5aHSupaHTKra3qetROci1imQb261Fcjtrd2wPKMzwo3FLgUlR7MCmuxNQa9w4jkQm2liZuQQat3C1L7C23X83GdqsKo+WV1CL/nzbPZjyZWymje97hp/9Ksf5g2vvcJ99+vnXLoCr3/tVf7in77Cf/h1V7h8Kblxc/LszZ0csG3BmSPLIXe++vef3W3FA6on2VPLM9SSqBV3IMsKv25L2DUEgWg++is/z6ce/1UFj7xL4SyVkMRQi445wdFhy4gpLDckqR9xIeZMFCc/hyrWrkm9Py/e2d733TAIat2nWRQ6YivUGX8eLPYJaB6yxOtZ04WEukd15IXrcnkyOYCHK/TqkquqiwOdvSQqyZIt8Op2ycnySVp6hx7tadNKp3G3WEzZ4DZSNNeOBoO1IBOxqmq9xh39w3TyUqU76oLN1bwYUE3unJbZSwTkrnQ0Xr7lAaZYKrOaq2fJn/nGS3zmmXMujmhx/AjisPHg/RvX7tt46Gpz6aC7Oy5gG4MHr
yaveGjji15+xle+8Yw3/d4NcjAyGJv83kfqbN6+Pflz33SFS3aMxfYm7f/pvC//I/2zlZl11faPUWAP2y8MzyAWS6hbGHTkpjoNUbSl5FWMW1vjXrgc/Ld6/NsG978DfCnwVcAngf/t3/QbdPff7+6v7e6vfeTlj2h4aNw1etJhv3GQMAGIOTR5L04e7XHKYK5YQlW0lg5L4BFTweZXPvRB3v72tyHb34bUrkehmU3uwxWgqyziBLoXpWGkZwLMNdO37wSSvSvImZUygd1ts3HTiuXwfrCbI8RotraDc6t9Vqc+qUz5i5S4yIpfTXYxdhjpNjhK+y8jGanKIi8FxODK1Y2XP3KZR15+4HA5IDaevznZj0FkcunSgGyuXhq88mWX+JlH4bCFNsKYZzp3+JLXBleuaAF3lOif8jDxALhheXosLnx7JqEtf15t5sv6Ez/4DyjEBGhkOtVTQ7OAEw7cATGSQwy2bQjOKdaVAmCS8ugov989GGYm3AuX/N0+29uQ/fPM1Vs48Z1ob7sWR2w+N1E+TwNsUlhAW0MfFcQuJ8WaOrmq3ufJEhkHv2IykYAPt/5r2D5P5QIudKa90JuR2nhUM8wGU3WaMWFrwwvTjpViqpm5x9J2TKZ2KlgF25Esd9JJcAzYY6WmTUnFl6+Xu+WA8PAZsEeSznSE/NMz1BF/8x+9xOteCc89e6HiaQte9crLvO5VB97wxZd45GUb910ebJu6g5la7H7pSnL//YOH79944MEDDz0w2Lbmxp3m6WfO9ZkM6NnceP7I17958PrXJGOWPKe0EoRg0pugT3mJKw7tJ7aPu7RCVbkZQOqGAs1blByrQomgRckWvDiJ4cHrvnDn7Z51y78VFbK7n1y/joj/A/hB/+cngNe94I++1l+79/dDrdmwVJdAh7jbdCz5ErUNIzOSuakirdQBzRLHu+vEJdD3XpeqJ9/xHf9APGGVnRzxG9cbEcU8tNWXChCZU0pWJNSJHV0E5HuSDQ991Z9lXH6YWVPfO1S/jhgcL27x2V9+F/PmU2TEKfBrnjBP7fnazK71aeKrZ2jXY/ZRIiogcto1c5A5qV0USPFstXQ70v470bzjPc3X/v4DX/TQgUdedsZ2KYiZXD8/5/y8OOvm2pWkZrIlXL0P/uCbH+Dhh47cfy24OMJnr9+Wv3sP/sCXnnH10m1u37Z/Tw6pUlvKxWVuSgQxzYQZOvoDqSb17mht2mef+SQfePcP85Xf8Kf12Q0rFSNJdtncpiwheqriixxcufoAUNy5c4voIQ8Ox/AKt8mpADaWt/6LdLZVfupz0cBNRmztYXtgyib6vwZyBHNKyJJDfPQuUb56rVjUc5WzaC6qhgV7LSgyUwtrZg9VB6ECZGt5EpU7X3otlGlpPPTByTs83Y+Jo6y5R7a7KAt0otjtDyMLItEHolafh4KXEz8cvQ9A2E+sjnPfvG+PE9ssw8kigljZS3xnvw/BpVFcOSs++WkF9j2Tq5c3Xv7w4MErl7h65QzMtOsQbLu7cMjUMHM7BKSMvvZjc3F78itPJI/cbN74ewydeUD6p75+8OGPT3Pth577IlOEPuecU4PQSnn3p4ocSnqNMaFDy06SoMvGfLVcMddtkmCt/L1zEQdyEHVxz5P9b1W5R8SrX/Cf/zGw2AbfD/wnEXEpIn4v8EbgZ37b7wcnIyK1k+KIKpAbrhkSphTF7sp54Zi5MN69qR4vUFDi4R68973v48Mf/jC1T3oeqBzqEna/ySksOwjZRqrZ9bAUiz40Cc/V5kayXXmQcfVBzu57FWdXHmG79jIO115FXn0Z53MnxnBdUkRMLagIiE1YcYWrUGPrweLEl+/ruiBJ9zLvgj6mFIiICphD/tCjdUiL5PYduHo5+cyN4nhnp49iKNdUzXvpLIgQtLNPVZPXLiW/7/UHHnnZxkMPDa6cpZWjws5zwDzRDVHQcOs4ol0tyGhJK1vFBunFyfWFTeA4dx79+R/n9vkN/Z6ZHvKt3hQABfC7Kg8lwKH5yNVLD3D57Crj7JItfx1QhhJ9xnaXcvZ5Pv5dn21AFMW57DDcYXTQvRMtK94u/ZmwSiuQ736XW3XS9stmW3QyhjLa7DxRcLclhDu4hR9uPqeYZZXWQoRhr1CwX1y+nM3c48RuqSNSmEYLdvS8S4sr1v1s01kxPBgeCrc6lhh6nSh4TnvAMENWBb2G4/UChbUXzGAbksUcWW6TruDJ4KH7B//el2/cOZpXXsFrHty4Og5sm7QlWu8odthFc6qe1zIgrZ/c2aeg1itXm0fuh1/4UDBGsxlqOb8z+bI3XFEMyGSUf8+uk8oQJntMzd0iy6I/Q27VLBbxIC3eas3ODklvU+pt3J2mabA+392eAf5ORUwR8Y+BbwIeiYgngP8e+KaI+Cp9lHwM+C8BuvvRiPgnwAeRUPKv9udlydfE3O0Jk5JCj2aW+3kPyE6ClGgzyNKCBFc1I1Wx6Hkr/hfcOb/FW976Vm48/7xqyE3ugWPaBCsg9kWf80gnrMIzNTGAzqn2tFV1X3ntV5BXHkIhTRilbttORnD72eeoK6+mz5+B4wWUWlENzmRdOoaUdlvr9VaWtgymWrRtgq7J/gKwUcyhGQGbFIjTZkqLoRIEH/jIzpe+5sAbXjP5zDM7eXabBy9vPHjlwH5pkoegL/R3ZsDlEZzvzfO3Js8+f0Hsywa4iNgYI9inMP4MUTXLFsHroEmaroRc5QPq7TbLn182wTBy45O//hgf/9gHedObv4E0HpnFyY+9bG8WaVbJfmAsQ6gIOBwYOxwvLiCaiJ3qYbzWm6w+R3j/QpztXv83gr2C7CH9RCtQq9JeeITw1g4xYrTEYriaM8Ye6Rw46dbavqhmDhF3ag9yTObU8N1OyIZ6OLFKZH4lfeceQUSevFZytK11oQ6Y8ocuxwjmC192QNVmEaFe7NZ5OsfQd+mt2YR3qNIiJmsBdJtTq0JLK+dkGFZuawTTLu9zMVEWfbKAp+9csM+me/L8Pnjm1uA1r4XDmc7lAD3vSoZeJNAcWqpyvTkqXq5d3nj5g1d405fc5urV5uOfTL701Sogj+aZP/ggPHXdduRrLlYWM64Y1YOMaWGhqv+20FJ2I4aAS7OEOfXvLBV3YyZzOPlMdU7t9aBbaBtX36N2/22De3f/p7/Fl7/zHn/+W4Fv/e2+729+BJ0b1cGGglZ7h2pG0MNCDV+AxBjiSFeh4pA2kLmdOKgKzsH/+R3/F+94x9s8hAlHjmaPI/TG2E0N2GDsfVLAzqlK+BCq3OdQFbqn5D59dpVT0UOz0b4olgMN2L3nMZeoqt22hSbkkmur2hlSRCEEok+Cq5QIQB7ZBLTzesZv9kY/ydh1oS4umh96zwV//b++j8NWHI9Q1wYbqrT2SlUuA86G2suLY/P4k3f4tV+/ybWrB171igNja66dDX7+sQtuPC+qnKyNg601zNxbCaCXhUMjDF5ebnRrs70MoUzZDL3X7/inf5srf+kKb3jjm8kx1ILPts1DSD0Mxh49ShrDrIwl58fKR6kyFT/uTYX8QpztAJby
Z1CGFgd1LMhNM4uYqvw8Gwp3XpQ7T7Ov6IO2TNmThRhMJ9hEa/UWpr/ZD36poIvGALU6Wg9jiSYHojymfdZLPHQtoNHnuY9mHFNMLRyo05XlCDdwDurTvPQpUVLYIyg9A+kW8yNqzZZcxkbcpS33ZohORIEIF1euadND+rklf+wPHZjHNX058DVvusIrHr7E4SArhES6jOowohNEHi2cTJKNKiXLJLh8CB556MDVy8EjjzRbwsWxuHnzyAg4O4Nv/KqN733XhTyYjMh0asGPhvmq9CnZbk+L6SIm27AZWa3TqeSwsfxyVFjO0IA5tqBmGiJz5zRTd+8eZ+93wpb5d/+YckzvSGJgq0thqNGDCju8oQFNzUGxnahdq21biSALnvj1j/MT736XKvwU33TrxQ/e9MY5c5c/kGzI3T7bCUds8lv60Cg4XLmfB17/NZibRwwZKxml4OYzT7Lfus3oNNsDt9v6GWtj/VJ5BhoCTkM22lLm6q3hiKq09GEVPc3snFCVI9W+qJrZTWRy6zb887efc34bbt5sLu40t4/TSlO1+3WUve7FUfDAxR7cvoBPflYzia2THJNfeOwO5+fmKC+4ytX2Sd7rKhC7CHaHja92nbZSUugN+8E3N288xy+9/52c35lwXEiPNtOUB8iyPhDVTCaGsqjMEvsh7IOymXquoXNK2fcSeDS7Wukqap/MoQRE1IkJVEPvy9q01SG4Lkt6xMlOtlWiCudmcM3TrIOcPlsHluCtYt7ND8uQzYVOzQFznCpAaSasLm3rGiI4VDAOpiR2UrsWZcQE02PEs5/i3HTp/J7oyKh4k/JN9qxxQj09w/HzhWnm2YRsRsgRNPOgbLksSLQxg9/3msHNm2VLgp2ti0uH4NJYM4uSaGq60hf3mKwDiVZ7Cv5LcgTbtnHl8oGHHjjjlQ9f5uH7L3HtsinOEdw6hwceuEzuilNiryX04Jh+DfZkasK21Rba7XnCCbWEp5FxmvUiadOCKNhNGCk3GhGoXC9ig7mt9+u3frwkgvsagTJ+85OtIZxRviYlQRDLh0WmSWlsUGKMOnl8Z6jCeee73skTj3/yxJ1ND2VqoO/tTEgL9w8rjWrFJ1brNdkttx4kbFc0ZEVLI0Rhsi9EqnIVx2fX5vfIu0Y/Vdog02qDG0RZHFIVyubQ8uypzzNb9Ea1560NUTUIhLtH67Dojlro3Ho/f+bRC97/Ye0S/eVff56PPnGLZ589aiCbzcV+5Pr1C557Turb173yEq9+5RmvfcWBJHnwvjOu3xy8+5fEyJHwSIGmNnOfsb2wq8JciRL5yDOSjKRn0jOcB6YUplF86Gd/jJvPfsaDUUnvY9apwowpWXo3bFNq28V8nKkqnXRiMKSlhubfBHH/3XmodtAAMVt+/eloGyc+vy5xG86KHKcqfJrrX5KmOnkbixYfF/outVfQy1p6qEBWW4ltxV3BTbQYI5WLzYNpxM0cDZsmQEv3UaUCJGjGgDQFVy4Kwwy1s1M13rVQY90f4fbDVW0Cw2ZwJi2sz3o3Nz+lVF2ddqxOInbmUJchUVHYQVSIz6994hafeuoWt27uzEpmD6s7rd8wCaO8F7kp6jhPrp3raxGDQwaH4TlgqqLe93l6PdLg6HOhmmGhmuBHDZ9naoYnOqeLQHeZi/02S0pdeT8pUecWNJs6rVAsGqaBR+nncY/a/SUR3NWic6KL0XajM9zR9pPIHTaOPlDKetn7CUNLWsHQuPRz12/y/3z3d7NtkyX86pYBU3WqbZxFx9Fsq90XSljLHH3iHG9sHGz1Wzl5+Mv+iEySEO90uN3WaryWo98GcetJ9v3IaHHbK8UqoYseCmLbwhNY3iFoe0+pis9NLpmZ6l7KUyc5/3lFephnayxP10Qt9fO3in/8lju89SdvsJ9L6PLkZy54/lYTkdx4fvKx37jDR554nk98+g5bNH/gS67y2lcNHnxg46nr8D99502evyk2RG4a9B5p+gLowYhmDvOgWwIO5UxvrJkBtUvEIaqHJfaDikHXkR//V//IMxCrel2xBymsNtX57O1xe3hZeJm/3YIT2kk+Rmuf7D2EHl+QRwVROx1oOTNnrsatYwAIiYdUoNiRFFk87Jqi8D4AACAASURBVEsoFyklcks/MEKDbVXHyxMgfXYWf3qSNUUWSdEReywu/S7xUyJVdFl97Ols19TcyzqLnma9h5Ju9BDkNJZaNpWQnbjKVVJOsbqE5e3WhhQ9ir2k1O1IJYQJvQ0pvNlMXHBXUeieeEo8EdzR2yA3Y/bIg/38Dtw6n5yfWxEbyRhBs2tF52w4aobUNKQQ/NnB3IOLY/H8rcmd851jNWfjYHw+OMvNOhexW8RfSRi74IY4Gac4L2kWUIX2NBhW3qYS7yQ0dxlqZbLh6DMbuatrzcV3P7D0PRH3RtVfGq6QSuy23O1TyqnSTkys0goHtVqXvgeMo8HdiZZySCF24+Z1/u63fxsX5+f6XuaOZwnOyFXlLLFSNbMHNvb1hVxzF7W64Sp7RvH0479KPvWcfrYHKBJbFMdSgooZdKmNxib+aXsALeLYVY0u2AYNejVIFK5aTmCBkk5g/x0zlSOGOxoxcggV/qp1VEPsObh1Pvmhd01u3LrJ73314Cu/7Bq3bh85XGpuHYvrN4o7+2Q7C87ONiKLvc74rh++xS/88jlPX1drLQ8bJT9Zvuogz2NoSU6maXB9klcT5bS5hDalYCRlGlXipH/8136eD73/R/jyr/0mvbaFPaNLtIoUq7o1eGXCSC5fuY+9J+cXd/QZG2fus3vj7l+Ih3aIuPosV4stemhvSV2Uq28WTsHdHaBWlFaeFinrzPbJAjnSCdCGd4F9lQKWnUaAF2tNRg8qrIA8TloubiRSXROcvPxZro5M9nZl2fpMe7jrJcmx03PK1K7DQsNWopfJJ9sopheqBNB7LDCCVeGnK3SAmkp4uc5zQEd68bRw/Y0gZnD5bHD91pEguHQ5uHbtwFwddWIVuSAiuY/uhlH18/Mkpisu9ubmncn15865fZwczgZnm/ZGjBFcux/Oz71HINLDVMTAo8g5qLFLCxM4YLeN4nSmYwFh2eRMObSWXGUblDiMAITzNkM20YTPx29Tmr80gjsalk3zamm5og2mGTC14gBzeNF1cRJSZMbdwVMMYsLP/PR7eevb365qYLRMfBBVcng5sf58kfvdFriDu/Jeb5CZJw/ugNIAZX76MS4e2slLDzJ64afhIU241ZQ/haheYt28kF4RiKNcqEOIGNgQ2myFZtv7bnudbRFVEUOJo2t6oKPWcTGFBG/oVm8ENQa3Z/Ovf6p49SNHfukjz/Blr4cv+eIrkMGVywfOXCG+8/07d27DD/3Es1z0IPZk9GRuF8xpOMlQWttxM+xyWbEj+3wr6Wq6CnNFvtuOpS1ISzvldfD87Ru88we+gzf9oT/OdklLUPY8EnM7eYwwitinh0tJ58bonRmDy5FcOpxxfj65qJvEvpwzX9zKfYviOFqVd7Uoj5suaO+raBFkkXOXGKniFMS3bPnae5vQ5kHrEWTz66m
EfHd2xj5snurqfUsxbq4+zH1f/GYnWFSQ7Eltwuppw1vnF9x44ufI3O/SU+073znZW4Ej6q4aeQnbyvew19cSYtd92Gswegcc9HyeNTsRIMSmYqmniq7Mu5ueZHxa2hu89A/s/OwHb/Pvf/UmoR/JM88Gn7k/ef2rDQeZrDCRDqXMieuSa2O46o+p1Xq3jsXTz53z+G9ccL4fefDaGa942SAce2oGP/1L56SplZvP8Wjv900pS7fMuxbhYWFjDujdhZoHaoEYXm3/p6m70nie5E5ItM0+WXpHnzwmf+tz97t0nv+NH2Eur4rcNWiQSEa7HY9uwcOGUykiP1PBxSyA0XA+7/Cd3/kP1GaaeRHrkMS0UizpfWrXqqlpxJnarIBOOTtGDwmQEKbLPs1RL+rO0/TZQycueq7iC7lex36Hfv5J+0egDmTNMdHrmtRJXq1ObJjEMOm0odIOYyzZ+hTfeWGrFI2GN1sKzlkl7kkBmw17SiSUBz71dPH0c/CLH1aC/NLXTz76xB321gU938/tG4IqikVDrGBkqdUOO/6VBRpp0tDCvFGgyG5m2zd+hi6xi/HpgevagXrWwfnzt3j32/5fvvHP/Ge6lK54c2i5Q+1KvjjBjZhMf87d8pq5dNg4kMxx5Pzi9t03/EV6zIWlnzQS6gwrdPk7NUOYSww2fROG/i0Ry2ScmEOlTnY0vSUS8wU9VYSIXrn2CAi/jmjOrj7E1df/YV96ezQtRo6fV3Syn1/n5uPvlUgogjqKUKDhXko1na6EJ66GkTpzhudPohoulufWTXM0vTM9n9J8jC76EBIJngKaVieyQ3ufXhkWOXkpOO88+rGdP/UNvp5dXLoWvPyByeVLlxiUZmLR1NBAs2no8OxD7+NsFRL7Mbg4nzz1zM7FvOBHf27wx79y45Uvt14gm4tj8LFPTbGEerE4RYHUnMz+7x5I9AHWzlkJVgeZomb2buuU1EwxvGEtrflRQeneM4aV6LrnHfs9T/ZLIrg3phCa97u5bSyvmdIw4yDsKsvbUCaNzJJkJ62GZh/w9ne8lU89+emTqEOLpRVYY9fBaZo8C6/XkxETsasVCmVGduzFjdqpQrhdWaV6+2lyFnumu4CAqrvULSlADLkka5F3baJG1joMFaLIdS+DPZYNa3RppZifD6cqvbWYodTyb8MuIiFf+qrBzKM7noDhHa/e3HNRcOeWKJEf+NUDWbt1Ya76Q/i6WZaQ1h90SHVquGmONvanppo5Vb2F3CZnQkxdJCXuuzCYFJcB04Iyn4UPf+Cn+Mqv+w946JHXgL1KAgTZhah/QVK7GEHZwLBJ1dRSg1FJnh24uDj3+//iPZpgLFUnmxwZF2vI1XV3qosbeWq7R7cofHEQXxph6ZpJyCN/1qJCrqS+lsVw2tDFwsP9PBbyKEM8kwcSQ0NilkSUlwP5D6OipVO7RBVzki2nKmEXNmELfVELw3BpnM5RxxIVqcrPspXx7s7bTqCTqT0Lffc9klgRXEGgd3Dn5i144skzLp8duX2n2Ped44X+TERxUc2+K8DL5FiUTRouplYC7vRpIB8NW0oFcP+V5Cu+xAPpPbh2DX7j0+fqOCbgDkywsQqwHHAkyLQluZ096aFCL9fYOqkNq2TcaVMMXEya0pvRWipfMtZLwz1lH6nP9XhJBHdRwUznKw9PAxH3hyVCrYwWZbVZNn3RxKYJtrDb4LmnnuNH3vojVF2oCjJ+Zwaj2pkQC2BaMp1SdOgdHiGDsglrlVwdxSpIV6ejJTZomrk/wzZVFfcuo6cmhU0LVCOM8feQP0x6wt/AsOVAlwY6gl2V1KRGc5nbpZuDWAqVSgp6o3RDK5oDoQNld7rK8jQ/II4a2sykZmnfyD7t1tcn6KSN+y8GylJJ5hT1SwpE2S3EpkuzWZewKJ6D1dHH3WCDruOopLgANreqLzBxi+LpT/8GH/7ge/i6b/wLLJ/2GerYYoZWF+7CL8NJ/WL5yEdboCO4Qt3OiwvLEOXAknRc2CAPJeMhOmEPGYP5BJAk+yyt1pv+DA+q4Hpud7FbcRFdEJQPubqc2ExVXIwUz6ymh7i9BtVaHeQzJkO8Hhpu7suzplwknGiUJVO4cO5v7yWeiuejmx72RyGJTSwYCRJlM9FTg3lGM8r3mJWUYDNnfO9+wTk3v741s+ka3L5IPvZk8Se+4hIf+dRtsoNPPnXO7X1ycVHcf9/GIYIrlzcuXwkunQXUYHaxX0xu7bqj29XgciZXrwbXrgYXFxt//k+ciVm3KyldPjvwtvfeceWszke9QBPpHQzqU8WeiZUIxTYamzu3qa1NCuQmjUR6a5X0AWOXxfFcs5pFmV1aAO59rl8SwZ2AXLsiPfmJGPTmwyvw1hSlIa+M0vbzMsc30aH9G//r/8LP/vzPscCRJZxb9g8NbsmSrl3dY4kpnD3RKV44sGH0M1WEwxWgw4Y6jZ7eHnOguKOaoicxgkPDblhp77syZaz4E1Lj6hVvDvVuS/yhn8yCWvBHYzpa92kw213Upo5inoQNfuFO7stdj/DrXHLcIbbC9HsIKYxWTS7MPrXPSq4J264/7UUpYfEKTMi0nB22bDGXSvOTPQTASECSJ5wTK3tHeD9sFz/51n/Mtfse5iu+5pupgqxyFYcHuciZLxqwtmG1rYRe34KxXmRYJjuxfyJ0ukjVTMaEIZ2JDBc2YgRFanF02h895mD5oXcFHHYzSQSbZDj/R7Kxdum2vGRSC2I6mi1s6jbLs61y1TlsOxBEb8yeMroKdR8nfnmrye3Mk+8TZtv0VqeuTKwQvwm2CxGVUsFd26XktXMXflaXGdaATBDcdEBU0FxLMMRQiwiyJj/3wSNf86bLXL18wZ2bzc3rO4Pg9vGCG8/vXLs0uO/a5GXjEpfP9FxqFjdv71xcwLGKS8fBQ1eLcdh45csvsR2GNkmVOtFM+PUng0d/bQfz0ZIhgVIE1HRXiz2qmtha1N8we8v+TztDWpGlYN2LtUN5WbGcOlbCgkz/+kR5vndwf0lQIQEWOJcj6Tyg4rxFbUwxYIf7phFSgo3QG3GoJip59NFH+cCjH3DViStCLdZtwmvrmtFTOx8zjX16KUCm8ORe4Pnuab0HIob79lS1GlmMkNPjnHegg6006JiIh5ybMq08Req0wo+EOCBWTstT47A42ZUc0EcnE6jNH7i8ZczCFe7c4Wqo2XzT+tSdKGmKX6/2PjBGmO3JcTF3HxNDPVqiIIFUNRaPbVQMyaFbGL8aB3ujsLjLNsOMReHTQZxryNuqmghVPkVD73p+7YFxBsfzI4/+3I9z+/az6ooiDDGs6lYQwd4wS25TMy5IJnmcXoqQjMOlF53qvrjlueCTkAK6W7a88nXfDAm03FCzZIfsqi3NygDEUrnkCWMF9EEr7ghBXSwIQgd2Bkh2vxki2cQ0I9gm0IPdFrPVel49d6UMQ0bL9ynGpoLChmbp4emcoK1g5YUtu6mL+qyWkV+Z/aZiXHL/TtlXT8ObEYYfUt8zt/C8Z7FrUBBVpUZF8dT14p+87ZzXveqq8Oxt8PwdzXo+/h
vNs9cnezXn5zsX51rwfv35nSefO/LJz57z1DNHnn3mnE8/dYc7tyZnly5x+co4WQmcHeDlD5/xEz9/zs2bngd2sq91ZKAOqW2mlrvmhlMJLnY3SI4NgpV3ZpUKOsO6nclxfQahz/K0lcuiJ92T1Wl97nP3kgnuIQUHfYQx+3RAF0tGEFnbZVCUrPYbv2dyPJ7z1re8jRs3nrNsOok5xY0OQSq4da1QVTxq6kKNJG15mmbQBE2N7TTtlh2vsLNBaI1eWTxxVNDKCI7Z7KNPbomiYPnA51CQx4Vl6eLtLON+G54N8XeV7iSYkHx8d2Xq11HemuPnFOCB1mrudaiWyjAoaponnUoRbafFCBl1hXm7K3EkGqJuoxm9W2ijz0sLEZbgwz7g1eRoBoJ90hoFxfjBEtfE4p5va7/TQcpkB7/o5IlffT+feuJjZhb4c5jNnCtRoCTRes1ZQ9XnlieGwTbOJKJ6MR9lfJXBHsN0CvGW01CY9s8K/A4da3KWxJw0uasD6RjM6Q1IlBki+wkKCOsk5tJb6IAryXZJKFgT2MmT0ElsFTmMulvNokoFd9viodtYOgFxRkaRnkNFCjIgm+gN7fX1Z2k6Zt/lZ7I+wOnBfR+DTfv63Jkayup13gyxdrCFXvOWd6v9rOITnz7yC49NvuiLrrBFc9yL/QLuv9q890Pwa59sxpbcOcJFNTdvH7l+/Zynb+w8/uTk+i1tR7t5a+eZ525x647O+9lZcu3yxvf92G3e+d47VqW1Zn4j/HLs4lh2ZWXzCO6gGj9Ql+xdFaOtL+AuXCkGYDiRc6Jln2Z4uNtN73ug79mUvkSCuzbvBApwM4K6EL7csfjbHjmUVYuXVmATnPDxT3yS7//hH4BWe6plvitArrguuXKYlaDtJopWM0Xja8My1bLl1HPIF2D1ysALtpmnhZLqBgY48bRZPko0cpFTxSa8vd2e+oPtgN7JbHKTr8Q0u+YELdmjvELDtpHa/aqWVgOqaL0/ud6vEezjCDH1/MNvSGmB9GAjYpJxjoappSFXqIpqD7mmWUTVbQw4TtWkWmg3O4QWM5dVgcvJstQtKGEII9PmqtL7UcsvpZCPvbD+n/rh/1uBfdMhn1u5upNrp+AYt8VjUmOqUEDPEVc7L+ajHKHyZGpX2tXbAbGRhhyjd0n0u+Qh094fyhJnefbhQLtIKZMFOe4WMM2TMHBLGRd0JGX/mhPWwjjR9oxXqoOtpvdmVBIzTDsMf3YQQ5X56dzNZYgmFfbsoHdVU+0ZQLhbBY+3hgy0ZGVvUoSrfHHIlFm0ks5V+mLRGpJoFpNrUAS3j833vP02j328eeUrzrh0KdhjcuVK8/Vf2bz25cHZEMT71NO3eebZI8+fNy974IynbwQPXjsD4M6xuXNnEj3FMhqDX378Fu98321tsppytySnO/66e6YL2zsLiGvTHjdz6DvuFk9KTHmaBxYhIzh0j+aissaQuIrhTj7ZWWKxz124vESCuzLzWnYNOvxrAwyswOEn3K0KNHUh6Obb/+7f1vCsQ7g8dl9bB8zfZS1PHgP2LDKGBRcKkmvQ2vZKiVZCqBLEXC0YRgdMlVMlxpATebpvqsDwVpY0ZNEouVia3acVnGIJtTmTMReWjiyMq6gFyfgAVen51fAHzlSiMg6pNHQXJy+/rz0KhhhIOlBLDLNRB0EcS1Q1j0e6TFUr0cW65JOOh62E9zw6CRHNzpGiqJ5EmeITokJ2KsiU3fHEC/bXew2a09Vj8qlPfJRf+dkfFfafmL2hAL/R9gOyD/5uqqwvQI4gc2Pkep9fnEfTes8RdS5srqav7UraU8K80YXsJBwc1/hgXfm0PuOEwNqvqEOLw3sT1c7B4djqriJlIby4L720I1mM2o3wBBFTQ+4QRNepQiRzmp7b/uzCCXoKloyjGFGlAaDUlOnNT2JlVSD5vZP6tpSvJRrlcASvvot0YLuDqGZMn4tqdTRrjpQTYvHDg+/7kSOPfTR5zauucPmsydSCmGdvH/no47f57DM7z97YOZ/BI/df4url4I/+wTOi4Ti3k33yGIMr953xmc8G3/OvjjxzQ6SDsZKcP13xzcV5mYnYRMjrPWNoK5otJKiJ/DdV+FSob02WYLK8VnCxPAKwkj0KepAzGEec5T534fLSGKjiwzuEqav9aBtCqbKVIRDCC9kt6Ev62LznvT/Frzz2mCbwCAMMwy8Zm1gqa7lkog0yKDj0KLqPCj5lBoODovzDnT0NcXbI0EqXKkUxbGHKOVSjS5IsCf42k7G5OkdVXIda0LZrnjAn3+Idlkp3QUlKPjtLvbaWPGSHzZKkdAx33oxJ1DgNqgZN905y0DzAOLhg+7TWVaXgGPbd3hUMKGGjNYI8NpWbZgfDWS11+GSFEPLlLiWU3gOGPMSlT5gQXvEWR44VDFs4hHnH3YKJam/NK+qC97/nLbz+TV/LlWv3ESOt2DP0ZSrOyfUww6XsgiT9vr6IjyTZ9z7BLRZ4QqjSDlSopJdS0O7WTCmtaHpP1o5VQSBTME7D2NOKUw9rMwAJkE5Jt8xTJz3nmczNi1Q89tBoZmMPFSpdKhiiNjG5Bk680w6P0AwF5TU/MVSQSz65moJQIiqL8Hp4h/GC4SjrPmyChjQpUZr5LAdK7VcuOucCtdSp1oE5ZFFyJ3f+xbuKK5eu8OVvfIDrz+/cvD7ZZ3PnopjPXhAZXLmS4qO3dAA9BsxmjOK++zYO44x/+eO3+dc/eUcLu3MwusTnbz3/3gZxnNIWMISjd3tY6rfgoLsizHwDv0oReheOjjU9tjJOQ1SEoNlErVoWs7T677dTXr9Egrur3i6WW1x1kVF2hUxybM7mTQ4F0O7i2Wev8z/+D9/KzRu3JLARrKtDFHfhnmnxh6iJOox7hbfLn9314NHqGoATO0VVuL7nsCmZ7p1XY1mIExwECXgDzvJC6dneMCMO7LSzoRgF4vruIJ23B8jp75hMsqUBz93+EgnR8nAXDNdmI+iQRns7zKJWIQVuparv4Ze6fMJZ0ngGM0zhDBhm8ky382VLWBXC7m6Ypva5T9mHIITR2uU529am+j6SpKtyWSKrbLsA1mITafitNyl44iMf4h3f9+38ub/018g96U2XQqYMu9vboQCykkiHaXOOqC/WozVOHOEkFss+YHU6br9tZywqllWJU+dcAUCuiiJU6PvNqTNaYw20X7Air6UBkXZCw9k6f56LJ39F58rBIoZcQJcOGoJ5fps6Lu57QxyVjNAdnVNBdZRM/NpfDwsME054+XRFfYICbTvhGSGFVKslLFVzHTb7rQx1lgwxd1KOmRkS0YVp01GyBlBSaWrT8pl/9JabPPSe5C/8ySu88XVn3Dg/st8pbt8+53A4kF1sQxbCs4uH74MtDuyz+IUP73zPW5/l9h29B2oYlYCi9HOagDoSm0gbWUrkGXV63YvWXKaW5miOpWQuwoZYbmGfny4Jkw6uAGZqrmcSkxKp547mhX3Ox0skuAsHD8ubdaUHpyZyU4sSxt16UcEi+N7v/afcufX8b7rcE6CKkXma0y/P9EbKy
+ [base64-encoded PNG figure data omitted; the output's metadata sets "needs_background": "light", and a second "display_data" output with "text/plain" and "image/png" entries follows]
" + ], + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQEAAAEICAYAAABf40E1AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9aZBs2Vbf99tnynnOqsrMyqrMmqdb93ZjBCJsyVjChkBhsEKEB8nYoEBY2A6HbckoZCP5BVhIxooAS0QYBULxArAe4OGDsPVB2PJDEAoZmdfdt27Nc9aYWTnPZ9z+cPL2q77c7n6tR7/XxLv/iIq7z9nr7L32tPZaa6+dV0gpeYM3eINvXChfbwbe4A3e4OuLN0LgDd7gGxxvhMAbvME3ON4IgTd4g29wvBECb/AG3+B4IwTe4A2+wfFGCHyFEEL0hRCLX2cefkAI8dufYvllIYQUQmifVh2/XxBCfE4I8csfkb8rhPj2ryFLXxWEEH9GCPGPvh51f2aFwGQyLr/y7iMH/tOElDIqpTz7asoQQnxRCPFDv188fbUQQlwIIb7j683HpwEp5ZaU8ouf5JuvpxCUUv7PUsp/42tdL3yGhcDXEn8Qdr7fT3wW2iuEUL/ePLyBjz+wQkAI8e1CiGshxF8QQtSEEHdCiB98lP95IcTPCSF+QwjRE0L8phCi9ChfCiH+EyHEMXA8effnhBAnQoimEOIfCCEKr9AvT9IBIcTfFEJUhBDVST2hR7TfK4R4VwjRFUKcCiG+Swjx14A/AvzsxLT42Qnt+oTHphDiUAjxbz8qJzPhoyuE+B1g6WP65HsmanB7onVsPMq7EEL8JSHEc2AghPgCMA/8+oSfH31U1J+ZtK0uhPhvHpUREEL8jBDidvL3M0KIwKP8H52Mw60Q4ode6bPPCyH+JyHEPxRCDIB/TQjxJ4QQ70zadyWE+Nyjsl7uyj88Ke9OCPEXX2myIYT4xcn47gohvvmV9n7HJK0KIf7ryVj0hBC/K4SYe00X/pPJv+1Jn3ybEEIRQvyYEOJyMs9+UQiR+AQ8vuSnIIQYCSHSj969PeljXbxi6n3YvBBCLEzGV5k8/7wQovbou18SQvznr+PhQyGl/Ez+ARJYfuXd54BfnqS/HXCAHwd04LuBIZCa5H8e6AF/FAgA/yPw26+U/xtAGggBfwyoA980of/bwD95HT/ATwP/YPJtDPh14K9P8r4F6AD/Or6QnQXWJ3lfBH7oUZkR4Ar4QUAD3p7wsDnJ/xXg1yZ0T4Cbx214pW9WgcGkXh34UeAEMCb5F8C7wBwQevTuOx6VUZ608+cnffIMMIGNSf6PA/8MmAamgH8K/MQk77uAe2ALCAO//EqffX7SL//ypF+CkzHcnjw/BarAv/UKL1+YtH8beHjJL/5cGE/GXQX+OvDPHrXl4hHtfwXsAGuAmLQr85o+fFmn9ujdn5304yIQBf534Je+Eh5fU/4/Bv7co+f/Afi5SfoHXo4tHz8vKsC/NEkfAmePxqgCvP2J1trXe7F/lUJg9MqA1YA//GjS/cqjvCjgAnOPyv9jj/J/AfipV+htoPyYn8kkGgBLj2i/DTifpP8O8NMf0qYv8kEh8O8Av/UKzd8B/tvJxLaZCJBJ3k/y4ULgrwC/9uhZwRca3/5oUfzZV7654PVCoPjo3e8A/+4kfQp896O87wQuJum/x0QQTp6X+b1C4Bc/Zsx/5mXfPeLlcft/CviFR3Ph/3qUtwmMXtc2/IXyvV/BnHtZ5+M59X8D//Gj57XJuGgfx+Nryv8h4B9P0gJ/of/RyfMP8GUh8KHzYpL+JeC/BHKTtv0U8OeBBaANKJ9krX3dbcOPgIu/oz2Gjj8AL9GQUjqPnof4i/clrl4mpJR9IUQTKDx6f/WItgB86RX6Bv5OfvGIbgp/p/tdIcTLdwJ/0YK/0/7Dj2nbS5SAbxVCtB+90/AHeWqSfszj5UeUVXicL6X0hBBXE/5f4ur3fPV63D9KP+7TD9QxSRce5f1/H1PXB94JIb4V+Bv4Wo6Br4H9Lx/xzSX+bvthfAaFENorcwL8MTl9DT9fCV7XZg2Y+TgehRD9R+83gf8N+NtCiDy+5uYBv/WaOj9qXgD8JvA9wDW+CfNF4PvxNaPfklJ6X3nzPts+gQq+pH2MBT56IbyK9+0+IUQUX32/fZT/+ArlLX7nv6SPABn83fQx6vgayJaUMjn5S0gpXy6UKz7cdn/1yuYV8JuPyklK/xTiR/DVSudxG/Bt+A/Dq/yLybeP+X+1/k96hfQDdUz4edmfd0DxUd7rbO5X6/v7+GbVnJQyAfwcvkB9jFfbf8snx0eNyUfxB69vs4NvurzEa3mcjOXLv4qUsgX8I/yd/k/ja6qvq/Oj5gX4QuCP4GvDvwn8Nr6Z9a9Onj8RPstC4FeBHxNCFCfOme8A/k3gf/0EZXy3EOJfEUIYwE/g24wftht+AfhBIcRbE2fXTwL/r5Ty4jHRRMr+PPDTQohpACHErBDiOyckvzAp549P+J4VQqxP8qr4tuVL/B/AqhDi+yfOIV0I8YeEEBtSShff/vycECIshNgE/sOPaOuvAX9iUq8O/AV8e/6ffsQ3r/LzcfgC/phMCSGywF/Ft/1f1v+DQogNIUQY3zz5OMSAppRyLIT4FvyF8Sr+yqT9W/g28q9+An5f4u8CPyGEWBE+ngohMq+he8DfnR/3yReA/2LikIviz4tffUXb+CQ8/n3gPwC+b5J+HT50XgBIKY/xN6J/H19YdPHH8k/xLyAEvu62/0fYZyF8x8kFvkPpS8D3PMr/duD6lW8u+LId+Hn8neU3gD6+2rTwiPZ1Poc/j682NicDUXwdPb5T6yfxHTJdYB/4zx7R/kngOb5j8gT4zsn7bwOOgBbwtx7ZmP8n/gRs4DuP3prkTU346OLb5j/Bh/gEHtW7N+mv38TXVl5r/0/efS++xtUG/iKvt4m/yMSPMWn338Lf9e8m6eAj2r+Mr6LfAj8yKWvu0Xj8d6/U/334ml1v0s6f5cs+n5e8/PCkvHvgRx99+7mXtK/Qa6+ZCyrwY8D5pK5//nhsX+Hpxydj0Qb+MP5G+Vfxd+cHfKGX+kp4/Ih53QN2X3n/A3zQcf2h82KS/wUmfqjJ89+clKt+0rUm5Gu1kT/4EEJ8Hl9I/NjvQ1kKvo+iJKWsfLXlfSNgsmu9AALy99roX8n3ZfxFq/+LfP+1wB8EHr8SfJbNgc8SnuA7Xe4/jvAbGUKIPyn8WIIU8N8Dv/4HeXF8o+CNEPgYCCH+FPD/AH9JSml9vfn5jOM/wj+mPcXXnH7ko8nf4LOAT80cEEJ8F36Ajgr8XSnl3/hUKnqDN3iDrwqfihAQflz4EX702jW+I+bfk1Lu/b5X9gZv8AZfFT6tYKFvAU7k5NadEOJX8D3RrxUCQggZCoYYjUcfeB8KhvCkxLJMgsGgT4vvjnVdF9dxMQwDx3VeekgRQqBqGrZl4roehhHAcWw8zyNgBFBVhdF4/D59MBRkPBr/
Hp5CoTAIDyFhOBoTDAYZj8cIIYjHYxQKs1xcXLxP77oulmWhqSqKUPCQ6LqObVk4rouhGyiaigBM00TXdRQ//BsJjCdtD4b8dr7kKRQKMxoN368nHAwxfEk74ellu4PBAJ4rsR0bTdewTN960VQNoQhs24+z0nUdTdOwLAtVU1EVBaQAIRmPTYxgAGtsEgwYjMbmhI8gIBiN/LpDgRAjc/T+OL0/dgJCoZBf3uTY3fM8bMfGMAw8x8WybVRVQwifXtf092kt08L1vhzrEggGsEwLKeX7YxUKhRiNRoSCE54mdYdC/vWN0Wj0fhrpMTYtAoaBaVkYhoFt2+iahu04GAEDELiOg23baJq/JBShoGkq5ivzSDd0pnNl2o07bMdGIBmNxh/og1AwiBSC8YSP0WhEJBImm1ugdn3GyBwhhMAwAh+Y2y8hpUQIgTPhCfw+0nQNOVkD/hzSUBQ/Rm08HmMEDMwPjJcCQjIajl7yV5dSTr061z8tITDLB6OoroFvfUwghPhh/KMV5maLTE1lsFyXvd09tp88wXQsDvYOAH8iFEp57q/uyRfznByekkgkKJfKNJsNRuYYTVVBCLL5HEiX6/MK7XaHQr5IKh1HIHEdUDywpEQREkX3wNXZ2X2PtY11DE3Dkh6GUHj+fIftt97i+TvvoCgKi8tLqCjgCYSqgbCYL8xyXb1jaWkR1VNotZpcXF8xlU1TmMv7AstRkK6LUCWeAopUOTs7YyY/Q9SIIhUPKS32D0+wbYflhTWkIVFd2Hmxw8qKf5v6+fPnvPXkLRTPw9QEAoHmCUx3zP7+PkEjyEKuTKVaYWt5E0+VoDq0Gh0ur658C32CqalpRqMho9GI0twcD40GU4Usd1f3rKyucHlxyfriKseXpzzdWscjAIqNogh2nu+y8ewJe+++eDmOLK8vs7ezx9b6Gs939xgN/cUQDoWYm5vl6u6ehc1VDM9DShWBQr1eRVoeM/kcrqJwc31Fs1FnoVQiGUnjCJOTszOW5+YRwSCK4uG5gt29PUajEZvbmxiKxnvPd3j65CmagC/tPAfgrWdbuI7Gzu57ABi6werKE4S0QfWQQkFKaHf69DtNYtEIlesbpqeyCCEQqk42N4MqPYTtInUFoYBwHA6Pz7i6OGRjcwOhaBwe7PPkrSfsPd9jc2uLgKrxznO/3m/aeosv7b6LoghW11YwTYnlWjx5toXqaYAE6WHqCqoHGhqqYlNvPjAcOhRmZ8GVCCm5ubul3mh8YJEVZ+eJRMMcHR0hpcQcm2xsPOXoaJeVxWWEAq502d09eCmgXhto92mZA98HfJeU8ocmz98PfKuU8j99HX04HJJrS6s4SFQHHB3wLPb2jtB1nfXlddSgpNftc3f/QGmxTL/bYtAZUJrLU603MCXMFGZQXQlC4rkKoKIYDmdH5+RyORLBKLuHh5RXFjHUIHu7z1lb2cQISd57d5dn29+E9IZIEUaxLd492EEIwdOna+zsHLG2vsrJ8Slb62uMRyaX11cUCnmq1TrpZJbOoEGpPI+CQu2uhi38XbBerb7sFwBKxSVq9TuKs9NcXt1gmhabmyvoWpQXuzu4rgNCsL39lBc7z9nYfIv9vXfZ2txmd2+HZ9tbuK7H/tExSyurnJ+esLq6jgIMBz2qtTrZzDStTp25uRKKkAjAkwKkRAiF65trwvEQmWQCxxN4HghNoCGQ0uFg/4jVhQ0OTvZY2dzECCjsfOk5W9tLeHYEPShxbYGijFEIMMZDoCBHDgfHexiGztraCkITDJs9ji++PP+EEGSyWQqFPEiJz5aKQFC5PKfb7bxPu7y4TOW6gmmarK1tEjQ0dnZfsP1kg50X+6w/fZv9977ExpMtVFvhxeHO+3U8XX+CqgnGjsfJ2RGrm0sMe2POTs4BiMXiFOfLKCroQqH2UMXzXKZzORwpwZNcVyqkEnFisRgoGgjQpMruwS62bbO2sYWmgaIqSMfFkwYBHUzPwQDefb7Ls+1nuLbDiwNfcKqqytLSEkdHR69dP6lUktlCAU961Op1HqoPFGdzZDNZQEFKgSckZ6enDIe+lri6skooEMLG39w0T8VzxyCCuHgYjsqX9t75XSnlN79a36clBL4N+JyU8jsnz38ZQEr51z+EXn7Tk6e8u/eCjbU1Do+P2VhdQaoqluVQqVwyN1/iplJhNlegWq9SKpVAKDSaTazxyFevVZjO5qhUrmm2G8wvLJOKxbE8k/PTM8ajEWsrK2iRAAoewgqys/8Onuex/eQpwgWhunjS37EdVQIurgsnR8esr23gujbn5+cUC3OcXpwQi0aZmZrh+OwEIQSpVIp8PsdDvYEiJVNTGW7v7wmHo6RSGTzpcXF+znS+SCwa4HB/n/HYZGl5k2DQQGqS/efPWVnfIKiE8HDRVMtfcJrg+c4uT7aesH+wz9bWBiAYWRZH+weEw2EWF5bxXECR9IYdri+vSGcy5GdmwVO4f7ghYGhkEimEYnB5fUGn64eoLy8tEwyEOD45YmG1jK6FwAPP66OpEaR02NnZw/M8nn7TU55/6TlvbW3gavDiua+16brO8tIK+wd7hMJBZudznBxcEo9HSaXTdNptSnMFpFSRrqD2cM/9Q41CLk96ZgqkQLouqDqap3F8usf8bI7zyhULi6sYmmDvYJ+1tSWCmsE7O7tsPXmKxOHF8xc8217HUwPs7+yytrLK0ekxy2sbKK7F7ss+WlpCQUd6kla3yc11hUw2g67reI7LVH6G29tbWvUms+UinWaHfrfH0tIigZDBydEplmWzsbXO8eEJjuPw5Mk6e3uHeJ5ka3ub3Z0XSOmx/fQZwlPY23/O1vYTsB2e7+2h6wbrG9vgWUg8JB6eVNE1gefvY+BJHEVQvbsjqOlMp6e5uLoklskQD4dQNBXP89CEwsHRAeVyCUMJsHuwy8bWGvt7h/7cfnsL1xbs7ux8TYWAhu8Y/OP4sev/HPjTUsrdDxMCq0+foQiXk90DFtc3OdnzpaZhGKyvb9Id9rg4OSUciZDNTFGpXHygjOmpabIzM3iex/3tLeF4hEQyhZSCytkpM5kc4ViQk5Nj5kvLBA0dIRye7/gsbW1sYErB6YHvttjYWGP/4Ij15U32j3dZ29zg9OiY9fUNbMvkslJhaWkJRXHp90fUG03m5udotXuYwwH5fJ6HhwdqtRq5YoHRYEgiEqfdaZPNJtHiMQwEJ4fHjCd23JONdV7sH7zfpq0nT9nd3WF1e5PjnV3W155xcLTD5voGtpCcHR6xurGGbSlULg5ZWlzA9XSscY/zy0viiRjF4hwKgoeHKtVa/f2ypecxNz9PKpXEcz08JKrncXR6RmlunrOLc5ZWVhGaxvH+Hqvb2wRUD9f2ePFij2fbb/Pe8y8hhGBzcwPhOXgigKuCqnh4tkSVAk+AkAqqlEjdpdUaMhr2CAQC3N3dkZ2ZxnUdWvUm+dk5kok4lxfn9Pt9FpeXCEcjnB4eszRf5vj8jKXlVTQVdl68YHN7A8UDzTOwVYe93T2EEKxvLaFaBvunhywtLHN+eUZpvsjVzT2F2Tnu726
YLy2iCIHApdNqcn17+76mBjCby5OeznBxeUk2kSAST+AhOTk6YTwes/ZkC90VKIofZyx0BU0Kxo5LUKjYQmFv9z08z+Otp2+B67B7dMjq+iaOZXFZOWd1fYXBYET19p7pqSnq9Qal8iKNToO7q2uy2TT5XIH7+xqqrpJNpbm5uSGRSBCNRRAIjk9OKM7PkQjHeL67w/LaIseHZywvbhIOuth4qJ7Le7vHeJ73tRMCAEKI78a/GqoCf09K+dc+glZub7+F6TmEVA1UF+EouI6DFC6edDm/uGJtYw3XEwjPw0WiIlGkio3Ew0F4klqtTjAcJJVJcnl2SSaRIxoL4AkPBR1durhCB9Vif+8Iy7J48vYWBzuHSCnZfrKJ63rsvNjl6VtPeP7uC1TVd76sbW5ytH/Aysoqmq4ikHhS4AmPYW9Au9MiV8jTrjfwHMn09AwOHqquoKoqihC4losiFc4uz5ieyRIK6ShKEFyBIyyOD09YLy0ggga2JzE0nffee5cnW2scHJ6y/mSL/R2fp82VRQ5OzllaWcKzPY5OjpESwuEQi0uLNJtNhoMBoXAYy7SZyU7hSYmmq/5O124zV56jWW/S7XQB3m+r67qsb2xwdnrK2soqqB7CEdgS9KABtkSqEikUXOlwuLPLxuYmqAoeNkcvfFVXIvFcj2QqTqk0h+eAgkBRBB5wX6uhqArp7DTXlQq9Tuf9xSiAleUl9EgQOfIQeoDDgxcsLW9wfLzL0voaJ/sHrG+vsv/8iJXNFQwbdg4PeLq5hYPk5OSE0vIiV+eXLK2sogDd9oir6xNAkEwmiEQijIcj8tM5hKKCkFzfXKFFQowGQ5KxOEY4wu1VhfHEMbq4uEClcoXjOiBhZX0VXdXZ39tja3Mb1zMRqOwf7LO2sg6KROJyeHDsm7irq0gUFNXDcaHT6tHpNFhcWMR0PVrtBs7IBAl6QMd1HQLRAOlkGm/ocXldYSqX4/7+llKugAiEODjcY2WpgNBjHB8esr75hP3dHRzH4dmzZ7z33ntfWyHwSRAOB+Xy0hYHhy9YXVpGC2iMLJez40N0Xac4m+f6+paN5RUsV6JpEhRwXN+LW2+1qFX9YL6pzDTJVApD9e3eWDpB7b6GOTYprywTVFUUB1zDA2FwcnjAwsISAcNAERLLcznaP8RxHJ4+3WZn5wVra1ucnx9RKpU5OzvDMAIslZdxVZPBYEy9+kB5ocSw1+Xy+tb37iJA+rZiPj9DLJFESo/bm1uSqSTxeJzzs3P6vT6bm1sAHB4esL6xyt7uwfunF8+e/SF2d98BBFvbT/CGJsJQkQg810NYFqc315QXFtE0nfHIpHZ/QyaWoVKrkIzHCBsBTOkvLBeP6dwUoKBIgW3ZGFKhclthMBqyuryCCOmcHhyztrTq39uWY46Pz3yeJCyvL6ATQVUkjmejqBp7u3tIJNtPNzBNFwXQDI3eaETjtkapNE+73ebq+obpbJb8TB4pJEKFu7sqquKf2oQjcVKppH9i4dgTJ6zk6PiYxeVFdEfFVSWO5zuRnz15GyXg4QxUCIDZG3B+fcbachmpG2iWguU5aIEQnungah6GruA4EsVW0QzfH9JqNbA9i+xUFgW4vb2j2WpRKs7T7nZod9osz5UIJaI4jsnxyTnlchktEkRzJYeHxyytrSEwURUd15UorsQRktOjEySgqgrLi8tIXUUVKkiJ7ThYwwHnl5ckE0nisTiV6wpT01PohkH17p5sNksqk0bVFEByeV5hZjpLQI+iaAILi8rpJbZl4zgOmqpRWlkmqCmoQmfvYJftJxu8887X0Bz4pAiHw3JxbRXVU9jZeZfVjRUq5xVW15bpjUzODo+JxWIsLiwyHPZoNFok4nHanQ7F+TK2BCltVBWqt/e0m23Ks/NEExFs1+Xi4pLZuRkUzeDs+Azb8o9dNjY3MQyVg/0j1hbWwZA8f/6cZ8+esb+/z+L8EqeVExRFYXV5gb39Y1bWVgCwxhZX11eIydFaLBZjfm6WRrvDaDxG0zTcsU1mahqhSm6uruj3hxRKRTrNNpl0nGAiSeXkgvlCkcvKBbbjsra6zvHJIdLzQMDm020sx8MbSTRV5ehkl8WlBRzXo3J+jqrrzJUXubm8YG62jNRcxqMB3VaH/GyBTn+I2e8zMz1DvdGgXq8zXZgmk8qi2HB+dcHUzBSpaBRLBaEIcH0/iHT8I4X9gwM2n64jpYPmBXn+Yoftt56CB4cHBywvlbFtydXNFeXlRQ529ohGoywuLOC4NroaoNNrc3lZIZ1OkcsXEAp4jo0nPUDhodYgkYjS6faIRiIEw1Euz8+ZzeeIx0LYLnjOGOkZeBIurs7xpMfa4hKeKnA9ULQY+zu/w9azLU4PTigvzXF6comu6eRn81QuK4RCQaZzU1yeXROLRikUClg4qFLHCGjUH6oIqUyOGYNEYjE0Tefm+prBoM/iYombm3v6/T6ariOQrG9uggeKIvydf2ONg91DhPAP9FYXy5iOTiwIL46O0DSNxeUFHMvh8PAYTdOJhCOUinN0+y26vSEzU9OomoIUE4HUbDI/N0e316Pf71Eo5Gi1Okxlpqg+1CjOFwkQ4OjimHJ+lvObK0zTZH1tA0/3iGlBfvfdz7AmEAqH5fLiPI4rOTg4IhQKsbS2gjUacnlxhWEEWFgoI6SLYzu4wLA/pD8cMDM9Q6/b56FRI5VK4bou4WCYcCSOlB7397d0um2WlhepVR+wTNM/Yw0alOfL2IokakSxzDFCuhwcHbGxuYXjOVxdXLK2tsJwbIPicHnmn3oKQDcMivNFdEVDug5qMIBjS6TnoKCAlDTbbSQetu3Q63bIF3I06g0SqST1hzrzxRLBcBjLHCE9GymCaKpEUzQURWDaY/Sgiudqvnq3soKLxosX7xIKhlicn2eoKgSFQCI4PT7CcR3i8TjxWIzhaMzMTI5ms4HruHieh6HpvklgKHS7PXrdHuXiLPFUGstxCGo6Y8dFSkmlcsFo4n1+uv2Eo6MTFspFzi6uWNvcZjzqITzB+cUFjmOxsLYIrkX1tslSeYnuoEu90WB2dg7bHNNsNQlHwjimQ0A3qDa+fCU/V8gRCUdRVJ2Li3P63a7v7FUU1hbnOTi5AEVgmf5Zf7m8SCik8d57vu8oYATYWF9j//CI9fV1VM/j4OSYlfUVrLHHyekhS8uLqLqBaVqoQkdVBJ1Og/pDg1gsTjqdQtU0BIJarUokFmPQ6xOOhAmFQkjXA1VB0wwqF2fMlktoiotje1QuKqyureIpEmtoE9BDuPQQIsDR4TGKUFnbWEGZOPyGIwvLs2k81JkrlXAsh9FowHA4IhqOcFe9I5vKoCgKtcYDjuMwNTXFYDAgnogx6A+YnspydX3DXHGeSCjEyDKxpctt5ZpCscjZyQmGYQBQWljk6GD/MywEgkE5Nv2gCaEozBXnEbrGzeUlCwtLCKEgpIc1HnB+dQVCEIlGCQVDDAdDcjN5EC6dXgcVnVQqzf39Dd1uh0KhSLPVIBGPE4unCeByeH5Oca3E5eE5w/6AJ0+ecnp6gpQeCF/lEh
JK5TlCRoi9oyMc22J96wlSeDgjX5DUHmrMzhdoNtrMzuZo1dtYjoWiqphDi0I+T7vdQtcDxFMJTNPkqlJhOBgAsLq8htQU7q6v6Pf7fvuFwvJSGaEaPH/vXd7+5qcMWz1cVQMUDOlxcX1DIV/g+u4aKcFzXcqLZayRRbVeZbowRb81YnZ2lmG3TaffQ9VUOu0+iXSCWCJO66HJsN9HApZlMTc7R7VWpVwqc35xhmlZrG5vcrLnm2Sl4hwhI8LR+QGzxXlC0TA777xLKBBidm6Bk5M9gsEgtm2ztLSAZ7lUm3WS6SSDbp98vkCz0WBsjSjmZ5GOiwiqSNtBoOJIQa1aJZmK0m53abU6FItF4skEtmlye3NLaaHE+dkFC+V5Ts8vmC8UqNzc4ElYX1llbA0wAjEUz2NsOSiejRbWGHaH3NxXMQyDVCZN9e6eYChIOBRmOBwxP79Au9WgVq+RSWfwPBddN0TIMBkAACAASURBVAiFguiKxn3tnv5wgGmaGJrB4lIe6epITyGk6xxfnjNXzCFcFVOFu8o1q4vLHJ0dMT+bx4iGEa7K2BohBCiKwcHBHvF4nHKpxGg85OKigmYYzJVKGCiYlolU/GA1TTeo3lfpdrt4nouqauQKeRLxOCfHJ0xP5UnHY5xcnNLr99nc3uLy9AJHumyurjIcjzk8PAL47AoB4etNLC8tE4hGuDw5Y3FpAXNko4Q17MHYP+MWAgUPzdDQAyq9VhfTMjEMg/HIZiqTAc2P0gvpIBSd0dhC0xVubm6JhRMkYnGG5hip+qpu9e6O5eUlQGPQaxOMp1EUF2FKTs6PWV5awUahcn7M4uIq0rY5vTwFJMFAiPm5OWxVYdjrMux1yWYyNFsdOu02mUyGbDbD0Bxj25JOq0G73ULTdFRVYX6+xP39PalEksr1FZvr62iBCP1hm2AwzMnRIbP5Iienx0TCYaT01fWlxUXOzs5ZXFnE9jwOdnYnkYa+HyIcCpEvFNBRcAR0u21cyyGTmeKh8UC702RmOk88EUNKyc3tDaPhyP9xwfkFqnc39LodNp9ucXlaYSqXpXpXozg9jdDg8PiMp0+f8fz5eyzPLXHbvEdRIDc1w+2d75vRNJVkKkm/P2B+rsCwb3JxVSEcDhKJhDHNMcVigbv7B1xXApJENIahB1B1ldu7O8bjMVPTWZqNNqViGU0FTxGM+kOCUYOzkwvyhRwOCol4iOODE6Zn5whic3B8RjQaJTs17QchlRcwHZPq/T1zc0WGwzE3N7eEwiGmprJoqoKUCp6QCOGBI3l4qJOIJYgmIwxNk8pZhfnZeSKRABcXl3R6PZ5sbKKoGifnJwz6AyKhEMV8mUhUByk4OD0hXyyiuBbXN/dYts3q6hrHJ0csLi0RCRm4KIwHAy4vKwRDQdLpFNX7GrZtI4FcforhYEQkEsE0LeKxGO1Om1AwRLfbxXYcctkc0ViQvYNDFtefIEwLS9jEjSjHZ0dIAaPB8LMtBKLRCEIRlBZWUByw3DHHx0eU5uaoPtRBkXiOh2maZDNZCrkiHq4fGSmg1Wrie3uzPNSq70vxeuOBdDaFJxXqDw/kCtOcnZyjGzrDwZDtrTWGlk04FOb07JyVxSUU1aPbsUhEdSxhoEgHzwWhwmg4JhIJI12H8dglHPHNAMseoxga7WYLVQim0mmkEDiKwu31NcNen7m5At1en6ARJJaKogmd4XBI7aGG47iUFhYx+0NqjSpTM9OEg0EuLir0+33efuspvc6QWCyMrcKw1UdRJVogSvX2imJpFonuO79sE80wsMdjFFVFUXzb0ggEMS0TRUhcz0MxNCzLRNeDGJqG9F7+tpfK5eUpc/PzDPo2FxeHbG4+5fLihP5Ei3n65G1OLw4RKCytLtPrt6jft1iYLdMbDXA9m16/z2yhyKg/AAnd4QDbMUkkk4z6A7KZBN1Bn2azS3Y6SzAYoP5QJ52YIhQJYJoj7qsP5GZmcTyTSDDEYDSiVr1lsTwHMsB7L54TDUdY2lhFOB7HJ6f+1VhFsFia4+zymumZGcLBEL1RHwUJUkFTVYKRAL1uH3M0xtANhqaJIgRGwMB2LMKhIPFolOFoRKVyDQjmyyVc2yUSCLF3uEe+OEsyGQOhcXZ0wkp5gcPLM7bml+k4YxzH5ezshHgszvzcLJXrG4rFIhcXlwQCAYrFAlJ6jMcmY9PGNsdk0mlMy2IwGKIZOrFwhNvbW+KxBKFoCE86gKRRb5OIx4gnE1xVrkgmotQbbTJTWeq1Ov1+n6fbTxmMBngKnB+dvlYIfGZ+aHRmZhqhgoqHxGU87BAJhWi2WkxPTaMGBEIqXFUqRMNh+t0uSlAlEgyAqxAJhQEPRUoSsRiqpnF9fc1UNkUoGuXm+oZCroCqq0RiUeZnC1xdXTEam9QeGiQzaTIzGTq9AelIlNrDPfHYPIq08FBQVbAdj8vLCzZW12g027Q7TdZXV+k7Fo1m04/jdm1S0znavSGhoI4RCBAKBEhFEyiqSnZmmnqtSsA2GNoWrXqTVDqJ8BRGE6fncnmBk8tLZqamyE1Pc4+g2e3Trj+QjJYYSUG9XmUwMtncfMLc3Dz9Xg9N0/wAmH6XSCjCzfUVyUQCXVPxgFAkxqDfR1UEjudiaDqWaZJOTSGiBop0kZqH58J8qYSUgsvKMZFoBAWX5cUSJxcV+r0+nUGXqXyWRDiDY45pVtuUF0p4tstDs4qmG5RmiwzGI86vLgiHwwBEojGC4TBja0zXHJPOTpFMTzEejLm+vCE7nSEQDTAaDalcXjFfKmOPxzRaDZxsgtZDi6XyHAiV7qBHNBpjqVRm0OoiUZiezpMOR2iOOkjFJZPO0KrXCRZz1Gu1yamNJBQOUQjPEIvFMUIhPNslKD1syyISCqEH46ioSCG4u68hUMhmslhjk2arhVGYJRqN0qw3UVRBNJZisVxGagblxUVc4GjvkGgsSiyWZHFhkU6/TiabZmyNWFxYBlUw6LQYWRa1WpXsVJZ0NkO71aDfH5JIpsikU9zd3jK2TLKGTr/bozfsg4RUMsXdXRUjECabzlCr1ynNz1O5viWbyaAqCq4DtfsavX7vQ9feZ0YIPNQbdDtdyuVFYmGDq5sq25sbuK7H9dUNasAgPZ0hEAhyX3/AtkxSqSQilSIQCDMcmghcdCNAb9jD8ySWY9EfDjBCARLxOLqu4EpBMpVk0BtQni/y3osDCsVZ+s02rU6HeDJBLBJlYXEBR0hQBKp0QQpUTxCNxmj1OnR6HaQE1/MIhXVSMkmj9kAkHqPda9Os18nlchjBANlsFk+BxkMNhhAOBxibLr12j3x+ltOzIwKBAEJRSCZSeFJQKpe5vbik2e0wN1ei1aizvLiA7UrG7R5LCyXOr29RVBhbY87OfO0mV8gTioZwhibJRBKJx2A4RNdULqo1kqkEY8fFCAdo9zpEw2FM20SzFayRiSsdovEk/W7v/TDj8kIZ4Z/uU1osc3V5Ravpx7HHS1MM+n3KpTK2dDGQZKfShCIxOp0+eiBINpsll5tBehLTtmm2Ghi6RiQUptvuEggFGYwGE
y3Fj6e4vblD03QMQ+Pk8oKF8hK1h3sSsSTIEK1eg3arQyqZojns0m21/VB8AfHIPM1mG8eOcXd7y/bmFq7qUV4ooQgN27IZjIY0uz08CZZtYSgqqVSKbruDaY6RroumapiOg23ZFGaK9IZdxr0xM4VZNH0SYxCOs3ewS3HWI5aJorgug2aHQCZJIpEgnkqQiGZptxs02x0QHpqiE8nHqLca1G7uiMaiJFJJgmGDdruNohksLubABelKAoEA+UKcaCxOLBklq0z50ZZ3twSCAYbDPvf396ysrGAoGvOlMteXZyyU52h0uyyUF7m8uqDT6bx27X0mhICqqpTK81xd3tDptImE8iQSCdrdLqlUinKxSL3bZTzok8lk0XSNQb9PKByk0eoQDJgIJIqEVrNNNBalel9FCAXTtPEcj/5gwHhskUym6Hb6tBp1NtfXSSST9Dp9lkpl5FWF+dIc7UaLeCaGQKHT6pBKJvyjLF1QmJ3l7v6aSDRCt9PFm9QbC4eIlstYtkWn1fLvKiSS/t04DRRPEA6H6bS6jPtjEAqhYJjuqIvrecwW51B1g7ubCsmpGJ16h4X5Es0Xz+n3e8TjcTwhabQ7DIYD4vEYsVgUpIcyiQGwLZurC//Xz4LBIJFwhEaz+YG+brcmE6Hn/xr2sDcAHlheLnNzd4+mG4SjUfrDPvVanXgiBlLSaXVIZhIoQrBYLuNOBIQnbbr9PolEmk67A45kOB5gmw7jkUVCCDJTWQa9PrFIFMeyMRSVZCJFrfZAt90mX8gTDAZJpJLouoriCGKRGMl0ClXXSSRiaIbC/Nwc11dXNNvQH/aYK81xc3WLByQTceLpFM1mi3a3Q3FCG0vEqHdaKKogGY+BouBJSa/TxrJsbNtmZmaGUCRCq9EhGgriIWm2O6iqRqfTRlVVNF0lMvExDAd9zIFLv9/DtVyS8ST9fpd4Jk2102Q87OEJyWK5zGWlQjIa5/KqwlxxnmQyhvDgodGkWrsllUqSSCRRDZ36XQ0toBPUAvSHYzQhkNIjEAoxGtu0W00QHuGof4qSy+WQ+DEKoXCYQX/AAEglE8yX5/CEx6DXYSoeJ56If7aFgGEYCAnF0hyKC67wCIci9Edj4hkPhMtwNPCve3ouy0srhIIh/wYegnazNQmu8a+vBoIGsWiUQCiCa1uoSoDcTJ7q3T2tVp1AIEA6nUbV/IWZTGVwFQhHIuBB5arCdnYb6SkMe30SyRitRhuhKiQTUYIhA3NgkUgksCwbazwiYIQwbRPbdOj1+iSTCczxiHA4SK83QFMDmCOTVCpNJOSfghwcHmIEDNLJJN1uh2QqTTgcplVrMxwOuPdcMtk0oVCU66sKmVSK0WhAYS5Pq9ZkbJuMY46/+F7BeDx+/5rxVwIPjXg8SSqTQlNVwpEIQjQozhWpNzrc317jMI8nPAISkumsH+MuPMLhMI1mncFgQKfTJZebwbJdpnMzNFstNEej2+2BB9FkBH0sGPdHKBKSyQSxaIxWq4WuaSiaRnvQQdVU+oMu8WCS2XweKQXNZotCocBt9Z7ZuQKNZodiYZ56p0G338eSHneVG7KZNK7jEInGSCbjVO9uMTQNNxph2OuQiCXI5/K02i00zSCdydDrdmg0H+hpOsFQCEVV6HTbqKpCNBajP+phmTbhQIC7u1sCwTDFuQKH+wdsrG/Q67Zp1x+whhbhaJThsAdehmjE93VJKbm+uSaVeAKKg+WMSKYzGIbKaDxkKj7D/FyJkTnk5vaGUDCIYRj0+gOklHjSJRgK0u/2KRZ0wrGAHz2rShCCfKFAq9XEcTyk6+AZCriTOS1driof/l9OfCZ+XkwAwoV6vY4QLs2HJjd3NwQCAVRHoBo6uZk8hm4gpEBIPxzVHI8REjKZKYKBIJl0mlQqhaKppNMpHMvk9vaWkTlGuJJcNkez3sQcDcjN5kBRsR0ToTggPW6vr5FANjuF6woa9QcKs7M07hvc3N4wtsZIKXBtyM3kMAwDVwr6wzHV2gOtdhuhguN4jMc2tuvgIbBtB9exSaUzWLbFQ71OrV4lEolSKBaZLRZ5qNVoNqqMxyYDc0y+MMvNzQ3BQIDRaEA2k0EIKOZmAZeb6j3TuQKtRh1rcvfgq4LnX7ntdFq4jovZG5POZNEVldq9/1P/ljnAGo4ZWCYSlVrjAZAMRyMsyySXL+B5HtVqDcMwkAroRgDDCJJKpxm7NkgYDIa0ex1CEf/Eo1FrEAqGGI/GmMMxjuVgWyajkUmr2vB/L8LxuL27xXMlgWCA2kMTazT2jziHYwKBAPbQJJueYqZQ4PruFtscIfC1okAggHQUer0+tVqVWq3KYDgknUkipSQajpKfyqEbBtFYhGAwQDgcIRqN0mo2ubm+IZFIMRgMSSRiBCf+qnQ6RbPVYDAYMzYt5vIlzPGYYn7W34mHJp4iyWYzZNMpHM+l0WpRnC0yPZ3DtT0Egnq1Trvf8UPe0wkkgmAohOPYSNcjP1OgWJwjFA6BlPQ6HWzTQniQSaboddromkZuZhbTcRgPLKzRGHNoct9ok87+np8ReB+fCU3Ak1B7qOOpEqlI7qt3TGWnsK0x99UHUKQfwaYq/qUgBYT08FyPXq/PaDjCdf0fDvF1A1BSglarSTAcYtDvEgxoGEqATCaFJyVSEdSqD0zn8zTu7/EQk0scVQqFPK4nuLu5JZNN43gu0zPTZGeytBotZqancRyHkWliWhaKoqAbGqFICMf1iMYipJIJpPT8yyWqihAaUqo4roftOAiEHxqLwHVdMpkMnuOhqSrZ6RkeHh6YmZnCcj0KhVlq91Xu7u9RXBVPh6mZKVzhUq/VyabTH9PDHw8pQUqXh+oDtu2io5LP51AUSXY6g+YqzOSmuK3VyRXyuJ6F67r+5SBVQ1MEqqIzPT09GVMPx7ao16qEQ0E63Q5GMIjnKQSMCIrmEI1F6Pe7dIYjgvEIUkCr02FqJk86naLRamKZlm9yCYVsNkuz3uCuXvXveWxvg+JRmClQa9yjaRpT2RnuH6pMTWdRFRXPc7i7q5KfzkNcoiqC4XBAfzgil8sxGpl0Wh3C4RCqqpBIJIlGI/R7PdLpDKPRmGgsjuu1abYaaJrKzPQ0jWaT+7saWxvr7O4f8GR9i4d2HaTDbKGIa7vU6zVmCzmkEMzOziKkiy09XE/i2JJ+p00mk6XefEAIcEc2Y/P/Z+9NY+Tb1vOu31pr713zXF3VVT139Tz9z7m2URwrBizhDyBZwpGQIuGAYyUSEEDCAmFDAlEIiowVy4kQQhEYARIICILEQkkgMgakGGzfe/49Vc9jTT1VDzXX3nstPuw+53q45/r6XhsOJK/UX6p679q1V+13vcPzPk+fwaCPrULEYwkmJ6HX6dPr9pFSkI4nGA4HgSMpzzLod8jkMpyenhFPJEglDZalKOSLKGkQRtBsNlDqd4p5fdO+EpGA57mY9ysxxkYIwWSxRKGYwyAwPoQdm9JkiVw++MEPBz1eX94waKKxEOXJSYwx9HpB+OQToOgy2TROKMTT0xPN+yaZXBqlLKQLD/cPNOtNjIa7
5h2l4lQAAzagTLDbSy2ZLJaYmCjw0HpA+5rHxycQAVNP+6nN56JPRgsiToxUMo6Qhk63R6/b5+3ljVaryX2rQTQeeWezgaf2E74bcAfkiyWklAgjeGg1Eb5HqVwCAUIFfVCDovnYQiGYLBWxpWByooAxv1O05/dvQVgZREEhJYPwTGvcd5afwmTQj7+7a9GsNxDGMFkqghJYQqJ9w9PDPYXJIspSZLN53p7fmMjlg4o8IIxhOOjjeS6pZJJ+t4c0klJhkkg0QiaZIeyEeH5+ot5q4rkuCNBGct9+pJDPc9d++CaLlBK0Wi2kYygVi2gJdw9NpFBMTRZRCJR0KEzk8D2Ph8dHbNuhVCxSmiySzmQxRiKkDMhEJPR6wZppIzAGMuk04VDA/CMN5HNZntuvZNNZJovTPDw8UigWEUqgjcYEM8Bgg8Bw99DkrnGHAZp397Sfnsjlcjw8PeALjbIVpWKJYqlEJBoNHFI4hqUsWnctpBAkUzHeei8Mhn2ymTROOIQhwEzcPz0y8n0KhQKTpTJhR2CkQKFp3bVoNBsUi5Mo9eXKZF8JJxBQKfnYwsJgKJdKCDyksMnmJkAYctkUSmuUIGB8kSqg7vJ8CsUisUQMMEhLgNAYX5POpul2usTiEWKhMHbI4uHhidF4xF3rjkK+SFhIChMFyuVJGq0G2ULweY1Wg4mJPI1Gk2arARjaT23y+TzSkow8n5d2G8exyWQzSMtiOB4yHA8wQuKOPeLxBEZKEukk0WQEZSssoXh5fuHu/o5sJs1zu42rg+KiZdl4vo+yFJ7vAwILiRQwOVmgMJFDCMFEqYjxDPetBwqlIrlc/g9gDUDailw+gwE836PVauFpxeP9Ex4aLEm5PM39/R3KSJr1BhIoFArkC5MIy6NRb/D0+IT2XF5fX4nHYnReXynkJ0imUwzGw+DHWW/Q7XZJJuO43ohmvY5jWUFPPxolYtsYrbGUzf39A3d3LVxfUC6XKJcnKc6UaTaaPNw9gIB6o0GpWEApRSE/geVLfN9HCvC1j3QUYSdKNpvDdqJEIjHaz4+EQzaThSLJVOqdussgLUUilWQwGjAc97FtCyFkUC8J2QilAEkul8d2QkH0IwWFQgFhKeqNOhYBg5OFjSUtLFcglYOSFtIYQiGLdCLF08MDAsFgGOAC0ukMju2QTCVRdgAf19onmUgQi0dBSFKpTAAHloKJYpGQbeP7PihN666JOx7TaDRRUuEoiUHieV/O/P6VcAKWZfH4+EAhl6FRr5NOZrmt16nd1mnd1bFDDhrJcORxd/9AbxT09i3bJpNJ0+11ub2t8fb2RiyWIB5PIoXEHbtYlkJKwXA8JpXK8PryxuPDI45lk5vIM5EvUm80yRSKhCN2QHUnBI9PD0ghCIUtnp6ekFIyOVmgXm8wHo2D+QHbZmIihxMKEUvEAp7B0ZBON+jZx6IR4hEH3x0Ti8aYKBSwHYf8RIH5qSmK+QKxUIiHVhPPc8nls4TDYXLZHOFomNubWyYmCtxe31Kr1Wg06kzPlNFoGvU6dtSi3miC+IPI6hTZVIbHxyeUtIlaYZxQBEtIylNlmo16AMcdDpkqB4Qu4VCIm5sat7UazVaLbDZH+50CS1mKYrGERuFrQ61Wo3ZbYzQakssHtRGDIRaPYimLYW9AvdWgVrslZIeYSOfpdrokkwni0Sjl4iRCyvfj80wUsoTCYcrT0xgtCUVi4AXce1LASCpUyKLRrGM5EXzfI5vLoEWwgwpLvg8AEYyr4yOUxAiD6/kYNCFH8fLyzOvbG9mJHM+vz2g34JJsNpvU6lfksgka9TrGGKz32nQ4EkILj1q9Tr5QYDgcUG/WyWbzAQegcMimsyhh40RCeOMhz49PWFLhex5KKcKRKBP5CcLRKNIOoY3Ac30en54YDLvkCnneXl/odTvcNZpgCYQWPDy2ebx/xA4rMhMTDEcjhDRIO/ylK/+VcAKfk0z6QpKMxFBKk0qkCIcjdN5eSScyGCxGxuPu/p5arU5/MCCeSmK0ptlo8dR+xgmF0K5P+6nN3d0dD3cPAeDo6Zm7xwdcd0S5VGZ2ZpZsPocRCmNDLJ6kdnNLLpundnMLvsV8eY6b21t67wg5T3v0+4MA7BIJY9uKiXyBl5dXHu7v8UYe3nhMOpMmnUzjeR5vbx1en9/wXB9HhQLasYd7Xl6e6fR61BoNYvEoqXgCJQUYTTaVQStDITfJ01MbBIRicWKxOPF4inQ6T+36lkgkQr/bx3ZCNJrfqdjwl5svNPiaSChOOptlMB7S63VRxieTzRBLJPANtNuP5HJ5POGRTWcIxYLKfiIaQRFmemGe6fIUliVJpVJYtiKVzhGLx4lFk6TjSfK5LJOlSYSQvL51Gbtj4skAwqyk4Pnpkb47CrhKPZ9+r08qk0bYkqenZ25ubpFGk8nkyOZzCCxy2SzXNzfEoglubq+5uQ1ATc8vr0STSR4eHmg06kgjsPCxHYWwLB6fnnjrdHh6fiIcDZFMJLCl5PmpjUSSjKeIxeK44zHP7We8vk/EiZJIJnl7e8XzIBqJIQkwCq6vmUjn8BG0221uarfEE3HsRAQlNNGwQ71RC/7ZkqSSaYxQJBMJQuEw49GYeCxJp/PGc7uNMYFje3l5odFo0rq/xxuNySYSJBMxEpE40WiciWwCC8nUVJnZ2Uky2QmEb4hFEggp+Ml/6d//0rX/rp2AEGJGCPErQohDIcSBEOJffX/93xVC1IUQn73//ZPfwdmYn5vj+vqGWCaDkIbXzhu5TJrpUplGs8HYG9JqNbBtRaGYZWp6Cvk+ZjnoD0ilk4TDDi8vz7y8vPD6+oLnubRa9zw+PDKRz/Pw8IATCdPtdbm+vcb4PtfXNyTTSXLpBFdX16RSKRQj0qkU6XSSVCbN9OIcvgvPzy/0el0SqTSWsohGowwGI6KxGNFIhGgoSrPZpD/oYTsOg8GQ0XhMOpXGtiP40pCIRUkl08QTKXr9Hlr7vHXeuLm54erqmsubS4QBXwvmZ+a4uroil06SyuVJJ1JcX1+TSiTIpjKkImmymTTJROK7XcYvTGqN8aHb6eAbj+fuK+loHF8K8AS5TAZMQDhicIMxWeGQyieZqczy+vaGT3B8PJOkPxzRaNTfyS7HuOMxuXSaWDSGEOC6Y4wBxwkRCYfJZdM4IYdev48TtnloPwSUXyGbRDKBVIb67TWlyRKJRJrryxp4HjeXV0gMUhpS6Rxvby8k0gneXp7JxjPMzswRsiSzc7MkYkkkmsFoxGPrnoRlEY/E8EYew/6It9cOo+GISMQhGg1jOcH1hMIO6WSGmVKZkGMRzyRIppPMLUxjlMXb2xuXV1cYNDfXl/hY2EKyODtPPJEklc7x+vrKbe2WVDLF29sLRhm80Yiryyvunx6Ip+NfzFW0249EQjaRaIjXtxfumk3i0TBKClLJFJ1On+5wQCKRZjAckIjEkMZGS0Mmk6bT6aN9jTQWL51X8H1O9r9cl/Z7iSM94KeNMV8XQiSA3xRC/M/v7/2CMebnfz8ne2q3yRazWNLHaEU+m0HYFqlkAjtsY1k
2xWIRbzzm8aGNMQLPc7/ohQ/6Awb9AeN3roDPrdcLRmHf3t4YjkYMx2OG/QHGGK6vL5iYzHN9dcnS8iwCRTweDYpRAh6f2kF2oBSxwjTGGDpvHYou3NxeopRFLpen03lFpOKMXY9sZoL+sEej0SASiWG0Zjga4dhhhA+RcIKoYxDK4/5RYYQknUziegJhC1A+tlEgXFK5LE4IlAbjGLSS5HMxook0QHCcNEQykd8toP77NIUAKen2Ogwu+0yVp3h5fiady2Kk5uL8gtnpRZbmF7m8vGBpYYHzq3Pml+bIJrIobSExZFMp3JFHs9F6r1UYIrZCp+J0h2/ct+5JphIkU2nCdpherxfMfQhBLBEMFj23nxmPx0xksyjLJhkNc317wevrK25kxPz8MvXaDc6sIT+RAenjGYdUNsr19Suzi9NYCwukwnGwNWOhyKaSjFyXs8tzPD+oP5VyJWTU4FgS78Wl3+uTzmV5fH4hZNt0Oz2EEYRsCyUtCMUQUiOkQWh4fnrh3mszkcuhBIBgIp9FaLi4umJxfgHtghGabLqIkC43tTpT5RIXF+doz9Dr94jGokgDQ3fMeDwmm8thOSF6r6/ctVrE4zGKpQKeB5YI6kW2bdFs1AlHLBxH4GK9z0UY3t56jMdXLCwsk8+lubi8xMgvLwx+107AGPO5Mi3GmI4QokpANf77ttFoRKE8xeNdk/xiCldAIhbn6OyMd16GoAIuDL7nf0sQzO98+H+n9d9piWpgzQAAIABJREFUoQa9b3L493pdTEszVShxenIZVPnvDIvLFS4uzun1elQWlzi/OOPW1SwvVTAYlGVRKBawhIXtWLjeENt2yE5YRMIRotEQd2MPBGTT2YAfICAjQ4iggnxba1CcKNBs3TFdniJqCTzbxsbn/OSCxcUFzs/OqVRmOTu9wCjB3GKFeDyH0hKUj6c9bm7rFKdK381t/20mZLDRz83M4HmaeCyKI8IIYTi/vCSfn0RIn9bDPd1eF4RgopBH4HF2cgFSEI/HScXjuGJEJpni5bmNE5LUr+uEohEisSidbhcn5GDZfXw34D4IOSECkJdNKpnCc33wDZZjYYTGsiW5TI5sLo8j4PrmHGMMx+eXGATxpSTnZ0cI4bO8VOHi9BKM4N7cgTBIyw4iylaD/EQRoSSWkryOOvR6PUrFAlmZ4fHlmfpNjWQiSWc4wrYlo/GY8ThENGpRa9QoFvLYlsP1zU2Q7lghErEYFxdnzMUSJBIpzk5PmJwscnZ+zspiBdf1eXhoopSgVCyhbBjUh5TLJXo3PYaDIfVag0KhSDYzwWg8otaoE4/EmZma4a33xs11jWK+QDiVxhIezXqT9lObxUoFowSWMlyeX+N57nvLuQjKJZ5OIaSDo7+8g/QHghN4V2f9FPg/gR8C/qwQ4k8Cv0EQLTx/i2O+0B0ASMXihKbKSF+DMpyeXzNdmmToelz/FrrqP2jr9/rUWw3KpSnOLs6oLC8gxuKLWkA0GWNpeRlf+7Tu7qnMz6OFIRpLIoxA4xMWEaTRWE6Yl9c3xqMhk5N5jASpbLQBLVxazTu63S5TU1MMBkPymTzD4R3nlxcsVZa5Pj3H+B7lySlOz87p9fsYI+n2e6xWKlyenrK2uoI/GHN2fokUkly2yM3F1fd8H25uaizOzRGOxbm6vCAUmiISs1FGMVksUW80SCwtUJicpCTB4JNIxvANTE/NvDM0G0aeplavMzczjxMPI0NhSlNlLAm2tAkvBzRoEkW7/8TV9RX5XIlMOk3rrk4sFSceTSCUxii4ubhhPBpTmZ3FhCwsJP1+Lxj/FqDfx6X7vWBAJhZJ0u9dsLy0iDZweXlFpTKNchTJVIxWs0k0FmF6dor2y5CH+wcwkpATov34RDyVJJPPInwXgcIkDXf3dxilyE5MYFkC3xWUJorcNuv4vkZAMJyG5OT0hFJphlgsRPeix/H5GQtLi5QnSwgpsGybi8tzpJSkMilmRNCqjiSi3D3ckUgm6fY6TJXLWMrCDjmoqEM64xN2bO5adTqdDvlcAcfpcXtzS2V+ikbrgUwujyUVCrhtNEillzAaYskE1aPql6799zxKLISIA78K/CVjzH8vhCgCjwTVvr8IlIwxf+rbnSMcDpvV5U1Ozg9RCObn5zk+OWFzfZXq6Rnj0ZhQKESxOMnNzR+OQwiHwgxHQ0LhEKvLa7jDAZ6EcDiC9AWepRn3h9ze3GA7DuXpMq3mHbniBK/3T8zOTGMM7yhBH9uyaD+2MVIydl1i4RixSITrmysmpicIqQiOsBgywhhJREUYu0NGGGJ2GH80pnpyyKcfduiOh9gqgjceUKvdsrq8SG/goiwPy4rx8trh9vrqe74HSyvLxCJRqtUqc4sVwuEowjecnR3RH/TZ/rCNMIqTkyPWNpbx/UBp6fL0jJXlFZRR9Psjjs4PCIVCOOEQswvzKD8oenZ7Hd46XSKRKL47JpPMMvTGvLy9kIjEiUTDCEfTrN/T7w2Ymp1FScBVNFq3uL7L8vIyft8lHIviazg5q7K0voI/HDB2bWKxMHu732B7Y4ezy1NmpmcJRwJchutrvPfqu6MUvoaHpwfc0YhSqYw39nl+fSbk2GRzGTCGRrNJLBQils5Qr9XJ5wvc3TWZmZ7i5PSUhcoswtiEQgpL2vQGY6JOFC00I6+PERrHCmDiRkv0eMzJ2TGV5SXClsNoNOC6Vmd+fhGtfQajPm/Pb8TjcXzfpVDI8/D4iGM5ZBIphp6H1j7CdjC+j+d7JEKKkWu4qdWYm5vDtsJ4wxEXtxesLy9weHr5OUHqHzyfgBDCBn4Z+DvGmL/yLd6fB37ZGLP17c4Ti0ZNZXkNM+wyUha3l5dMTc9Ru7lkNB5/fi4sy/pClukPw3ZWVtk9OebDh02OqmcYESDVlhc3OLuqsr68Qle7uP0htdtbLGVRnpzk8voa27ZJplJEYjFajTrZXI5sJhsAfXxNq9mi2+tSzJV47T1TLBWI2nHOr8+YKc9wfXMdwGMBDKytL9MdapIRi8PqMQBbq2u8uSOUsrk5P2N1ZRUtJOPhgKNv4+m/U9tYrlC/u6eYL1JrNQOsgjEszM2AtImFQ4wx6OEQx7HRUoFSuP03rq7rrK5uoH2XbveNWr3B0vIiynJQvsE3EoyHMS4v3S7jwZhyeQrQaM8HYSOVBOlzfn5Bp9OlsrjC/UMroIQbj1haWqJer1OpVDg/O0drTXlujsbNNStrKwhfgLLxvT5KCMbaJ+KE8XyXy8tbyuUZ7u4CotOb2xoIiEQThMNRntt35LIZstk0RgTs0BgYDTWOFMiwZDzSWErijVzsSCDjFrZDHJ+dsrS0hJESrSXnJ1WWVtc4O66yvLrKydERUkmWlpa4ODtnvjKHLcK40sXrj6nV65TKU7TbT5TL04EmhjRozyAsC8/3sKRCOgozDvJ+aQBluLi+pjw1SUha9FyXmBVBK4Hlw8AfcXV5yfTcLMqXHJ8ff0sn8L10BwTwnwDV3+oAhBC/NUH9p4H93+tc2sBR9YDTq2tCtmQ4HHJ9ffGFA4BAn+0P0wEAHF
1eAHB4eMJoPKKysMrC4iJ22Ge+ssxYWDgYEuEos5NzgCAaSzBVnmI0GvH4+Eiv88biwiJow+tLG9sKagi+NhQncqQmEpRnpwk5Yc6vzui8dfGlYOSOqczOUVlcYL6yyFH1hHhYYTxYrswzHo2C6vPFBVGhKM9Mc3B4yNnJEagvB4J8pzY9N4+KxpienSIcjTA/N09lboGF+UXCsQRXVxcMjUABZxcXGC05qlYRuFghh9FohOu6nJ2dErMiASyaEMPugKvbG4TSdPsD7h8fmUglKBfyPD09clg94vGlQ+OuyXO3jfY15alpVtZWiMYlrhs4gMWlJcLRMOPxGF/A4nyF8XhM3I6wOL8KCKrHVaqHe5ydXXyh9edrjeU7TBYL1Os3lMslwtEIC4uLzC0uUJ4tkCmkmKssYoym3X7DEg53zXv6r29EbAsZklxf3+B5Y7RQ3DRquK4fkI9In4XZeYQrOKlWMdIwU1ngpHrA3NQcJ0dHzM0tMhwMOT09ZXZ+Fm0kZ+cneMMRjVaT2cockUiUWDxBq9VA2UEa9vLywlG1ytnJKa8vXRo3TapHh1SPq3SHAy6vb8hl89ze1Bl4hnjIgRCcnhyzf3LI+fk5swvzXF9eEYpHvnTtv5eawA8BPwHsCSE+e3/tZ4E/IYT4hCAduCLQrP89bW15CYyhehjset4f8gP/rWz8/pmu67K9uU31uIoQkq21LUK4eN6Y8/NL5ubmaT02WVycRwpIpdJEUwmkkUgpsISgOJHDN4KxL5DGZXqqSL3RwnJ6PD090ev1mJueZ25uBguHjaUKJxeXLC6uYEvN0soiKIuDgz22N1cB8JXBHbsY4RELR6gszeNbga7e92ryHaWopOLs7Jz5+WWcOGgvgMMuLCxiSc3h/gHLGxsIEXATeq7ENoLK2jrS0SwszeOPA6ckMWipcV2fzluf55dnpmbKPL49Mx65TEwU6A+HgI/WHp4nQYQCoU3L5/L8kuFwxOrSMrFoCIRidX2V0Bi047C+tY4lQdgC7SsqqxWUlgjLoD0LLXwcKXBtiKo4C4thxsMep6c14vEYmVyap4cuUzNTdLpv+FpQKOZp3Te5f7jHnp4mYkmU7TI9NQVSgdLMLU5jyRAnJ0dUFhY4vzzH9zWe72ELF9sxrC2tUD05xvM9QiGHlZUtQqrP6eUVyxurFKdnqN3esLAwx3igaT5ckc1meRu7PN23GXl9lCVxXZdCYZJMJknKj5HP57it3XJ5dcHc7BTxaJim6yGFjzAKTxuWlyuMXY+T4xMuzs7xPA/xbWbMvutIwBjzfxhjhDFmxxjzyfvf/2SM+QljzPb76z/23kX4tjYcDqienCAt59vCG/+ftMOjQzzPY7GywO7hRw6PjxFSM7u8ghWKsbg4g8ICZRgNe9zVmthSoiQ8PrdpPT6ipMTyBMpYOFIxU5ji6eGJ7MQEqxvLhJJhzi+uGAz7CNuisrCCZQuOj485rp6ixh6e56GlYnNzBzkMlssneMBCYYeYcpCj7312wEZzc3nFaGhYmp/HsiRHB0doPeTo8BBbSQ73D1haXeP44AB/HHgehc/BUZWIlLjeOOhkWDYIkI4hEk1QnCzw2H6kNFWm8/LKoDtgolDAWDalUinghIxEyKYT3N5e0Ol2QRvmynNsrW8QCcfxgIOjIzAKEVEYSxO1NHsH+xgVqPg6wiFkCyIiguNopG84rJ7g+2MODvepHh5Rbz6wvLFCLpXl5emFcnES7RoyqQylQgFbagrFPOsbG6TTmeB+uxbCsri6vKS6u48/9AFNpbLI6dkZlYUKvvZZ294BozjYO8WyJOtLG3y6uYklJJcXx4RknKWFWSwNjhD4vmY81jSbdaanZoimAuxBOhPGkobx2GdzaxWBy/7BPodHVQb9LvPzc6yvrfDw8MRwqAHByfEZI+0hjY+wBSEnxOra1hfPk3a+fKf4SnAMRiIRU1msII1kr7r3//bl/DaTMnjwNtY2UFYYhIuFwRMaJYJecuuuSSaTodt5Y3Z6mla7TavRpFiaoJifoFG/4+m5zczcHPF4PJCu0oGyML7GUoLq8RGu57GxsYGxDNIVSMtH6DC7Bx/Z3vrA3v5HjDGsb33A9+Di/JDltU38YZ/jk+Pv6XsKId4lsSVrmxUsFcUIwfHBAcsbi9jGxnc9pGOz+3GPT3Z2+Gx3l0+2PwVtcI3Hydkx62vr2ErQfR1Sa92yUlnGk4a3tzde288kEjFqtQbi/b5OlErk0lkUIC2J0XB1fUVhOk8snAbfDfJf2+C5EjHSVM+rVNbWOa0eBvMBUrL1yTZC+xhXc3h0wsb2JkIY/NGIw6PTQJo+FKJSWX4n64BO55Wbeg2AXC5HuVQOtBeloNFsEE1E6Lx2SGdzPN3fU8jkiMXCnF5csFBZ4Pz0IoApS8nG2hrV42PW1zfxgIgE38D+wX4gjba+QfUwkLyzbZu11TVG3pCb6xqLS0sYJJ3OG7WrQCw1m80wWS4Fu7QxaC2RIuAkeHlXi5qdn+O+dcf0dJmwHUY6ksODI5Y3VlFCcrB7wMbSKsbWSGmzu7v71SUajUSjZvTe+/8qXM9vNSEEW1tbaM/n8KgaaCOurwcFPE8ghY9vDN23Du3OKzOz8+AbHp9aCGMoThbBN9zc3pKIp3h5eyE/MUEyFuPk7JS5xVkuzoKwd3N9EwuFCYMwPnu7VTa3N/BHFnbYxxsLwsLgWqCFYjwacnV+ydzCEqfH31Lm8fdlKysLOKE4ShgQMpjg1MFUnFAStGF//4DlnQ0iJtByHGuBZYYYbXNwdMTqxgbnp8esbSzS7xrOTk+IxeNU5hfpvDzz3O0wMz3F4+MTjWaTYqlMYaJIo3ZL+/mJuZlF4uk4l5fn9Lu9L65taXmDq8tT1laWcIUiIgRjqTn4uM8n22t83D/+Yr02t9c4PDhhc2cLbzTm9OSUucoCtasb1pfXeev2eHxosTi7iIuLrzUvL894+EEBWigK6TzCkfg+4GjeUdWByAgAFofVfZaWF7HCIZQegQTthxCWxLgeBweHrH9Y4/CzKjvbG/hjQ/X0mI3VZexICE+74CoGoyFnZ2cAASHp3HwAPhKG1t0dEkUun4d3aLnQgfzbxeUFvV6X9bU1Lq+uWKosMbIElwdVtta28aWHGgt2T/b5ZOv7+Prur391nUA0GjWDdzDPV9GUUuxsbAGK8bDH+c01i/NzXNduWVle5vmtQ/vpKXAAQtJ+uscfuRQLk0hhqDfrPLbbzC/P0X5o8/bSYW2+wnWrzuz0LFbIBsdCajjc32d1fZOjw322NjdA6kD020iEK9k73mNrewejNHh+wNmvxxzuH/1eX+Pb2sryIuF4DKlthK85ODn8ohC7ubXF4cEBxhg+2Vrns/0qWx8+CcLU3cNAQn1/j43VZaqn5ywvrxJRku5oyO3tLYXiJDfXVyTiSWZn57GlwDMuCMP93SNWxCJTyHF7fkMmkyabSOGNA+rvzyMGX0m0Z4gqydf399DvI847Wxv4nkI4QXitxnBwdsDG6hpHpyesbWwijUBgGAx7QddiaQlhBK/dL
tdXF2QyWaampxHGcHfXDKTjskVu6jWeXp+ZW1gImKonJwNqdw0nJydUKotYto0tBNoPJNXc9yECozXSOAEIS7vYGIbacHiwj3hHF4bCIaYXZmlc1VlcqATRkIKn1za3N3VyEwUmJ0soIWg06oRCDrlMmqvrG946nUCwNRIN5sc8gyMVWis8hoiQDDC9noUxPloK9ne/wgpEn+sOfJVNKcX6ygonZ2csLSxwcXvD8logjNJ56dF+fGJ+fgEwGAF3d3fctVpMTZbJFfNoHVA85XNZ7u8f6XS7rCxVcDJppBnDWHF4uP9FDre9tYWQEg+Pw91AKVkIwcb2B+yhYe98n5W1Nc5OTlhdqbB/8E0n8IOfZPjJPz7Dn/lzu/z4PzHF5lKSv/gffbOF+O/8y8vsn3T4G3+n9cVri6srJMIOHhZ4AqUkCvDMCEcIXC8Y0zZS4BlDSAu+vrcLBCnT5sYW2ngcH5+wubWJ0qDx4F2N6fW1R/u1TSqV4uYm4EEsTBQoTU7SaDaxIxFy2QxSai7Or+i8dVhaXiEathFCUj06ZmltHSnhcHePla11HOOzt3/8RfT4yc42BoGrPSSGavWElaVNqsfBdUbCYTaXl/CRaGUQxtB+eePm+oZ8Po9t2zSbTUqTMxRyeYwIWnUoMEEhBgycHJ8wN7uAEw4jLB8bwTc++y2OaedTLKP5+t5Hdra22d3f++I3tLm5CdLHyEABSY99Br0erdY9E7kJ2p0nZuenMdrw8PAMrqY4WUILg9EGJYJ0KZfOcfd4T6/fY2lthbgV4vD4iKWlZeyww97HXdY2N6juvzvvnW0+2/0KaxH+f8UJrC6vcnp+ysbKJqNRn6OzQHk3mogzW5knpCXGt3hoP9Bs3lKcLDJRnKReq5GMRclP5Dk7u2AiVySWiiA0HB0dMz8/SyQUYvfgkO2NZRAxjFJ8/Mav88nOFtoYdvf2+XRnm2/s7iGlZG1ri4iWuLZGAaP+gMPj09913X/8R6fYWk5Se+wQjSj+2n92zV/4s+vsn77xRz/N8cu/2uTv/f2HL/5/ZXWRq6salaUKYWXYPThhfWMd650swzUOtggEYT/7bI+tjQ0OqlW2P2xDwB6GkoGAgXEUI1fjvr7x8Ngml83x+vbC7NQM90+P+NpnsjiJ1gKpJDe3V2TTSeKZNNqAbwTnR0fMz81jW2Fcx8dx34UmlCBsfMaA8gPU4O7eZ3ztk238zwUUfIOWFsIGxiCx8EWgZ93pPPH8+MLC/BwoH4TA9wQKgUCidfAdUXB1cUkxk6PxTkm2trWKLYNZEBUTjIeGkCXxtMfuZ3tsfvgUaXz2dndBCHY+2QnqHa7P2DOcHR+xvraJUYHoyuCtT/O+xdJiBYnGkz4oC6EF2vcxKtgApDYYE3Rj0EHBWAuN9CVaCKpHh4FituOwv/uR5c0djvc+Y3NnC0sLPu7/QyfwPZuyLNa3thHagNEoCWMR9Fm1BlsH039GaqQ0YBSNZgMnrOj3BsSTSbKpDCjN6ck5xcI00VQMy5X4tofUYBmLMWNsS+FbCkZDHGUx9hXSaJTwMdLCFR6+73K4e4zjOCwuLnB0dMzXNhL8N7/wCVJIpFD813/3gerJE9OTYboDzb/yzy4EdF12MH/ua4NQgj/31175H/72IYtzC1hRGxAc7O6zubKKbwkspXEBpR2kdtFIbCFxpcAYH2kr3IHP6XGV9cVljq8vWFlfY9Dpc3Z+SjIRp1JZwEiLx8cnxoM+5VKBerOF48QDyqynB+bnZnlqtykUSgEHIYrT4wMqc3PISIT4WDKUFlp1caSD/z5b4o58Dqr7fG3n+/nG3m/yyfY2nm+oHlfZWN1Ey4CQczAYcdeoUZyY5PHlgXQ6Teet887n5yGECCK4qWn6/R7xZJJ0LoXli8AxeRKpDEfHh4zHYzY3t5BKoj2Pw2qVDzs7fPbxI59+soU/VMiQQbs+xrYwWDB22a/u4jg262srCKPQVgDD7r90Ob8MELG5bIbyVIn7+weUtBm7Lo+PgbOenZ2n3X5kulgkHI9xdHzC4vwCtuPg2VD9bB/t++xsbyOEwntnyj74+j9MB77VJ/M5l8F3YpZSrC5WODg9+eK1WCxGsVjg8eGZVCrLbe2CfH6CuXIJIQzeeyXaGIKx5UyGXCqBkYaTozM+r4WsfVjHxkYYjdCS3YM9Nja2ODjYY2fnU2zh4/oSXxlAIYRBGh9fKyypGbkDjg6D6/qjX8vyz/34HH/zf6nzj/5Alv2TLn/hX1wGNH/5Pz1nuhBj77TDH/tahr/1v97zJ3+swPf/QIF/8+ff+Nu/csDc9CKhkIW2DVIJ9FhysL/LxoctMHDwMQivd7Y/gAFXekS0hWeNUSbM0O1zeXHF/OIct5c1VucrtAcd2i9tZudmEUbw9PSMq8eUSmWkH+Tsxvh4RiKtgEPS+LxTHgnQPoeHR6yvrmCUw+H+Ljubn/LZ3m+yvrFD9XCXD1s7WMLw9f19Vjc3Oa1WWd9c5rh6zurGMsYPoTAI4yFseH3s8tx9Y35uFmlcjFAgJBq4rd0SC0dIZzJcXV3R6XRYWloiGgtAN0IbXKE5PjxmeXWZk+oJWmvWt7ewBNjvMcXYC6Y/bRGgL4XycLXCgSD1QoAJuiJCuMj3aT+jfbSxQRicdyIU/722ITwLoQynFyeUJkuEQjFQHspywNPsH+6ztLVKSIfY3ftGsFaffmD3G//AOYFAuxCg8rUfo3n+awxe71n/4R/n/Nf/LqNBl3/8T/8c/9sv/Vto3w2kwL8DcxyH7ZU1PAkKeOv1ODs/B4K2Tjweoz8cUZ4uvefFUKvViIejpLM5BHB2cUa326OysYpjh7CloHpwyOrSEpYKYYRBSMHHjx+/oFL/sLWJLzXSSGzhMNYjqkfHrK1vMDYjTt6r4//ITor/6hc+RUiB8BUIg7EIvJBr3iv+IIxACo22DUJJ/vxf7XB43seXiqODY0aj0W9zkdvrGyhHYaTB+DagwShc4RKyFOOR5uT4kLXlNU7PT9jeXGMMYATal0jpM9IKywT8d1r6SAwW1jtfHhjf4/r6hte3N1Yri9jxOMLTnJydMhqN2PmwycH+ESubK9gihCc0h5/tsr2ziW1JxkPYPwzSpY0PO1iuxgjJcDji8vqclZV1FIASKOHz0H6h1+kEHP1IjA/3dy0sIRiPxzy228wuzpJLpvG05Oz8lOJUiXgkitA+AgeEy/5BlY2FdfbPg/x7+/s+IMeKj3tfRwjBzvYOltD4Q83e2TGLK+tcnx+ztrGGcIOaQ7fb5fziknQmzezMDPg+Rgmad09IJLnJCRq1G7LxFOlUjrOLU8rFPLeNJv3BgM35bVRCgVaMhM9pdY+t9Q32DvbZ3txhb38Xg/kHyAkIwewn/xipqXWE0Ug9JjezxutdjcTkNEo4PNaOyZeW+dX//N/mh/7Mv8f//h//LP54iFQ2YND+7wYtWZbF5tYHRsMRl5enLC8vB+0gE6DqXl9eGXV6TJenuHu6xwjBRLmI0oabqxtSqSTJZIqLy0sminmajRYz02WsaIyQttiv7uF5
Hpvbqxzun3wRCaxvfoKFD1IiXA0hjfQ0I2M4OT5lfmme8+Nz1itx/sYvfh+2rfA9sGzQKJQ2SMvHdXUwdmoLhAtG2kEeIzS+a/jX/+or57UB0iiQNrYyfLa3x9rKCofHx2xtbWIL6HkeEan5uH/CxvoOtpRoX4PlY3yNUAIhbYwZEbgdB2kE7ZdnOp0XpqeneHh8wchAFNbyBRO5Ca7rNZLpLOlUAozm9PyMfn/A5soqtuPgIjiqHgRqzo7ieH8fYwxf295mLA0+DrbvIrDwPY/q6SGbW2u4I4+LyyvW19bwtcdgMOL09JRUKs3s3CxKeGAkd60ntNRM5soorRGW4fL2hmQuy9PDA8XiJKGQzfnZBXOzs1xeXlJZWsG2LQ4O9vF9n52dT9jb2+X7tj7wm/ufsf3pNvuf7bO1sc7eweFv+z2FnRAri0v0xiNa93fMzlfovrS5rd2Qy+UpT81wd3fH/V3ji2OmZqbIpLMBrkMblFG4wuPk+BjPddnaWKJ6fMna2hrSDvgF1WiEJyPs73/9/8dOQEikshDvO39p9Y+QmqwQzRTJTc7xG3/zr1GYXqW4/UO8Ni8ZjobkFlaICIfW9SGTlU8wGn7lr/80P/Kn/jKdl3v+r//u5/Hd3461tCyLzY0N+uMx9ZsbKstLdDo9ri8viMfiLMzOIB2F/pw6GYOnA6XbVv2WWCxKNpPClxaOH3QRTk6CXQ5gY3MdSwr2D45Y21ilelBlfXUdbRksHcIog+WN8W0LYySeO+Ls9JSNrXWQireXVzKhNj/9k0v8l3/rll/82W3ckYeWBmkrfum/rREOw0/9+BSDkUFZAkfaCAE/8W/8Jn//sxeWlytEQhHGOoKkGxSkjM1I+jjvVO0fP9tlc2MbIS2U8vE1KKHwfA9pCwyGYX/A2ckZsXic+coSnW6H18c28XiSRuOGXD5HaaqEIdCYNO8ksgYZQH29cZAiKIWxJHoc7Lyu5XLy8YC1zRWkHwnCYAFGKiyhcY1B+Yb96hHLa2soJCGgjXC0AAAgAElEQVRLMHAN2u1wXbtjamqah/s7FubneX5+otcbUJ6a4v7hEXyf0mSBerNJJBohnc5gRNCiPT09oT8csFGpcHF7w+L8Aifn5yxtrBNC8HF3l+2tHZAWlhnhaoVlXIwdwmOMVDbGNYFilPbouz3q9XuWlpfRxsNohe97YAy2CoTfUAJfiAAbYDS1Wp10OkcimeD87JT5XBmZinFyfMjCYoVoxAo6OSKg3h8jqe7u8eHDBh8/Hn61BUm/WxPKIrf4CYs/8E+RiKYRtgVS8np3ieM4tGpHFLZ/kOLkJp3WBfHSDF7tgpeLY96kAEsw6DzyXL/kR/6F/4C/9x/+NAA/+FM/x6/90s/gjb5JQmKMoT8YcXV1wfLSEqPOkKenexaWF7k+v6LealEuT+H6PrYK8jbb+DTrLZKJBJ1Oh7ByeHx6JFvMc9doMTM/QwQHgY1QGl97rO9sgevx6fYWv/HZLp/srGBsC+Eb9o5PWFlb5fjwAF/7gejG0Gc8HnB5fkl2I8Uf2UyzXUnyP/5Kiz//i1UGQ81P/TOz/MyfXmM00rS7Lj/310/54e/P8cPfnyEcdvkv/tLX+Nf+yjMXTTeYIaAH7w92SPqcHByysb4ZKDopxdFxgNbb+rCFsSUMXE7PT9hYX8fr+1xeXBKNx5icmeRof49EPM7C/Cyu0BhRxnfHtB8ewUAxX6DWbGKHI7y9vlDMT9C6b1Eol0lEFXoMZ6dnzM7NEpZxlFIc7h2xvf2Bw8ND1pe28GLAWCCkYKw91jfXOK4es7yyQs/oYHpvYQFLSeLJKJHYAr6vSCSzJFKa+8dHjPFBQPPxiXy5hNQC3zMIBFp6CClYWlnh8uaGytIC2neorK0Q8n18+T55KDXIAUNtIZTH7v4J2xvrHB+dsL6+jfJGGKkYDz1qjTsWF1ZQ2uetN+bpvkkmm+X25oaJ3AS27eAZH2MMjm0xGAyIxxMkUvFA80EI3JAgJnn/bIMvJNXDfTbXN/DxiAgV6EJ4Xw4t/4oQjX53JpXFzPYfY/tH/gTCG3Nfq9I8/0jz4hvYsSSjTgfd76IM3Nc/Y9Tr83J1itTgW4KxsRBS83p7jSUFzaOP7PzoP8/Gj/4kv/ZLP8P/Td17x8iW5fd9n3POTZVz6hxevzBhA7mB5FIil9mkJFrimrIom5RIS46CDUuWBNgGDEu2aBs2IP9hGxIsgwq0TMKWLJHgcs0VLZErht0dYWd2Zl7o7tfd1d3VXVVdOd2695zjP27vmKQ4soElgeUB+tV7XXh1D6rq/O4vfMO3/om/gpfK4aWyAGitefkyUbW5uLggk/XY3d4i6+d57fXXyRcS8YbBXTehgiLo9G4J0mmKhQIbjQ3ymQL7u3vcXt2wtdki4/u8OD9hsZqANdi1RQnD06fPmJNkH4YgMae0hldef4KwglcePcEPfA4PD3j+4hlnZ2fJF0HAF5+N+OSPfo4vvjXkv/qzr1AtumQ9yXIR8uN//Rm//4/9Mh9/Pc/3fqLEf/ZXn/HZz40J0RgR8+LkmDe//DazRYyONMcvjgmN4MHDxKD12bMXvPrqq7zy+ocAsGuwUYhVlsPHDzHG4Po+r736mIPtPfJegVeevMpGc4PRaEbn8oZKuUqjvkmtWqder7OyEfVWg9Vyzmw2JbbJZ4u2GCs4PXnB3v4+52fnxNGao4ePcF0XIxwev/oqT0/exjVrrI3AaF48f45ZGx4eHXF8fAJrB0c5BEGaB/uH2CjCGs1sOuX6+irxr9CaxmYL4UqEFNx2bpmMx7QvL5hNp5yfX1BtVPE9l/39bYSA05fPefrW2xAlfpCPjl5LVJHwEHbNs7ef8eSV1zGO4gNPnhCZiHdPj4ltxPn1BQ8eHGGJ0VYgoxglJFIbKuUKrY0mlXqZRq3ORrNOpVZha2uT5XzOZD7j8rJNo17Hc11CLEePHnLx8oxoYcGCloZnz45Zx5bXXvsgX3z7/RGlv6fKASEVrp9ivZwhHZfNJ9/E40/8YZazIbPhLcYatAaFREiNRSOUxWiVSE2HEuWAIyza6vuhtkUag3AU0gisUJS3n3B7+YL63mM8I5kubvncT/xlMoUKe9t1MAblp1guZlxfXnNwsEdsNY5SWCOQwiJdF6PlPXJMIyScnV+QyxYp5PNctM/ZarXwPYHxXN5+612OHh9x+vyUwwfbOE6J2M5xjctKagIrCVnhxT5vPXubV17/ILGr8WMXYUPMSjILZ2TdHn/jL38dlZKHlsnIyzGwnBv+2v9+zt/99DX/4Y894Ae+s4mNE9z9v/9X3uHf+CN7/Md/9R2Wos715TWLxSIpfx49wiB5/uIZh4+f4AhNHIbgpDl+/g57+wecn73kyaOEzmvQxFqgdcTV5RUHB7sswjX9Xp+d7V20jhPJMMchjiKEsfT7A3zHp1iqcNlpUyiVKaUznJydUq1XCbJpjLaYaI7jl3jx7F329ncIpIcVEqMAB55/+V0ePtwnjiVnpy852j0k9hykEDgqETfVxiKVQ8p3sFGyT6zFDRTGJBJ
IqNMXOuaMWRJNgphEWCR4t6emkWlW2EwmC4wMVLl1lcXLIeVwJ/+S/c5If+9EV29zJ7B8lCPCTj1MI1fK2szjkGfc9Pfq7D+TPC0jBQu45lI6aAJ+AqR3YV3kdGo4rD40hXM+urnv4w4HpKCJlRv8JXFnqCTyjC1vOWZ88avCRWlx1XzncZ9D78FTl3+RIXXr9C8PIydPTtd94lamuU91QhwVO1tdm/i5B8g7Tmj5DF27jWnbIHPZorBoMBa8urSGuOxtvbz9nafkHCpiCtOB4+fMzRdGpVlnpcVZitatwXJ0qINiEgJfqdHv/Lf/MffOT797GYDrRtS+5qiWBOJgASc+uJGiA15hSrDqkSEm0SQMhkHHs7B7RtZG3tTDEBhTYlKoFHDx+ytLLGYNQ3/3jjaeJdC6IILTjh2quvFiKPI5eo7IzNw52z6LKZt165yg7nAjlUVvIVbrgp/hItHheVna0tKlexurxMcjYKI7RkD9UsECvrIb0z2bIG8+v30TjlgkKqS0RYJmmi0tbYhFnpdGquXL6A+BqXHMdxwtbTp1y5fJkQzAFJ2kDETCoWFlcRV5hndsgQVSAnxGVCp2ftSMoYepfN3NUJTeigbQu9wlDJUKXKyFMCThMtztyUREklq1HEmV4+FpKSh06tdEMwyFKNL9HvKONJw+FkhmQlFAOY0HFIk+h3KrwTupVnfanC1YFhJ5DrBtfU+MGUWayIWZkcmz3Z0nKXUbfDYFDTppaFvqNRoTlJuNTS73lSqyUQtczxRSyqLmrpqeVD1+yTu/e4duM1UghW5aTEK9evWSsZHE6TrQmv6DiTu+Bab7oRukhukUrJrU0WnBOOjg842D/g/PoFww68sLi8dLqD2j33iY2Ns4Ulm+0AUFDtgkZacXgFrYSqJE1pgMOjj+ZAfCwqARWhwhG9ASwxCpohqoLPuFDjKDtb4ZG47HEpQONYmJ9nbXUFj/Ls2VN2tg/x3tDrsxsbbG1tMj05MdmvcyQBak99ak2eM1UwBaKqM++CIEhMgEelLTRi6wUDjW0UmhGNhc9tpZ6miI/mhr92Zo3F5XlUDGUXBT/LdBrjP4TswDtC5ehoIuB5+623Tb6LQ3Nlh4TY7wsFaAwSbGKRlKrXsxMlZEJVcencOqhD2wofbTP1PuJ8Y1HaQQgx4VMuIJu9nG3ypEJx1sqXhVyRMbDUTVrrPyVQ51khZiW+9NZXTAYbBWlApwGfTiujylx7k7dIbWe4QlRofSgbu/AD3znij3xzxe27R+zsTZglRZPFgedWWN+Y4/q1BV65OuLSxTnml2oGfY93Fb2qj1QOmh6ImZGtrlSsLPSonaM/H+j5zOJih8oLLjmOjhLPDhv2Dhr2Jw3ZZxvx4ni6M+P5XkN0Vnn9F//W63zbH1r6wJpNMeG0MFztwzHwQM7UDn7nK18GLDSFntWkiKAuWDKUREjZNBhB8aoM+j3Wz21YNVoDmgkJgrcI85QzMZpL86OHj5jMTkAqYuyYvNkGRyXvcUqsjPFKKyZ0+ojrY7EJvHrtGpocmhwSBF8pwZmjjHeZ2ifURRqnZJ+ZaWZWPPHFWf9dqSBJObe8weL8os1YvcljL1y5RN3pk2ONCrz73rukacOssj+jxdhbdp8M0dYmWIaheJBA27bEYGzBt27fNkedSnDOWhOXIYincoHgHZVphXj6bIvtnV2ePHrE8c6BIe7BoZVCHfER3v7yLcZZaZ1w8/pNzEvEzFSnUIbj3npBLSdPtP0wJ+giEBOBhHQD6rxVOrkt8labWASlaAvMDKXGFrH4isoLfmZAp1H9oPXR2pOhJ1cB6BCKWrPJ1pq89sbrhYVsE4QkShuBHACzYwtVhRbJbHAel6xTUs3FTjyx9bxl76AlZKizmiIsCmeWK5YHcP/2IeuLHc72azq157Pf92vk2CI5I94TaUwsliOf+Z7/i04H+rVn2A1QC3VSc6cqtOV2GhkfC3fvCf/nbxqb1COgyuazE/ZftDgJfOnOlO399gNr9vpr16lcMDFUNhC3VQdOmKXEm2/cIOZIqjKqkUpBnaMT7LMl58hqQGLOjiYJQoWvAknVHq4CISBS4wSCtw0DlItXLzBfzVNHSyQy+EAhZt56621c8CVwtmRLaPOBz3B6fSyAwcFgoDdevU6jp6OmXBRaJuiRUDLupbD1kvnCixYE2lnyi6ZTMY55s4kDV4IiU1FZSSW0xQhEvenBzZqpAx5SK+BPI8dq22R8heTG5uFZSFVCkqJdh2/MTBIpdlCujIGcM7/BrHYy4wwfkIxZ8ijkbLkHRFJrn9Np4Z2LzepDhtamSKgPuNyCJNR7aE145DvZTu2ZMfmygLipcdiTxwUrdWOEzWdbdDs1y0vLhRiUCN7RYHRtEaiJjNVTpWyRXtlCTqea6cqUJB1EakTHWE/hcbXd95aIbzxVnUgpQEc53B+z+/w5V65cNgINp/fedA9/7jsG/LE/VPGlO4dsb7cG+mJKyE9eH7G8UDGsA72ep+v65NByHKFbzDMAnLSoE9qkjJtI39em/Qi2QUlVTF+mLQ8eTdg9jjx6MsErDIae4agy3oVLePWA5+rVHovDii/8xg4//Vfe5c6j99mMIsKNm68SOjUuepzPxgCsMl/6rbf51OufRLyjqSKhBZEKaxSzZVsUroSoMQazNzamL/JXA7+DjWfVxo/amBGtOqC1UaEEsxjP2ZHr1trYRHEyttZOxPMT/8nf5sf+5JWPLzCoWrTqonhnVl9OK6D0je2Y1imJaC+vo+TGJ3zxT0uN9VcztReIUwKFi7x39zaT2TFNBTPfID6VWKqEk5qOt/w8I/ZYrHk+9SUwUq4h3sGhlYVFSsejraUeZysL2Nnb59Gjx4BNF7Iau9F5EEkI9plsA1C8F5vRa8BJTVBHdhYy4ouAqJVkp3dqjdOQIqgjxPTS1CO1FblNqI80QciSiXSgqsErKSqN/Rjr586wvLyIFp6VK6izYLbaVVJmqbIw2MoyRtrQ4pzp0mFI1UoJ8DRgqw3QZAqHo4OEmphrIsoswrA34tLVK7ZQnQFpL17s8uLZc/7sn5jjO//ZAfe3puzutVb9pRYHfPbNIYsDb+5FAiId6I9RjfR8IvtMwoQ6Lvpisw3dbh/nMm2wgkQRaDISE0kzi8OK2VTJ3vEP7x7zP/7iJmCKTmfcULKDnBPi4L///OPfswGcrtmoSkpW6sdcGRY1rXjzjTdINcQSM67OzEZFoA0mDfa5ofIJVyWoo7WqmDZEopQMC1MLCoq0IBJJzrIStRYTIpWcQ8i41vw3xHWoVCxRmgB4fuJPXf/I9+/rMRX5GT6YO/AfAj8KnKINf1lVf6H8f38J+FcwzstPqurf/Zq7AJBbI1a0CqHKiAqxpA+p1LiUcVb1ImIOO2qzE+Nhe8F78GSkSF3BtAiXr17DA9mrOcZKBylc+xCEps2oF4LLpGB+hl4E7zNtMRNx2aGtMAnQF5geznjnvfc+9LPsbO+8/OeNc+vMmhnbOzsf+L7/l7l3D7Y8u+r7PmvtvX/n3Nu3X9M9PT0vzfS8Hxq9
EFJs3hZgkyghgEOgyg5xOaYIdgVSwZUi+SMuSCUpypSpOMQpB5PghICpmFAYKgSJmDdECCSNNC/NTM/70TPd08977zm/vfda+WPt0xpgWhLCSc2vaqq6b99755zf+e291/qu7+OOu+9la7tE0IjM2FSROsxRcuPhTzzGu9/zPmAvKhUD6wtSN9pWiipHBE3rqC66o30GTVG2m2M5Npo0+Ps+QEdzxzXYZ6vkLKpC77RJSL0yo0xdESpbktnHWEjDVVlLjGTVC06DHHhG3wemWDyajNTCWy/loZhTQzQqnOPXH8NVaL2ze9GYd6MFERyTgnbn8j5M2ZlKJk3C9mKO7Sp31lWYVs5egiIL1rky9QWegyXo5hSdoRdmbRR3bA05FQ4uOrfevEVKnYsnjrKQ69iaYHYht8CodBImyeDCT/zQ+/jOH/gDfvcTf9wfJ2tCzakaTsGYogtC1rsWPvPYJ3nve94dEyBRRDKpVswbWQtWG1qgeSVpifG2Os2HxX0OCTsu5CCq4GQ6cPr0aa5cvswdd9/LzvYWfiD+n6WC0+iZCNlJ4LPx3rvv4+OPffotn9cvZjrwPwP/HfBP/8TX/4G7//03f0FEHgC+A3gQuAn4qIjc41ezxN762t/fx3SJphXS5ao3PRIqqhqQGL0NPXsD2cyfPUFf46nQGRZiseQxi5NySrCWTLIZzYVkPQgyLlQc0UwuFZkJZKUZLUUbkYbIR6zjWVlqo/rwov8irpdeubZxxeknn+DOe+/j4LKBKL1O0JyeBUrmwYceAq+QCl578L9LjhGoGTIbqWR295znTj/GfXc9CLnF5ESGuLkGN0JyEHSkZBrOVKNlMu3k3vHhTiut0DR8FpoFr2GdjFSFnjP0yCykT8xpTTalDCabqFGa0XOQdp0MBk0q0Y1nUo8KJh5uJS8zz722zyvn1nQHK0Jx470PHCWXwuGDSs6wdaCiLLA5032XWjOynKFOVJnZwmm6z1ZT1pZIJZEkxYSBQpYKy8Cd9nTmyKHM9vIw990Fu+vOzjJz4fzMhcvxbKg7z7+2x23Tgu/74Uf/1AYAEQgaNmqJbkFoT+vAZCQlHviyB2KGv06USWm1U6ZMrUZHo3XsncQ2JlEpbBie5hLalx6RdyiYLHnm2Wc4cvggd9x1O72FD0aWTl8HriFJyd6YXUEnLIede6vXLvq/YDvg7r8JfEGLsHF9M/Cz7r5292eAp4APfDE/2Fjx8Kc+Ha2BC7Om4eOfSRgUQVMiq+JTzFlNnZo7LUdPltQw3aD8gSv4lFinQN+Lp2GD1enSiPSgguKkVuhRHjB0H2F/rWvEhX3NWEqYLEkja/BfxaUE+WfQbtDFHEy6lZAk6lmrDTSEQVHchSlrzc6nn3yU7cU2d99/d4zLkiFCKAPdMR3g4L7Q95XWOr111nkEo9SMUMa0JcIwvUsIVtJMTmHaoZOAF8QrnYxrsBTFFff1wEM6XjWwmQae6pCGR6+bN2L3MaNMnqitc/NNEydPKIvipB7YhSdjORnLSdk+YBTfoiJUb/S2YJmFuU2oVLKPPAAt1FagBPC6V5S2jNbKkmJeyOLsHFxy6GBBxHnjwhoZlnBlUqREixOc/Ixb4af+y/fxle/94wzCe+65g8kzDjRCbYoLXipeQHOl1Cnu41IjJMeFVkHQeM0SEuAsRmlK7lHZigfIm3x4VKjTaFhbc/stt3H00FGSabxnSRH2qjF5qKUyFwl2qjW0GdQ+xCfXfga/1OvviMjDIvKTIrK5QzcDL7zpe14cX/tTl4h8t4h8XEQ+fusttzApPPTQgwjgrSIr8LpJaIX15X2efPIp5q6IlkDXbeP9r9iseIPUDMxipixOnke5o6EkSEBnoq8XI+x8MN8tCDkk5ZHPfCbML8V5+BORqJYNvBmZhuaKX4NE8me9RIgko0FRtrbEe+T3GY3GClclu1JWiienerQ/ZTtz30P30aQGOu+d1nNoK6SREIpuUSTjSwlFmwdPILkODGb4MfibvO2yU7Pj3ehDv+kOc1+Te2NKLRa8eLRQZaKKB9dhe0FqghUh+SI8ADfmWF2olsNZp0e2hNjMuQv77F4Oqqulxgffdz1HDi+57mhhuRXqSXCoM71HuzBbo2H0mlENM1ircNeHP0KWRnLI61hMZj7CYho5wcKUhcGJo4Xbb95mWkDFOLSTObIdoSedmUwlqaHZ+Z7vvI3bbtq6+rktpLDIIabaAKrm4TTkc3gjoGDWsLWFNX0WPEVGY02KpSj1Vwg1Ctw48KRvXCcQEqkltBUSQtYWjlEGzzz1NPt7e+FQRMOk4rXQh2xaxmiRqfB9//XPX/MZ/FKf5H8E3Am8h8ga+NE/6y9w93/s7u939/dfd+xYIJ6pBNNPFuRp6N/cgM7WcuLOu+8kl0qWhK46589e4JUXng9acCgoomR2wWqizkpXg1X0nkHjTVhKpAytCswh0VQJuw8scf+7H8JxuiYeeueDrN3IMqMa4x6vgtY/PTb6Uq7PPv44qzYmD7nG6V8TPfhBJMnsZGFOTi1EGlMd46w2IzVGTJJilLna3+Wp009dPVHEY7EIYbclOTH8yHBxyB4glxhdp+CtI+gsyFJBQneRHB5/7BGqKG3OiIf34h89/JmYunRjLQK+B5NBi6juToBcxRNZc8z+c8emBJoxLxxaLMhbysoL0jMff/gse5caL51ZsZqdVy52VsOYY6FO1opSKE2QRQ73qZTJBZ78yF+mWhr3TplXCt6Z94ymGinWxclTPA3LlDlx7ADHDy3Z2kqkEjTrNCfaOgRpu/vOj//0czz38j4Ad997H1YyrYXIKovg0tESmIIWoMZ9TQWShmmJM4fvn0W1FPdRyRZeCOYR6OqSeeP8OV559UVcKmslnmtNVMlYiYSXO26/mwM7E31W6jp4M0WEIhlEhoZDSbbPj/3gW1mFxvUlbQLufsbdu8cK/R/5XMn/EnDrm771lvG1z78QPvtZxErw363HjmaCS0EpYQUiw51VhUYFhetO7HDLrUGP1SI0c7oZmmZ8MmxKweWeYpMwr4hUem90lK1CfF8wBeIpo7HljnjEnQOE+mAi1VGMT5/fqeXPfGloI3or1LpPnuboZTt0dS52QT0PAk2l50LpTmcLwSjJqBUyiQPLA9xx5+04YD1TW6DObsOPwSPFKaYPQ1xknVShWJBtUnNMZ6wlPDs5N7rCAw++i76d8GmOkeXsvPNdD4EqlhUxx2xBshCugEEOUk31uO94VChqHv8uzrNnrnDu/IpEGpHgyscevsC585XLu87OsjA5mE/UFP4KtYNrIntFJVyNPc1MbpSRqeAtfA7WjPOFIJu1oHV9TotBp7bOhYuN3d1GnoS0EKQkNGcWywU6VJ0iMsjoDKKZ431N7YK2kC+XlaBZUJ+wGrbt4hPiheIlNuTcKTguaywl2mghvASAfeTEddx0062oBQ/FpA0SmY0YPEVTR3pGVch5AQV6Szzy6Gfoc2fugDZym7jKsnurx+9LeWZF5MY3/fVbgM+MP/8i8B0ishCRU0TuwMe+0O9TDfEDXSieUVW
6ClkrPfXwDnBYJI+Tu8cp4oRwQrtCCz27aqb7gtyHk7CEFdczp0/z+COPsFrN8aBrp1WjzQ3rwXPv40b1DkWIYFLXsN50p5cOOZDnzhf2wxdNkV/wVu85T6SyAIQnHnsC7XsoStYJZIskykqMhSfSUEFomqk9wlVWSUIgolH6RTSaDf+5+BnRoJ7igsyGt0Y3o9YIPH3xpRe4fOEiLzz7Apfm/TBeAWpuyPDHN8AqeBkP/OwhlEF49NFH8WZQK9qN5E4inG40CakbugrClaQls45cgzrs2odvQyKDdTKV3/uU8MxrnU5nPTvnzlZ2L8/sWrjnxIGwYkmEzmhKSLPgYbSEe6arU22ms8LXUNbRqnTa2Agi+GXVKi+f3+f0y/tUc44dnrju0BZYOCmVZGRp/PbDr3HmfASg3HvqTspyGXZoUsNcSepgdXaQQt2W4H3UGbUAkRdUzGFiFYeZCT2DkxBvpBYTHHcH6+gYCZLCH9NsCsGXGqKVbhUswPCqsGBFb47pmofe+QCWIc+EBVp35PNA2V9wExi5A78H3CsiL4rI3wR+REQ+LSIPA18H/McA7v4I8HPAo8CvAH/7C00GAE7dcSemib0UMuFgDUDLHSHRNQr+Pgcy7gZ4aPvdYCaUWM0N6RWdO70pLzz/ApevXKG6cvs9d/HQO+9nOU1RgrkgecE0FZLGHFZ9gaphKZJ4qQGu6UhzwQXW4TTc+lvfOk2ZstxGc+HuD3yY29/9ITRPlMUBymL76n9f/u/8p3zoe3+cwydPURbb9JNfFuYg3fEeEVQ5Oc1HsEU2ctvCU8Z8TbY1yYLy66HaAYtkpK6bc1jRDpaEPiU6JYCoJKDCTbffzM7hQ9x12yl2FgeDaOLgvVDzIgAsYoQls1AWnYShlsEXPPTAu2AB3Z3PPvFZVArOdFWl2aWEDJgg0mRrqFr0xqaR2RdeWFjeYmWZD76z85HfeJEXXlhxaCco1SqGzp2qa2aP3yslxYShSxCsWJOYMN/FcKoVWslQErJTsYWilBBnqdJWcOFybNRz7bz48pqXXtnnwuV9claWRdk+sKDjvP++LY4fjsqvjTCX5545TSNBbSAZkg6AF6hGziDJUa3ROogjxdi3Gv6N3tAURjAhLIsDjQbSFdToAW9RaLh0ehdqVWYSz7/wAlf2rwxHKZgHz8FzisQhHJmiIurAIh++9hp/OzAGDx086HfceWfIJTuoEW+mz0yqg9fdw3BjgB6ORBmKxIPpAfOJjBUgCaMP4M0jxEAjF09oZMnUjZFoNqw3iizAnW6JjpFxHnn8Me574CE0KEMhHOpC3d/niSf/NE/g1nd9FQ99/b/Ps3/4a5g1Wt1nuXM9tzz4FfE7pCAe7rO9RyYRJVJ9X/3N/4XVfAE64apk0b4nh5pqKNMGLuDVQI2qC8QaCw0LdYg5N3hUBcN6ix6+CSods4414l5NirqivY+2xOlV6JJINoeuIxlpgnVPLKoBE334trsZlpSpFbCZWTMuY6ibOrLfyYsJJNF6EGBcwqMBcf7qX1rywQfhpVfX/OrHnMdfNCqJ008+zU/+0B3cc+oImoVDS/A0cWAKP4J1Fqx1JFW8LuImzRE4MlcLMFUTecsoVegWlGDrwmrtnN9dc+nSzPkLja0tYVoq00hmnquyzMLJ4xMHdpT/8L/4FP/nb7wGotx5150sDiwR72iP5wcrsGyUVli5UaQH29WM3jp9itZAvGJeUDFaqhRf4i1BXmPEaNQtBD8h/dZorTxi64axPT4UgxtjCvNOGrLiOXUSmWoxKBcRXBrf84P/K9/77773/7NU4j/39Y7bbscFahu9Jo6mTB6srBIWLvSR/d5t2F91J2kbwZwpTveNQMg72gfiP8USzhR06kiDmmK6gIDNgvqCuhCKBuc9gNvCOx94AFl05itOKlNYaePBnf8Tl+YJnXboJtz27q+LhKEUsl86OBOedEiIA0CyFuYQJCPd+j76c78TCjbreII1Hu47LjEx6J2miuQUZXTqgFxNsumhuYp/7/EWXYZklcoAVjjz2stsH9zmSLku2IxiZJSZ0GGotAC65k61zFyNgsJCqHNHU4qCXAK43TelTELqDZKQqrC7apx55RVuv+1U8DwI+/RkEc1taux2p+wsuPP+ia+VfS6sjFfPdqaceP18586bowJbLeBIBnrFciLPTm0KsoTUY9SpjdUMC0nkFCi71HCmVhZEzQjVOucv7vL0C+sYOrzuHDmknDxRyGWBps5cndfPVdwmvvXrbuSJ0/uwfZLlzhbZwxl7XeHp06e599570Dkiv1IJI5veKxXls08/xT3334cjWCmMlNoIsOkN0ZkuManJGtqUcHdyunSqABs9h8TC71aC9JYE95miQs8pDssam7toZ3ZYxGrip/7bH7jm+ntb0IZPP/0UijCNgM6nTz8Tz2uNKawYICGAMQ1ynCVHsqFJ6XnkAwZbd2QIBphlCl2jBDNf4/t9aPEznjupOjqVcNSpmd4qrC3G2T0ArtYULSk2oNaxeZ+XXnxu5A5+7rruHfdz+/u/EVUbDj05ejkP4pGoU5hJScDWgVlMwc9P84KjJ+/ngMBCK54U6U4ZNunelT7BKohjmEMlQYugk0amomSJxWm9jRQhR6WTvEIjdA3u3HzjjVy3czjcl1pHTZg7dGt4aoO0UvCywIuQZKKrUIGsnSIGLfIYXYWS5vj93dA5qpIDiwW33XUKGWxOwWi2YtUrNSlNhF/6jX3+xa/v8vwLe/zup9e89kajGtx7/2384I+d5swblUOLwiSZ1hJMCURYZSNNOVijCYQZ9xTPSBHoCcshWtImWK+sZ8G7c3Fv5tJeRMytZlh1oXpwRoKlaoBz/LqJWjv/5P94lp6OsVV20BnWK6FbOETfffedzL3Rm7EmIdWxGSwtkJR48L77WViMurPXyMKow8sBJ/oN/g4AACAASURBVGuiEIzAZkLtOT4/TZg4RWI8nbuQGoEXEFoQrIfTkqUw0+0zSaBJQ7wxeVC0Fgjf/rf/3jXX39tiE1gsFmEA6rCeO/fdfU+AXtOEygKzHLNTAVoQIFIXqkvgAFVHaSZ4N8w7G0cWFUd7oa0r2VMIVzr0ZnhNhJ6lQVGU4HA3h+hkZ2ZRcg1zkKZActK04N577+XkDbf8sfdx4aXP8tIn/2Ucx8KIHgvJsY4PMFw3HMlKt9EPS4e04sKZJ5AGlBK9viayJLw6pTdq6xRXskxMA4RzDYck9zWZztpnRGuUjmb0asxzZ7agRovHCEwGjOCscTpdYvgcdOMU/yid6j0WegYroXPwnKmponnkFNYEvqC4k2yiKldTkqYerE2XQBj2Ls28cfZcDA4G6Lm9NXHz9dv8a/cnrj/iHN42vu59xtGD8MLre6zodAy1TjPhsad38T2l24x66AwmnaDH4eAzYZjalV6DK1PXQFf2V07dM1arFd6NVoXnXhGefXlCifwFc2VxoDAtE1sHJra2lyQyxY29eUbdSEBJcSNLipRhrOLu6AzqIf/u7jHtygIt02qjRcxzsD8ZI2q1oMYTo9tGWLxFFx
MBOWhGXVCL8l9FqZt21maQTPJO8RCUeQf3xGzCT/43333N9fe22ARuuPEkzRsrU1568UWqC6KZnsCthV2SDCKFhhXWJtFGGQ+1hxGlqJE0hXZgjMXcjDOvnomkIwTon3Ot8YR4RVcRjRWiGKd2BVdKF2YDt0yqnUUPItK82uWFF5/5Y+/jyMk7uPmBryC8BaIfDxfKmCNLZWQMhs7YRJDq5DW0mjhy/SlMNEwulo6qMOcGKvRio0SUscmFOClpVIqMe5FVQQqkPOQTiWSZPGbnIj6MSweGYAmRgqcIxtBcolUZzk3qkdEi7lh3UjeSFJokzBr0kHI7lZ7DDSpJD3KSd3oDw2i1Mdd9Dh06wI0nT5KzhWmpKL/3qZnf/qNd/ugpofXMh78Cnj+jbB/Y4j/64U/z4ku7YTEuwpVV5e/+yGe4tH8Ft0aNkoh1DleotDYQY66deb1mDkU1zYzuLSg4CvSMaOL8ZeHwoSVf/eVBTS8Ii6TccHSBAP/371+E6RgHdpZ0FV5/5Qx1rvTs9AZNghjkKpASLh02nKJhPGrEphjuWYYsOl4CdFUF6eBNgkoshENWVUqP58VSqAm7ABqK0hZShbFxhfmubyzcECANDkiAlSFee+vrbbEJvP7a62RXlgXuvOeOSGuhk82QFPN9N4LxpwmdYsSjLtR1Y71aDzqMXA3KCC/f+D4BbrvpVqblIuzEhZDQWaKp0a1g2UjZIBUOuXLl/JVQJ0qPcYtUxDKNjFpjE6H15uvsc49w+mO/FOwwQj141daMSDEyic3FNJE1CB29JEiN82efp/dQ5NFDTKIdJvqIFQ+jTsO4tNrDnRH0mTbpKmQ3YkYQYKOK0fuauYWTkmMjNTmCTUNJpVhV3BJ9BtNIE0LipFv3MBidcCR3Wgsb8ktv7EUqczJ0fEZWYmNxC8uuniIcZn9vzfnzb8QCxGOT9owKfPDeib/05Yf5tq/Z5q9/k5JS5ivfBf/J37iV9z90hD989BK//v+8xqPPXeHSG/AP//MH2J2dKyvDVtD6FNwGN2QxUS0xV9gzZ71qrLoO41hhWiZ0oSyXia2DyoP3JL7hA4m0aQMMSk6UrDz90h4f+cPOKxcSVRLmxvHrTpAXwS/Ik7BQoUgagDOkloevQSJV6Ja4vLsfwTU5gelwYDakt8BG6Lh9LrbOiXi8nq6G28Uts4arQYqQVRcZ3z/YgT68Lkl4WbMwv1q1JP1zjAj//7iOXXcsrKB1kNl69LviYRmuKJYigLHjpKEkNDX29ve5vHcl4qx8WHJpEC7dhdVql+otpMc9bEj3d/doc5xEV6OhPF0V5hhw/uLrYRqagkaaJPp6kuIUxP/0zrrYPsSBYzfiQPfw0hdxkBZS6RJpRtHjA14w0/jgRLnh5vvC74/wqXM6aOaNvSs4gDRIFUR57bXXB2ElHjRIKGG7bubxUI1E4L31mt0rl2n7M/M6pijZhTIriR70YeJhgTiVdIoJg4mTTGhpMABNQNe4dc5feA2R6LnRwD56M5pInEgWVUQqiUM7W5w8eUNs0ArrPrNuazqdX/79fX75d/Y4dChz923bvHI2s7+rnDlrfMe/cSs/8fOv8r1/71F+/tde5OylfV44G4j8lX1j5aEGNW8x9fBOm2f218L+bmf/0syl3X3mfYvQWe1sbSeOHV9y9NDEVMrgoYTwTJNw3ZHMwWViZ7HNNOzD0qgg3rj0OusWqtIIAVEk9ZEW5fhw8hENhp9k4/XXXsNTI/UaE5+6iaOT4a8gYTE3QlJspD+JhaLVcapElFs36JpCz+FRuaYkSF+AhTo0SQTUYEoSHffn2uvvbbEJvPjSi3QqTQWtOcQoXahDTQWj3B8LtImGh53C4UOHOX70eiBOMMdxCbBKEC5fukKfO5KcZgJNeOPCRRph+JEkVGARkhna71ngzlOnolwzEDGapuFTEKwt1RRhHm+6rrv1Xk6990PD9bXjvvEb5uq4Rr2HP32SN8WxS4R5ptCIJ8Z0wBI+K2dfO4tYQloGT4jNnLr9dmL4p0EgcY8INYn/T/N4iObVzLQoHDl2nN3VHvurNTKmCRJ1MeBxvzROmpQy3QNgPH95F1WjGUyeQqc/MIW7Tp0K0UxonAfG0K56MsimmLAYT4bfqyNdWF3Z5/Lulaut0y//1ppHnq6IG3/lqxInj8PpZ5XVWvjg+27i+uNH+O9/5gV+5ddf4w8fucBHf/ssvrbowX1N91AOrlcz89o5e27NubMzr5zb5fVz+1zcb+zvNUwL1pUDE9x204KdHfAI8UEUtrcKWwvlqecr/8PPXeKR03OYktAQcW659RamssAx5uaRIekKuWM2ItqLoeYxpfHO7XecYumLaG9T5EsGO9Vj4QcAM8r5GLu6wyZdGjOyj0OuhaLVerS1EZ4bYqQkPWzuDXLK1BIjTzfhnnf+xWuuv7fFiBCgdUNGOIYOhZu/yS1IxYacMpR+KsFaW7eK1cbWNDG+cQBRoSW48YYbiL3UICdMlFvf8Q7whq09GIaDo5s9Yshzd/oIjCzmgwOfw7VnjgeiTIlb33ErL7zwArtXdse7GNTmkcxrHgtGR6SZeIzFXIiTRMLDsHuQoqKOSYgnZsmIJ0Qat91++9CGGzlB7xFxLRCqSA1yVdJGkxRYBDFSXe8Z83rNYtqOFGU2OYgeeAUp7KwU3KPXnGSNeoYs7F+8wOEDi7Adc0WskXPGWxpj6pDDipdYSExxGvXYvCNSu0fSEZ9TTR46fAj1YMht2GSPP9u457Ydjh1zqM6/9RVwdrXHV75PEW7myWeEH/+ZZ8PXD/iL7/0KrFSSCNs7hf3Z2dvrrObGK+dWXKmdnaL0vbFJWbyeQ8uCbCUuXqwscsKtkVNiOSlHjk4sl4lf+s2LfPLJdaT4GKDB87feRnq0cOX8FQ4eO4K4MYlgZBaqNNYk33ApZDzfHZW4f10CBG+WyQuDloeArJNks4EGC9IGRy2Np1hU2L18mWm5QHUKFqc3PKcRgFoQaQGyu0VeohU+/F3/Gf/8p//xW669t80mkIjRl2uLB5IcuyKKaAt78NEc4JuQS5jrTF+v2FouAozz+PrYW8fCi4hvskOLXVY0QQ5nGhs8ek1QGtHHB62dtcTcF4FOtA1IYBWTTNxw8gSnnxoA4Tjp8NAWiPcIqbja7Hm8U3HEarQVhMSUNsRPOOfeeIPDJ25AaFEuDl15OP/G6wn78dgYJYUUVlxIQ6HnEqXp9oFttg9sB+uRDYpoQzE4pgUQPPhRPTV6pAurcss7bqXXyCdo6kFH7gus1WDjMejEZVQVpqhUksvwxw8q8iY5VyHe/wCskE3IKvzq71Wa7fH1H8jccLiwc0C58OyK/dr54APw/gdvYqtArc7HPn2eX/nd11E3vv2bbiAtjN29mWoRXbZrobZ7441G3hJ6NToLyJ3jpXBhd2bvclSWqJKTcmgnc2ApPPFM5YUzA/exqG686UhgEsw6WGV3vszCt5m80FJG3VHCzNZcIxZcE4ZFboMqZil0BRjkGP2ah4eWSVSAKQVHBWMgXQH6mgfRbLVbES1sT
4NCQIauGHUcHLFOhJj4SJn5zB9c29vnbbEJHD16FB3JQK4SOmhLQaNE4oMYCymILUHYUYOD2wtseyvMNIdMNgTkQe80DZKKJI25fBAGMNHon8XGjYyoUZ2A9ejFRgBkbuD0iISW+JBlxBMtpsKRw4e5cPFibBYWwJ8PP0SXMbocdlFJYjGbxuhHzcJ1WIwmYQ652tvn8LqG6cTQ4KcknDl/luPHbhinydhw2JigjdHjhjlJcAJwY3+1xs3Y2T4Q+yiDVOFw8dIltra3ybngGJOPBBxXGg18WKS1sBbvaYpFkDtGGtYACtoQi43AJN5vRGwV+hgZSnCkBlBq47XLyOuLz/fXPrbP3bcsuOn6Jecv1eArjPBPdfi2bziJdOW5M4mf+IVzXDh7jlH/4r1z8Upn3od3v/swv/Wx1/Aa5f6xrcxf+dBNfPRjnQOLPVQ7774zh82aJqaUWE6JJ5+f+Z9+4QqffnoVVRmRGeF0+jip07BUP3nTDahmImABTDvqmawdsxC52XinqRSQGU9lgJCKWQu/Ae10V8IjKyTcOvBAUWHvyi4pZ5bLJQk4cezYGA0L1sBUOH/hLId3DsbGMiz4ROLwE5yP/tw/vOb6e1tsAtdff32UhG68/uoZbjhxHJE+HpBIbtl0nOpj5oJg+WptED0xvOk0JhZ43A0ioUDiofUe4m0dZgwtXIfjIFOUymtnz3L9jcfC+XfoGZLkgfHLCCiFxbTFTTffhDtcOfsyrz/zGa4/9a7hLz9IQz3GhT68C3GCcWdhFjJmPeyffwlrxg0nT4aGoTleAvLrAnNdkwhxjSARm6YgPeyokETSkbYksTZECFff3nGNMjNkxjGf7tZx7wjT1WlEMw1prMQv0dKCnUcfP5dQhvMNHgVWd0TCZNUlxpF9lMLuEnJbelQ9tqm+ggGqsW+G4xDw+Ivw7nvh0FZhdaRz8FD4Oj7/0l6QwprxoQ+eZE7OG68KH3/s8iA9CI8845w7d5a77rqPX/zoGb72y46xMnj8svKuh2b++b80jm13vuHLw7RmMQmHDxQObit7a+MXf+sKn3iyo8PNajPZGRrEsJlXiQrVICdl7UqRhtvGsyJAVRmfdUrh/WcDuDaNAFkhSG6qUM0oY7InlnGTwTOx8F3w2DKDghI/24mWU9AQwjE2KB8Hm48eTzZuW299vS02gbNnz3Hi+HGsezinkshxx/HcRxlNlPro1XRWAPMUAAzBwBLbfHuUmybhcyezYSXQXE8a4puRMowIL585w/U3Xj8ohyHhpTmSozwuIrGwm2EtUGSXKL9LWXDyxhvZ67ErqQy/+QHAMQCyMYEieotI+AkNRJA68HGKasctnI6tDYykCzeeuCls0dNw/hmjPB/ltVmipRQVj0ESx5Owvb0dN1rk6oNJcqTBdUePcdX/T4ROOBl3Bzy4CliIdDz54PwH+i/aiW1AB9JdMTNSKuPBCyxGelQ5osMe24cbs40KYIPlOODKr/9BQ9s+3/KhbU4eW7BYJvauGBcuVi7sdYzOe+9zbIJy9/X0lAeTLnHrrXDutUxW5S9/1Um+5n1Hefi5xG04v/OJzuFDiW/8C4W/cH9meytRkpKy8PSLezzyVOPhJ9ro3HyMqjaHyahsSFGV2PBXJCE5PiecmDjVBGoYmTw2PnGDFpFqsaHEjhdTLEjJooIcTsQe3TAKHDp0KDaj6DTJgPUxIozulOPXHcenGEdLD+erMSyM886vPQN4W2wCZ868yvXXH0M8ceLG42FZJVwtiXzT5BMBHpLjZJfu43tl6A7jjYs7SoSBbkCWUYdiRJkUQGw80Co2cgejZO8oN544ifU+sDNDZSgL0wDeRlJxKBmFRVlw8IabOXjiptGLBVfAxqalugnkiFcppsGH6I5pB88cvO5mzieHVZhpshkxWhiiLEnMDaYiVw071EfZL2HhZVF8RNqSxnmBjvvUwU0GuadfnbzEY9DCXcliVJsQeifm1U0CfR5tmXtsGGqKpwg7FRfSADtdPRaOjjNLhN6vBpjHZrChVvc0SFrxPsIcJvEbf7SmduXf++ZtLl2pFBXuvmPi5Vc7O1uFJ56/NLAEJ8hfQDceuhvyXcfoBl/7wWPU2nn+9cz3f0fi058VvumGBfecilL66MEEKjzyTOPnfmWfcxeVs+eiPPfB01cLN+acx3sYMXDW7Wr1OGlQ0cmQLG6S9wSacGqkOrnQNeMMfGu4ajMcnsISLzZOkc2zPDYKJcaYBEagg/izcel26YOo5KTeEcm8/PIr3HLjLZgZX/Phv4GK8KmHv/8t19/bYhNQCeFEzEmjXPRcSHRofYhfonTqrSMp+PKKktziNBo7ojHio2KQGmi9d3pKgc42D092GV56GrjD8etPkA26Z0w6akaT6AWTTHHSNrC8idoC8JiJ99ic+nqf1eWLbO0ci81KQczIXUKh50qKXSjej8SoMIDOaH+mnduY++lxVkSXiMuoPMYmIonQ2oWLbwL6yDkIxyAGOBpZCKpDfr1Z8z6kvBLjT7zjruAV7eFGIym0DtbGz+NxmosFHmJ5vJ4Ab7MEoUl0gLMSnU7wGwblZYSowHDXkXiNTnzm4OGQNJibv/vwHDbdbeZvfcthjh5NbJXGlX3nwfsP8tKrK/qsXLrSuOWWHV54bpf7b9/hyad3uefUAZaL+Ax2DsEN12VOfEC49YYFV1ZR2b14tvN//f4+L54xHj2tmET+YbCwZCzOhhBAaya+5tXpsRuGe9Dw+xsDQkyDESp0miSUNd0LJqAZeo+Rn0owCt00jjAJqzvBGHDMsNdnfKAxoTGP1stkULxls2lvXhOU5YSJYVnZPfcqX/tt3w3/1dt4E8g5B5BhEThSpVG6YRYgngdvFSFhqbO6tOLy/mWuP34cSaOsHAGajJIpet9gYZlFl5ZN6GKjQCrhrdfBJkXncBvCW6jovaEpFn1kIoZ5qZpgPcpd2ajyNqD/ap965jnewFgePMzy4DH2Lp1jvnwJEWf7upuYljsgjSvnXqbW/Ujd7cbh4+/g4uuvYPNl1DvdJGysPRZGcqFhJNUopz16bJPgu7srSh8tBgNMizFnJNqOTakDo4Q3MRIB+CUSkpQNttnFSFkHQBrkpjQAOsRQM2YVhvwhfPohqpfeIhqdFP5+MnwNYWw40ca5NEQs2jUD1zwA1DA9UZzf/dQ86Ne7lCV877ducxDjlusmDh0oaO9cvtw4ceIAxw8uOLqjFFVOHFuwc1B541LlQx/MnD9fOX54gRThwuvw8x/Z59ULM59+qkb7qBHcEgVnTD1y/DEWvAWgqzjzOK37sHI3JBSb6XOtpFrM8zV1vJc4pKK3GCSfMcP3aJnMYgIQbNOopGw4P9nGIIcAzs3COdqJtsyGdDw23agIjl13HKSTTfnk7/0KX/1X/9a119+/+iX9Z79iF9zseB3xFHgGiueI5raoZcOIoUxsyUEgBf99hGCKR0l1Ncxz9LU+TDCtGy+98jInjh+FKQxEQPA52HJdRmae9RhnSScPTnb3CI2MADpQH7xw52pvL21FP/8085WXaYduYH/r
IKv1mr4+j+5dZP3KYXKZAKNNB6i2gEvP4XVNX61Yvfop7MrZ8fqF6DoDya8kJnrYT82OSAKvJCrNdZTio+VQG2AW8SB1j9dNbARxb8MbLzgXwS4cLC3wjgygUcVioV/VVTDm+xHXlt1Iw+VJRNHxkCoe/a+G2zCehg9EC0Ul8VmppqGpiCdBegCvERPZcU8oiY89skfPQp2d7/9rBxEXjh8qmGcOHyy8enbNyeNL9lrltlNbJE+IOgcWEzvbis+Nhz+7x6UrM7/zSeFTTw6ANRH3IFCNOE03gNyomDYG6p5CjKwawiDxoJXnTRlvjhWGgClAvIJQKaiFqMgG9uEaeIJqmLLE0Cae++rOld0r1HXn6NFjMdEaVYi4DJ1CBPeixiuvvMQNx4+haYHroAhbfNgu8Rrl8wCDX9BU5BrhI/8MuHd8yxHggru/R0RuBx4Dnhj/9vvu/j1faBO4+567fXvnADLYUjJEEVmcrg3aiKXazPo9j0ZpPNCpjbNN44GFwUoBBJoHq85MuXLlMjtbW5AT6hFEqXOjLxK5Zqx0sjNSdTswQUu01EL8gWBNg5vPBpwc+81YZaISpnYeI0azGVnPo0SJ0lynA8FsbBfAJlgcxusbAYjiJDI9zwE4pQmYSG2OqsVsvDeLh6EJLnlgxuPkZfP+Y7Ii4pFWKyOF13XMk23AJZGLpy1464hRumLLmGKkpnSJ06apkCxKV9UAWMU6TQrFjfU49SWYQSRxat+g7VFJ+OhZgupsoClYcBJIuAr0q6h8MC+RUMe96wFYMPF17575yMOxAX7fX9vhH/xvV/i733WIH/3pXX7gOw/wYz+7y3f96xOHDxb+/k9d4NXzjbZyzlxyzAdbU6Iq0bEJ+RhdikSb42lMMywUk5Y8DFg8sCWdJEaECdLcaQUwSMVpqwB9kwb4pxb05rIo1LYZDQ8QWnpsAi2qkXldwYzltLxKOAoAORiaoqO1ss7u3i7bW1sDvB14RR6VjQU4fsf9H+AXfvafvKWpyBezCXw1cAX4p5tN4E/8+48CF939h8Ym8Etv9X2f7zp48KDfcdddURIOlDRUcZG7Ji0hMbUOII/oO/sG9BtvQQfghzAksx4jNKIt6BZjPcEp8dpp3gJQUUfmCUseDkZpM2YImqlqpSclt40VtNJ8vvpaRMbk2/Kg5I5RJUazjVYgFkB8f4BzEMafVUJMpC2GodR4AM0aXiK0I0+Zbj6Uk4p5pZPIOXgPNEG0xqk7Fmdv4UOoEvRWk+hd1ZTiQh8Bme6ZngLAiorLggmYjUg3IsxCxKIt6QzvRWJGP0POIfd2iwgsV0M3WA6QNuOqq9hEfO+5c68xLZfsbO/EqTeqjebjddPpg/Ogg2CfUK6/rnPmcrRMD9264JHnGu+6M/Oppyv3npp48vmZUzcsWE7w6HN74bfnBslwT1HtoHGy+qadjAlTSsP512MsqBKJWL1G71c1sbUBnHNUipM7K49A3alnem1YMtKgY4pFdaBlwqwRZrYdS4ETiAG50Umknkga25IQr8FNaBq4jElsj+JCIYWF+wS1C6nFQeaTRKCLNP7t/+BH+OG/8+EvLYvw84WPSBwl3w78zBde6te+brzp5ED0G9IFHcaIJkJqiuWKZUGzfm7unhzNbUiIdSQX+QBzQujSx0PaHdpAo5OE1501B2+IwhTpFDQRXIkQ0Tn6N6MNkxLQkVQAMnCsofxi6L0lKhVXD684g3Nnz3Lp0m7QOksYllqK+X/XMdkYD3iRjuVwv7E0WpwkFGto0bA8kwbFMG14KgwXsng9w3OP5MMPH7IoEyHDDd6FITZO2NHbB4kxxneRmQiUhk9OszxObg0TURs4imxUkgFq9kX4H0Bwh1JuaGkDEA05t43FHzqQPlJzhUNHDrNcLkF60FyJjaIlcFqMgGUK9l2P1+LaefWsxO6iiU8+FVToTz6zxrvy2DOVNsNTz1c+8+xMFR3/f8FH0nS88RybgWkQzkLgEG4/XcE6DSEmgIHsqQrFO5QBbFZI1ceEqVP2jOaB38RINA4NV0GLRp5kZIjT3QK0JaEqiEf6U9ZwYcqiQUEf8efJhWTxUSsF8SmUszlIcVo6p597hqrKvGmNtfDxX732Ev3zCoi+Cjjj7k++6WunROQTIvIbIvJV1/rBN4eP7O+t4sbkjOP0FGhnaAVARuijWIyPJBHSzQ5CDquxMpJuPQhGmqKs6z3Q7WQSkU7qg+yi0BLWE7VFLHnODR2xT64J8yATtS2neyK3GDmqj952oOOa5aoDFM2Q1mKs14yDh46wc2SLnKIk9OFj54wdXCKoQ7LSLJNqxKf70oOarJnqC9Q2c3fQtiGuOEhDzMktvOg9hd/C53giCXOlD/qyEvc2SWj5u8TDnVOPE8uMZ589zbzv0BeoLJAaUwhDoYVdt6liViLuqym5O16JiYIRduUeno2X9y7y6ssvxTBv06tJPPRqnakkpmVCNJPmUCU2Dwq391B3ilVyierKrdNi7BFmn5bJ2qgtpLnJ5nCF2lQcphRnbPKfI/OAxCk/xE65Q2qEx0SXoaxMYatmAUwvRcL8IwmtGqkrXaeYoDQoWXj86acwMUQbjHi8xgAYM0O1GfbrMfCJQ00Edi/vcfaV14F4n+4NTxHZ5gCp4qkPYhYIM+SK9Y70qNBuvOUkaRqDJ0DWxjf+m3/zmov4izIavVaZLyL/CHjK3X90/H0B7Lj7ORH5MuAXgAfd/dLn+/3vfc97XNwQWeKrynpLSK0FGUdihh1HOnHqu2CL8Tn26HOjl4xT2sd8yuEq02oj3syulJqpOWy9wsAxKMVunYh8Tah2WpWI3Dagp2g9koWD76YG6E5SIEeKkftQD/agPbvaQNU9aLYeSkeGMMU8rL0khQ+Cq9BHheKeSMDChEeffoI777yXkjqNRJExDlWoVkgWIZTaRy+eJF63DSzTI0R0jEtipCXDoGX411VigbQ6kxcLsDXNEsUFKZ0+KbLrUSujaGuwELprJAPPoVfIybFM/D0pvTeqNRbTFmHlVAf4F/dQiByE2QNB1zFu8R4gl6NInkmWmVsErqoK1jvmE5KdLAlhH3qJz44pxmk0aJmU6jhRQFQj9N7Dw09iIhiMyTGuNJeRCW9kBcuh4jMr9N5wzQNsrRSNMNhOYBv1yhWWyyU2CQll3TpFwaTTLTPV6LAsO9aJz38wNLsZtRlTLsGkJCqJIJn5IQ5ZJgAAIABJREFU0NjEyNkkXLMhpjQoYUTbEt5mrGRcGonE9cdv5aMf+Rf/aqPJRSQD3wr8s83XRgbhufHnPwSeBq6diTyuZ559hlkKjYptd5IZXYRyNT58zP0cLM1h1WQetFpixGM90xkln9sfoxFnVyQrKRU8KfMUarkmjDFkIMLJQVsBKr0LvnCkGY2YGIBjuVBLSJM1RflvBeoQ5iTRsN1aEMEbm9nxVV+BoA9Ljyix3nUgxYPl0yIWTEWYMCjOrMY73nFreBHIRKbFQ7OM95x0hpyYTMhmZDqtGzQZv8vDjEUCVJQxFQAZ2YVBNQ163IyUCHvRlMhJ0K3AOvI
MlM9RWR976vEhagmyjiwF9fCEzN1DLk0mp4ntsmTqTrY5ZuRVwhYOIVfH6gA0U6KIvmkzYGgPEjQnLaIiyj2T5EBsF83pdQ+1KI2p0XbhcwCPGhBvz0PTodBozLqZtyeC/JWwpNHWjBbSstFLYFStB7AnkyNaA1tBkd6pxKbR1vvokQXNg2jWaaTkkWjh4blgk9M8DG8X7mwUaqFjSSxKxlJnLRv0I+p/U6FpOF8FrioblgVFGtIM1g20YosSrd8Mc3W++a//4DXX35+nHfh64HF3f3HzBRG5XkTS+PMdRPjI6S/0i2659R0km8Fm3MOuahEYzcCGU7DckuKlhAlI9xgXpiDv6KBZBIV4BF5gUb6qIh16dcQiTjpBRHqJ8dhjj0SloAnPQbApObAGNJNrJy3GCVJDJx57ggIF5kRZZy5d3OOll19E3ck9kbuiqUCOMZN3uVoSz3QwIy3iAZ8HCaTDsJMObzyfLWLCFzrci4WWI0DUPLCKZIpYBI+0lOiaQ/qbja4aDjVq9AQ9xwNvyFW5tUsipYiwMi8xAjSjuTIpceICRqMgtNQp08ypu+5CrQfIOtSIuSSYFLM8KLIjoMPD85+csKxY6agGQOsTkI3JHPc17Wo1F05SYj5i0B1ZlxA3udHbfgAUW07CWdWwpfMpwB+XBdYKaGPREvT1aAMhWVRi3ergjowPtXqkz9AikXotMG+8ImMDcRvzfIkWMSawDjtKniZKWyBD14JnIIdNW9/4S+SYhjBUqk2RqlAziGOScJtIPQ1MK5FMKGb4nNCWSRbhqrhQvVPReOlpUJ1bZ+HAJCytcejEW0aCAl/EJnCN8BGICPI/iTZ8NfCwiHwS+N+B73H3L5ho/Nyzz6KTUlNm8kL1jlk4qPSUuGo5lI3iMVeOaWG0CBBTtMyYb6sgvaMtkUTJ3ZAKCwOkB0iY81Wiyr133Yt5wnKMX0xzxHLbCjOjZAKdto63GbFMGik1ahWmSIo5uLPDDSdOYhayUMvQe+ASMBh9/v8y93axum5ZXtdvjDHn8+59qpW2P+hGxBa6qxrQqBBIIEQuNEQlEki8MQqNF+gFX5KQ+JUYEyMfxmCMFxpUErkwwgWKXICEEL1QI0GBJukmVn9w0U2a7i7ShqJrr/XMOcfw4j/edSpYh2oJF3tVTs6udfZa613v8zxjjvEf/w+YlWpfBd1SAVHJYHK969GnjHFpjHl4kszWHyx8wwqo1cCT5dvaryiFX/To5NkMv+PSxWexU1RTS+tWc1GVTE+ukcDG2VrR4nz/9/3f2qKE7uxYE47LCj2TEtuI+6iIPBWUMJRQtA/ULYu4SiHhlaTf3OynXAPMmGcyziJKO/zVBhtVR5uesckp/r4NuT7ZC2wePAxeX4+8DhoovS7t7Pclk5ZT6ox2tSFJbmIdchW1SyCwQ5lLPXmVPNx9ijJuUFfgd3Cd4h4aM69H4q+bPbXHmjaas68RwWw8HdV0H70K/LvqcDxZkWwUHGKniEqmvRIsjZuJDGdZMBVHV6HOt/NbRJxDI8TDnRcrog7reodr9/y1n/GPIXzkc598Ur/oF3yBD2ZML1YtZg2qHpQtef9ZKFjzAH0RZ6OpnlsPXb/RZIrk46qsz/1graEHNoERxDk4zjrBLM1QqxLfLjtzRAqJkeQaRDnFYYfYY5FOjoOf7CIiBFfG3IV2R0KQ7Y0aLYZelYkEdaYQdpOz0nQFZEwGH9ZihpMzeLc/kPPvw/NF3j0lNp+Pw7JJlKS8NhMySZ/YSUaVWGRFe9aV1lWVmHXqsxv4xpZm2JtixIYMMoOsDVOCprnErByzWGvq5AqZiJyTRIjQYiPJM+QReYJMjXW1ZYJiT5paSx0dnbDHjJGnHXE2x6OBPGE/QZBb2oNXW8wrOH8rGdfF8cPeH/gGe0em8yHgcUSwojp23Q51nFEasxyh9W9OPj4ZdlG5KDbHBsOMYYeMwd6HOFNdor3y6s77eq6eXyl7MDBOFtNNYGQ5Yxivr6/YdG1vQNTrLYPb4Ujn6tWyau8NWUespUJzswu9i4wgbUgzQ+Goi+v1Jq8wP0nWKeb8+/m//vz/9vcWE/h7+fGdX/i8Ln4U29XumiU1X+AKuf/spfx2ZLPlexClgBKQptsTZtIMLsM42Ep8L5Fk5hb6dEDNrXFKt/2eDbQdcbGzFFeFyQfwtuQ1jlZHPMgwcgpyXOHkO1prrgJkKYtkGygPsKv1qaTWjZ/DqwcVN2felAeXP1hL+AQOw7WyskrOfA8pNZ6Q9Kb3+uTy5LG1brTtnDLqBs5gW/ATP/Ul/tqP/3i714jkFGZ9YgsM9D35wR/6YV7WB8I3VRNs6WEeLgvxfLBTKzSW4e9FmpGZxs1jgpkTw7m3sSdsDpi88i9DhCsXSzEcwqu5MI6NCV6safzgX/1hvvJhMe5LghvTmFczuSvJ2Dy8V2DvjVWCNZmTm912EkvuPudwH/E1bId8EEvIvh0HLoGNNRQqmhsbyoV8dIDNTqPWJsrhWv2P8Q1W4ANxPN/DS3DaOGGdzYftIlLtZIbzvg7xYtiL1jceg4cr2MXMIPT+DMSVsHTiNMuwwNhSJBpwDudubo0dEbgMqiNHmMV5kSXZb/n3/+vPfP4+ik7gl/yTv7SyjB1qg8zAF3gcVgaOkm0TgTbF0X6X03NaI6Sp3fiZoZs9X5rFNsjUyffFL/4VfsF3/nyuuMS4sluV1gx4IeM9Yx3OmFjeGO8IDuMkLwYVDVSeSQkxlKDp2W31ieLeZBMOs4vSycMq410DgHs02+4McJgmMkkmPOzm1T4h8oVDb0lGsUne+WC/mqLMtdV7O01ODGI1Bz6sBU40DRitmpoWq5NFMWKO5Mcp9BUQ82w2uSdNUmKy4BXiSkiXgUYNbRoiqBd1O25AqkVNJPraiaTP7gyTD19my2hTeoXaUGMQSNgkPMiR3iMxk1e/ndbzP4pzZ1Ol5WtwLBk2GCjB6UFoh1+IwMPhWDBBu/kmC2UJjow+bY8pts17a0Ek5xgXcOwCirMGaa+MS/fcOMmJ2SvFw+FB1CLDGDY59aIH2rq7eRLI3PEd7LgVw1fO0wJSp7xATLfNrpIexsBiA/62BdtuDHnNY3dA3MSVFJO/+Bf+4scbQwbFseK6k4yEEhdgmejCkmBOnd2GlG8uqW2tIxrv1L1bB+YxhY3be97fQuZtFuWbX/gLv6A9tvOWh7dfk+ud873f+0X+iX/sH8fMGBnsMixvtjs5CvNXwpxl3q9BlmMPoOxB1a35zFtsVKY9faYeoFABqKOH9aKwNdll7HPEi4hBzMWqi5GHw+BqoI4RPLax26fwZIdPtgpQ82FxnoYmZ9BGVlIsoixFS+NmETUJBDqd1Nqt8lUIfw3tuCPxu0SL3UnOwfVORSZliSpbdmtJ8LsDN9RDGMQ+2nZIU69ik/sIw7hOR5o3L75FNVGvmAXH1O5eOOwpVBwVqmHJywliqd3f5cwNj2EsHrh9IFfxPtSaj326hXciJE5iJL607jw2IPWAJwkh1WCmvAHkejK43BiVaIKHPYr3NdTxlLGmwdlYGL
aLGa/q/kqr33AxMN0Kq+KDpbDTXBIenSBHahRQjwZWnSEhizcqGZaMHJwY1DFyvzBtSKgVyX2SM5J3lvyO3/M/EOV8z6/9zq/59H0UncDnPve5+q7v+gXs/cAfydiLc02u1+Q+YE8P0XwSW4Ksm9fsSHN3zIckvSYBSrnBVfhx9imusymbxDQ+ZBLpjFLs84PNnd09zJJW4VmGJ3AfDkrhGZH6eyfZ9g6rhQIgumI3Q075cgcfMs6ole3hl1gTUiomVKfOXxLLRAkbpwYnjLmCquR2mC59aaQMRonDWZsv/tBf5Rd94buxCLXBW4WGUSDIQbFXltiULkNhqOL12zEqFnYuDi/4NbCzwS6Mwz2c8QLlzjs0UW3vW9QX8Zrs8blm12nG9DhC32NI4u2OsfH9VMgNjRIeeD5fcMjSPYLbD/OUYtZmUefooTPF1VlIeNP8WbiS88EYD4Ggb3TbVNHWsk0jwy6gZPFdNQgLzd5bZU24Dk2K0nrY09muE3xi1LphBhsnbPNgkATbpCy1F+e8Sx5Dy4ZKxIb1ZAExRZTaSyKfyyHb1r12d23RnpF1mhvQDNXUQZOmseAsYEJsw0MhMedAXcYDXf/X8+D7vu/Pf7yYQJXaoZivihHzASd5DdhTc9GxRaoD4y4oCyYlD+DSfno+AbhIJeW8JJWbKNg+2HbzspcewlPsSy3wS7m8BQ3wau/2Q40FJcW+bYj6oHZjv1NsGoWPiTGokmPs8YJL4R8U1F26vy0FTg4X1yCK469kbUVKrQePbVhOCYpOYi/ODqH1kQH3Ayq4vWOoLTjXO/7R7/7FLNOGqyj8cZijbVZ2UUfrLT+OrdRJlVL3FSkQtNdT7g/sOHmmOqYTvHvdEhcNrWKWGQPlRuY2MgJ8a8sRMKYR7bs/WJgVs1V3NYwMI4ZeY/AKrhi4msaJZmOeTdbC4kivYcU8k3kG5VqHje3kHHgYuQf+bhA5uFZiYxC1SZwZsn0fR62/j5RT77kwjFUbO8Xlpr1/aVQapgxFH4Mchg3r6LHExydY6R40hbexM4maKhIPrSL3al1AHMjDeVwaN3Zb0gypATcmC7iUpNvDGO0YdVCmIsjWHD/qHk6QexAPk96FYh1hL34ZsyBzsYfzb/6BP/q1Hz4+kk7gk899Ul/4wndJalkDT6Ml++3segkAHJrZyN7Xbhk/PvV83gBTT1BUiouetTEukXsye+5V3sCwZM+kVi9XruK6i9slHnnawlczy66EO4qym+0PbMMIscVyh4CunvErbrnl5GjRkMw3Vpt8xCioDffFnIcXd7XwdrTWi1b8uYBAboDATInF1Qm3lLoN6/EpXGAc5yIp7jCtCus0L2B05yKK9CQ74PQdbvIkeGyBkB5qwDnwAnwSKf3A5ZzV5621O/OcRH0g99sVaNAs8QydrBHAxo+YkmKHa5Q5JS/GM+FRsCywlPfDGUcrsgvGPdjcJBduiwYfFFK7CmJiW+12jqJXNBoHRiOqT1KTG68YVgtwRd5VMFp5enufDUebhCeQDMUZ8H5PzegT+JAwFuUC5h4GH2owuOEc3N/jlvIjqC0GYkn/Knlmm8fEM1m7cZs0pmnMSGs3q0aG3XRgvYZwMdtoOzbU8WTWG3v0+//y9368ncAXPv/dItTY4LjsvTzVTo4dvNsHY7VZZ3JsUycZzYJlyyVou2nXb4eyVx4UswZjXLi9sC3ZaYzYTFcr++qJvz4E/JwiPhTbNt5qvTEbJAqRlV6vaqBtMvs1+C5iX0S4FHncpN389R/7Sb70kz9FlGSdlEAd37Kj9hOc++IUfCWdwGUPdRxOUK8qALENWzKSsJINWLmTDLISiGYdbnEciHZI0tzu7YlgCCyLkhlpNBl1M8AGly2cJCpZ6dhZWJu7DINPvMiF9t6bJuRsJh2CuW5qt0OvB8lDa7ksjh8YCz83VvClL/0UP/aTP85x53Vo/ecPgCKWbLjDDtTFPIexB+9x2Pq9ygYRRl0GyH/fs962KUyDUER3mqsrmRp9otmR5jLrxIryi2AyU7Hg5jeLg69BbG+PiylewehcQC5+useYuh0+cSq/AZ+zTUALs5uKgLiUcTmKVcKTvnxvvvjDP4QNvU57vzh+qFrMdYij7sHNyHQV0HG1z2BS7uw6LGh7N/2bCP3sLdwiCuEqn/HxURSBquJP//E/xbmTKy/mAbcgG6h+ncmx4CBE1FNGm2KgQVzOHMa7U8RWG8m9eSW5Z7fDvOf9UuNm69KFx1gJp155Sa3yfOgGk//eYH0Q0BJ7k+eGV2ONkEV0S0/ThQX4SWZAmFME3/ZtP4dv/dZv1bzpKRTcNpWHuFTwPAaEHGpqlcxMs3fJjynNQTiLQx6p/Hi/2a+bfkek3c9BeTB9aaUkbIsxgocPrmn4A8xHW46B21AwSqZcdAPShnCEsahIzpQz8IsPMoIawcrg3JK0zpxKShLArtXjcdbWnG8Gn0o5FaxZZXzrN3473/5tPxfDuFo5Z+soujskUtrb2X5YM8FvMpK0LeIWTvrBviKPxyKpPXFT21+vCRjch/HBiN3uUJZvoSDb600d2A52zUtLqQUZODfGVrxZ3ZQnLxhcgX24GQTb7jb5KGp+oPbisZ3lhkWIxFVF5qFWNo9g8A2PB5//zs/jyKk4d2dKZPQuZGA5O4hWv/c5TtU7dmh8CxMfo5bpYIjBOJAVnDnxGDjwu//D//4zn7+Pogj8wA98kX/2X/jnyCFSy36UHnThxXJ7acFH7ZbN2gYWK4PVVlsHRXFNHvh4J5xhHcw3m8P3/9AXufdN+q2b4RjvMGokjyjZR59brTtQtdkU96tOUT+hHIKCy3TzYUffK0sPTcCZDcJxlOXnyufbpeQgBtwbUrnTeGm/Le2A4/OVIhm+ybI3F6NNcSKwuxhu2L0xC15D8zjniGATm+2KajscteHH4dYalZID0LbWsC84aeSt4NXzIcQCzIIPcgEM39iSY9HgKzCNeBGGvWgsRAwENsk1goiU5qI6n6AKX4WdQ/qN12acoza30GhU/X0SJslVyd6a5b9SaLfftnOxIKfIXNL7pHQcfqhZ7LOpcbGuJtmsYJfCSc8Rm85DnAlKYfQbODk4W8asZxoZMHzI62A4jy1iU7pJXO6Sr68q3h3Fh99TK8D5Wvg6wo9OYfOWYrELS8bp+7nb+RIL1diYHyqSXd2hGVy+sLxhacORNYRhONg+HP4W+diMWAJ1c0G9o77hs5+/j2JF6KGwC/z0Pt+421TRcnGFqD976VSKS23dObMTcV0pQwiB3Z5sE/hnDp5i7n3nF75b0tEh01Brssz04pzDzsnF53jLC3AYd3HPyUwjYynsM+Qi6xR2HlLQzQRL6h4UqZVlz36FM7J3w6dXUeFS2o2brIPdUwowLwmbSh2PCH0tf3WD40pPciOvoHIxfVLAflfEHW2EYW2dpZZSRifW8WHGEkQuUsklYMt5xW7rjmWwbMo+DBmYVIgJOO6LlcWtN1yqSjPqgkExpbXiYIynT6EHsUsGOseVydfzb5la+TrPKDO1x5WLPY0HTqbzrjqC+
4LLkx3F46cHaxw273jEJjWuq+Bcjp8kQkkNNWB0eGtQ7IK4Cxsb22I1HgfcmC4npdpGDTEZqya+YIcTtdhXYwf1CcUrQfF6gW9Jf6MZflRRIbMZX/IT3DEJipliiBKtRp1O1lBnyJGYaUfzFuQTIap5O0q5MSYsCk/n8GAvrbCnGasg/f8Rz/wzPj6KIvD57/ounUo7dAqHUOjBIR0WCXVjU7RKLEhfEgzZlPLrVFuBC80POVViyKRjSMvJqaKWSY9eEMNlMHIm9dico1nr3XJuu7FPJnMb5bfEM+8dez2amTEq1I7rxFYrW22lI7+BCXTbxqGiFD56hFhvC+3lTZFUspkeQsy3MZqXcLJP1pLJh1xQpbMrTziGvYZab3Ta5W7QyVLjjySWFMWFyDNu9tZJDZvs4ZLUZjFIfBSHJUfcgPHiZCQ1hJbOcu4qrgL24eZgVzsfpbEyeVhRu6PPHIFbzXXXR3bYaz8wKh+sMGwBWNuRJbuJYP6qNdq6tsRAvFIE2GaPwZUo2i2HXm9McqsgLPSjxgIMogYnDj4QyHoQ+chE7bbzxGMEil4l56lRIcWr3+1W1LmM5rzre/e8CyJvAmfvXkl2x8eQfZ4917olMNpSPA/VDxVRRZSJ1JbjaPyz1IYG2hxm8M6exX3wYrud9C++8uUvfebz91GMA1X0Gy8mWjbonSbN+0D0XSvNV2KiRothDqwbb29/AeMCV8rE/S4z6jbyiGy0wqQAM/B1YzUoFxvveeC+mvcEctOLWVYBL4cTTekdLoptU1p16WTyUIPnukIoONbqXRl+pBtZzlxFnnfYuMC2ZnLgsQeLEgHKllZoFdi82VbM86q1+pSkmp5ryyWsMQ8RIt8V1V9bDjk1eZQlUJx9CA6OcW9DLqr55iBE25ZxS6bsM1RwX5Ix1KbPcGo6NyFOvkmQNEoU6nVk0e0ha27c5ZzDwm0haoxUMhFK1DGKy4y4jJ230HMTecvLuIdWlVbGS3MLVpvUXsisU85MiZ1B1M0Y2gxF4xLiJDi7OwXt/AGSOql8gWtzPInU9/GA1/dg/g5sc0o6BKyaai4C11famyCP7MJO6rXUMWoGY8oajALv7kQFSAKrVeCEkobt8KjkGrsxo4PX5hpJ+dbmxw1W8vJ65LxVB1tKqqKc/+4//bc+8/n7KIoA6CHycnYDWkXvV83ABzOq9fRFDQFtuGEueW0WpCt89FNvAPGujWe71SfQBrLaxVVagQM8TFz98zTDfB0wJF0uUztvFviRC+057QdY4gXYMe34Uywu2u9t22aNT80/kyACgs3th+NG5t2vNRgV3LOAQZ2NVzHz4LxwFoQ7Hxi9BixmE5sqFYjpubVa2uK8d6oIIFehTAGK5iLn0JZsc/asuxDPYRzWh6cpaVLnE1Z7DfJOxbubC1airsGKuUfbaheJHnLNJvrLRrKquNO4n7Ze2W69K/Blmse3yU/GQgUXsAz5Hu5NhAt/OJtMJ+LqQJAkGsNJtBHMPdhbshxbh5HVjsltp667hIVIQYnGqHkuhimW/gyjfIqjUS/iXdjirCT35Kpk3uJKmOmwjzIq25QujXGB1S3syxe2J/cpkatcVut/88tf5qe+9BMEh2c2WxrsI2ale8gX46iqK6lbl/JnXWJIB8kMhZTgxb/67/7Bz3z2PooiUOhE2WJEsqtnx35qkkO+dguYULvYmH5x06rL0tq1RkSiZDR1LfuNDHn6nU93/mm6SU7AMN385hPb6s9eQ+66s6xNRRp3iIOP1fbajjHJLQR42qdikCj072YuKBJcnQyrODUZDBWnKhl9xgYOfgxzWZjVawNhNdWa7pL2f+v9O623sKa2ehVph4ilzqhKQaqZPVMmu7ZyCN+4GHKqMe+baICfq9fqybwH8MLIxS51K54pWXK0fRsDH1O/rckwxJ+Rb801KNP2RNoCjVBFstly2plJXkmMLXVix8adSpmLWHGXcWxCvjLPYE8wL1GIKV4xxnVj9qKRLQsb2e+zduyiMuSn619XxqJ1QrF3gTxHjLzkkNyyDLsPHOeOdnm+HgxrGjvGK8jX8WWL1BMyhLVozsJ2OHJJxjdjbDKT1WvXb/zZ38i3fvs/SBu+YbiA2wYgDXpUCMrVsXqJbXjfF3unxg6eorgkmuj1tT4+iiJwv77w+e/6PMO3ghsJGXZGNWliYjFlymlak12IIqmTWM+6K5ZIUuHqIEnrjLt2+fUhd9/TBaf5KpgforSii9SuPlwWV3po1bKVTyJbfMTB6lAnxdefSpzZAHUamXetG09QPPTaejd9haiwfsT+s7w4N1glq26iHOMTuIbMRQ32SI4luzMYJ1qXKu6sf28BBcAUhtJXeSDFnsdofXuwTQ5HfgVb00t74aMicQqzi3NpS3BP4x0yNDgB76paLLOw/Ap2ZG7qucVg3AGxOXVYdndKNG04ootgBDMmjrfeIhU448U5MvdQclNR9iCWMSf9OxxsbZmBAJRx1WAveVFI2em98Ulqy9HHt+skT5FuTnMpPEFgUl/7aE0HxkXhNxgXs4rIAydJV7ezh0xQZspvsU5QM8ktsluiVa5a9JvMRL2kvBJjmkxqjykr4hTnaPwaambFP0BdHq6uynZhtuSJ2D4WWaJYf/M//PP5enTAr1sEzOznmdn/bGbfb2bfZ2b/Rn/+m8zsz5jZD/S//4H+vJnZf2ZmP2hmf9nMfunX+xk/+RM/wW//nb+dpxd1IZqqTlHJVN2XjDBMF2QBywcCwXRalW0crVUY6kAzZQhSzUMvM/CD+5aCzpXCS0mdxhYgFxdcasg4Y/cuX9uBpDpaXEBmBDxiEgyeWQStLxK7rw7TioESaUY7+u7qTMGjNnUBFsFdxoNBVfHIhQWMrzwonJkKWI2T1DBeKU5qzTX8CbbJyPSEWsnqhwPT7+8l+y3PZFLaSJzUrTiULVipVpnLIDc+DiMuZsGrAf4KeViEuIGmU7pK3VrawIfMsdjIpswGhqy+zYyYQNBJ05ot6vmCM+FOFka6+uuxk6ibvIrcWjuuMsjJsmKHKLZ18m3luAC1jwJVk6YAkpKAt/W4rtuTHVmsVAZG1fPUNXFVDNmuhxPu5MPemI8mWyh9e4wcA2qCR+NBhftRmnY4Hpds2YBpSRyBt15J342YK4CnSmNL3eoMwk35B0fg80mR6O8BM+A2qLP5Nb/2t0Cp6P9dFwEU4v27q+oXA78C+G1m9ouBfxv4s1X1eeDP9v8H+OeRrdjngX8d+C++3g/45m/+Fn7n7/odQOFr400KIhw3ceB3yR8Q3UbMoV18Vr3NpZQIRmqZwZYEJFzRvHXxDA46HWcVjwOK/u7IrAM1BdLU2egxHfLaKXtj7NkyqGAhtLvsxlJmH7tMgJq3574Fe4fuRYLkokaIheaDMeVEVKZCNqtaYrrYIWKEv1sC1kJbDisdUtWiKio4Q+EZlQKTwjo6nKBycFJWVC/nKNrKDEzyy+pO9ZzCthyblYRQYqWIAAAgAElEQVQU4kR4IFcjtdx1DL1zLr77TtljgYpsJuWmhOXRwiZk4lqICptdjNQe
KV69jpx6Kp2I4CrjmfaWnY/4lDzXdnk3nM5sqKGOJHozki6p+XB21xZ7p1FFITdN3HLdS45ouidKBewSp+JgnSt51KXYxvK0NaWuY5da0pYs2XsrI95GcvzgJcu2BIGAlRpdS1ur1OZXHa9LW1CpNj5DBLIxUriGHRnf9p8ljtDIk8u5ONgI/sgf/PdkZb8/ux/4ukWgqn6sqv5C//nLKGHo5wK/HvjD/df+MPAb+s+/HgWVVFX9H8A3mtnP+Tv9jB/50R/hV/7SX6kH+BpMk+CFLYlrIZdenm9e5+9Fdw5ehiPWWqJVFm5UFBXrrb1S+6+b1WpwoufWPEogfq9ZvurmHskw7bE9hej7NHwCJiLHsJR193HWea625IlQlSLmPM1EztGFpprOq0k4czfjUH4AhW52pQANoT2uFeSuDVxCyfuUDxNgWmyhzPn83Z2npiBTK0IJIfrh71Mwsw09C83LCbaTAVgWex2l9fRIWdFNxSWEJL0Y1g6PrtCTytDpva2jyBSbXs3YK68GaIuqzUDdkToZJMP9KiAR09o1GWw65897VLR8I3ABOhmzKcVR3QWaPv9EZseWovNZCEF6lFJ7/qb1l7cvsZ7ktegIMq2G09p1CUl79fZbszGzC19h60VR4qjwjX10mp/qjtHFXMgQmIxJEuKpDcfzTUekLrPVa08Dd4muHHESjohgxmi1arHc/o5kgP9fmEBbj/8S4M8B31ZVP9b/6a8D39Z//rnAj3zVl/1of+5v/15vuQN7b37f7/295CnuroLsxEvIl/kRIJKS0eoK9Ym3RSRpnF4uNLhWYJ0ETCpBxuwJeimZiNTps26ZhxZDFdccD28CRkJJqKRcQllUJaKhItU55GgXWN30WllWt7uHEQcL7fPL9psrb5ZmupNaeeYpMFX7KqH1HO3yzUN+go2B7tL+2Hq3zhL8WIZUalvhFgltxirKalhzByqhtKp0HKZa7xxtkmLOdPEv7pTef5UTadQyqKNSlonFJc8Eq3ZW1p4b97aAE1fCTz+Lls35cDbebsfy9vfdkllr6KVUMHHDmDLXeHOQLnEzkBjKU9Rkr5KKzrsYtj6BQ9OMFWUn7FjS8USr1NFs1azqsUgGI1o1iHthbYoSCfjhpLoNY7CGa/9f2TkXztWFNlxSd/MUN6HQ2OapE93aurw3ZNWji7CnvtbpIjdZodw6jTPWwKTIFVvd0jMZ634+OP/fj59xETCzbwD+GPC7/vYcgaonyvMz/6iq/7KqfllV/bJv/qZv5k/+T3+aaafVZdLuZx2BY0/aaa+XolwXLsHoNYKb5id7/gDw5h4wdKN4SD2nTv28gWbDDEbh92uv05qSCm9mnVcfpplyNzKG1pOu08rt0xUgfVJgrhitp89h6mRx15rPLOSZZ9Xrrw6/rIGSFa3NQKt587I6D25RF4w24azujnShvdqB2ZttWK4b/xSU+hVNlq3ATLXHyk0osRE92RHsYR37Lj7/LLDdzsdmWo+60o3zaMtBOw8nyoasp+ViFYm2Nd4PlKULuC1dz2zPvMzTnYMwH9BJWnWoIYnt3K0ORJ3joTUaKUXArgs/9kZBr077qaPWmuAtTdnNOl4cFZzMVh32vUUqn9KFqexz9buojqFKo8luzgVIN+BtIKvDKJQq7NYU98GpxJH9XTVQ+gZY60ahmr5aJaFXbhnD2A5qyxG5CnWVpei+Gtp2aI4Z/Uq/9sfPqAiY2UQF4L+tqqcS4cefbX7/+yf6838N+Hlf9eX/UH/uMz++7du/nd//H/0+zFx0ST+tb5ta4SUcDUvQFds6z75cHH0z3eADRXV5Ir58P0hZMqaogjpCh+0Z2T3lt6fVi/dpJJplZbyRWbJbU9UOVWddTi1jrLcS3mv5rD7Ne9GjiKlmgCUUpyW+JvVgdfLx0yOxwS0diYWfIcQ6DbOtk7aDQ5x2+H0y70C9dTMMs0qnnUmkAgleeoBTZWGdZhZSXWE0KqQnw01F1ej4LYeOwApTh/QcmwrUBWWnI/fePPv9zfZQtCb5pIk6nG13Vp2FkHa0VnW9H/FcX2SSpFR5QNXFM5L9BBwbxEiBbwZa73ShdrCOiivotVKDyVYiBaH7zAwV0D449K6poLsl2O7rq3tnmElRuRVhT4rZKSxJPANK4Gy1V4BNcSIUnKNNxTOk1fs9tdRJH94HjW2q11aCdbIt46Q4rCOcwu3JJjxSuH7Gx89kO2DAHwL+SlX9J1/1n/4E8Jv7z78Z+B+/6vPf01uCX4HCSn+Mr/Ph7X8/QkRfUVyDcjHYaI87OcZ2VRZxuMFfMfKEM5X2XBbiV5e+nj7pdHrqxjR0gazaX0BHQcdrNzgzguNSsLlLJZelk9yfuFaq4lqIdpJHGYPV34cngu7VHgPq99I+DUvFXZ8fmk1Xxtsp5fAm/a2cmsdL41FQzbjUrCnfwmp+QFuNWWmnTFNg0fPs9AMRMEwx8JF6eBxZcbMF/sXzi7pFfXO26bfcrFHsBlgjlS2QyAXHqsR/aFDwdLRWPR9O9yfSqYdQDa3k1BW62tO6g2gFYNnbdTKQHsGKOsF4ejJYtlWc68JHn8rdc1e1VBfARFryoywLvV86mQOjdv+f6s4p6Z+8dY1MgirS4ZbLVGuDMNtaB7cH4LNrtbqaSeI8050MdZBeB6dHST/deelZKRN4e448F9MmYe0tQbFQLNkv/6d+Xeckfsaz9/UeTuBXAb8J+KfN7C/1P78W+P3ArzGzH0BBJL+///6fRIEjPwj8V8Bv/Xo/4G986UuiwqdaOnsaJ4zDW1JsydCzLMnamiMJdcBPcEcQMuTUqdyGEzVQMUEXYzdAU60rLxMIacjjMEmOaY0YPYLIb6BXa6UWUWJtAXfV5BPvGfx8eoW7BRRCrtkVDsF20y6co9MR3Xy9UNa4wlP27MpltkBq8tG8cLXr1fZoSfWKrE+W0u9M6FSP8nauNb3GRJRTp9ex7QBENMLdv0dUi/y0v06Uy2hZMisZ8i2gBFaexisUJiMBWPdaHd2u9YaV7FgbwmFgbx0VPbCIDCaEvdCpmYy3kQjbIh2lEyflC2lwSpsMgbPGMzpurMRXMp9eDH3au+nhoTslO9ndkDYxGvP8026rlIMpctYUw/XIa9E7WzD78WaLUZqeOpm709IE1zRtq7ftQGFvYCUm8FAcGl1X77yHqlBRPHqPyeoxUZuDKhjvPsf//mf/yGc+f19XQFRV/yufPVD8M1/j7xfw277e9/3qD3Nri+7NXfDOYHlX4XJOalVIRvsIyDyiusXFP63a1X2bMgv1BpqLVlwVb6ODWqzUm2g9w4ce0gBqVqvfGkUexi4xE8vlT+eleKvur3lGjGGG+XjjCaSl5n2Dp8tvMEnbPLn5EUgeGkKftdE26iTLiwtv7TvQAJKAOAleKoQi7FRREwNOqLzxaUfwdLFRKIZAQUwSbDnniHeetGml17OJUq7AcGoLsdYJBayj3Lv2zLeuPbI/1mlm1qItrPERsUSrhVxlpuugVFl1U2Vv6lKeW40NlHNmG8+gVeSzM5gHjBCXYAk/qtA1ems
yzpaPAijyu57rSgDX6xJ6ASa5cJixA8yWMAX6tQ8VR7Oh7zOeoxgq8tYZAUPdqahwbQLTlOLq76/ak70ZkM1dNR6mx0oeAqI7bwKnmLJXs37fj0JuVuk9XAP+3J/5o/yq3/CvfObz91EwBr/5m75JTqsu1pXdHc2UOhVmGcf61Mb6RG4jDKsOk3yeCq0nsNP6gg4sQeOEWTGQUUfVc69evV5UxU9vyepp5HrAyn5wTu9pN/q6rdeoJq7/qSbA1DM9RxeVPn13SQoahRKXzd9srWvpxgxr7kLtNo5I3Nu5mN18CJqS2ztpsz5xn+s1wI4e/G7xaQS/ETWl4TZAVykjTM2oyUg1pqIsdPpyaS04x+a4mHjbvK3cxG3XUCK35ek6DVc19daW5ur0DoNszNDtDQnXGtiaSBMa9N21PittdnyUuqJ2Cspaav9dMd7iOuhEzWrNPlrrbR/KgbRCIlwV74NWfL3o1X0V+m/VOIQtjR/2BBxPj6T7OYo1IFpSvF72tFnrofD0JiINJTOj+Lvng171llRlX/U/vfresPRoV70XdUtG9P1l9elh1Pflr/51/xq//Ff/i5/5/H0URUCLdLWhV8I6rlkye271rZPennNUAbvR+edN3TM3SaU1C8wajabnfiXlWNEFpNFnwPxwkqaYJrYUM6Z1dXu8M7SDp2fBFEPQoi+EUBtEoknMt8Cwsl6HCTF8RpPrmmtuPiZueLa673kqPBl1OrlEcDEv7QhtqtpXUk//OR2oDS51UfDiuH06oqSQ5oMeIvqlPS8F0fM8DXWUibtghrE4G8CxWELQob0bSqdkNrciRXry9jJwk0GqVWMAmB6cUmFXuyZwNAX0YLii59V38PTp91S5yyd6rsZGk1RoDzl8ittfow1gmkvZBZaKxpVEVbYSjkITcaxvuHiOaoW4AdZjD8/C2WQiL0zqNA1TtqkSklOYmKGuwpLpkimjjlKJ1bp4ArHpe1qEL3OaTIbuFZPBrp71T9+7YwpeeZaOYfA3fuQH2/fxa398FEWgmjV27JlmK7eeQm3O8tJa6rg06dMI17qPTvupRPZbKSXb8yJrffPMpoPbZeWtcSDkiBMNKtFinOgZfxjOlslk6+mjwcQK6cvtuadPazUa4giYwBpHnH1rhNkKhoVwhW4pbevvnWYZJoosK0oKsW5p7akLwN7a+y25ktpZGqdInQRPA1at4PXn1SNMgYAymnPgjXTvoXY40EmZaqOZKrLLFA4rYaI2Jp6DYsg1ufkYSm96xsS6iECl30WiJoHBPB/A0NgWfWLmp81Kf6RS3Xxi5xBPdqNrbZvm2FSnF2VyoCIpjrYEdd62Ikrx6eKTQ4WHTwuV9SojDm1lh078Bj/3OWRlOyP3StGE6tcxnKOciBQ2pIDZwuxmW1JN104vdg7xG56dgFe/Zz3+pDcWkw1kanyo1Ptsfova3kXcnU5WtrZLT773//xfPnug5yMpAgA5luKbAmp21Y4mOtTAQl7+Hjo9zukq3AXEmmCjiGmH9iC00k3lR7TNaiDQHdZR8IZv2PdibRF5RloTfkqMnNJd/9w16zFydncr6fV20uihO02YkfljlOFnCBeIoLjfronGY40jbvWmuR/WCw4r7irNx4C1W24hT8P0RXjP5meqDcbVnXhLi0mi7NMhwXou71MmUgzBco1Z/dnuVpTVl8slosonJiq/O+SzibGp7WrVQ+2+VoebtCWKez2hR4EGvehrPCB7feakScsf7hqFwjklD7+oI2Wh3ZqVm7Hrns2X6IVguIxbamNkI/zaDB0Ds1ZoRsHQ/n6HsboCiTcht+djn24dsMBtUFMP8almEAJ5wOpqkxmZ2j69GgwIi273TQBxKTLPop5fLEC31ZxpuncsTIdMatRwU3f0LGSBQ83GUUbDLvEpj+Vh/PH/5j/4zGfvoygCP/qjfw0vkWN2Sr2lfDtV7azsm2irjadw39RoQKuNRw2t5bhTXPbTyHWp0mqfnozTs1NLS0mtXdza3GH3zZ3WWIGAqUSrmYUosE8sItNY3meM6Z/sFg6Pt71wNkFGK8UmzVjz2lMbEEL5AKCQC2d2xYcsVypRF5Odgefp894pVk8j+nmngVPvLUS6KKliVeu9ObZ64yLw9Vhx+ljJXn95PbRZ4HBqSMmWEHbYLjMT8TR0qme/9k1RU1TX55lp9SRLdVGjGYUiuXGqFOLZVudJUbdGppEKos0wbu174UPIR8J6I9PF5tKwo4JRhUWTtggZ0pT+/jHhR2D4tu627I0a/FzPunX3QBEBMyUFdtfvRznpTy6CyeDGoWIzaIVnhsaAMu4ItjnJJu8iTnTmYoPMTe6qOAIJTToUK/3eT9ASorunxNbGVwfqrtLGIMEz+YHv+0uf+fx9FEXgW77lW7hrEHNgnlxIBpkVYpRZcNoZR4BdCOArAXQgXTzezK8waj554aEiwvM0VbKtpb8RQ9wTfwyG3Q3cpbA2V4KxYaSJFuRPSXJq320APog+wZ8mpW3tpx21lUQnVio0pbmyBk01MvbRrHdyvFF9FSjSqbyp1nZXQWhlGd5Lr3Ly6CR9zopPYU41w4DUjh7qDYAylKwDU/k5TTwKC7Ds1ZE3eQcWq/33B+dhGHJnWuWEaxzQjC+iDw0RWngTMcUt2BjLnoi22tsdwXFxIYY9H6quxQ5Wk3JxGLRxcGwcpi3M5AssXEMPoDYkjRFYcLY6I2uKrgPTmpGpyy2OxGlYtVWCQh40bhC9ciyBolDYMmZDGs+4tVx6n2eFMhNN248cJgUim3EWtvVarOjvJ+9D9yNnqtnSeVLjnzdoeVyy977GuRb5SOqKt01OxeAgD4YE/uXf+vs+8/n7KIrA4/27drzRPOT7OUtOuKwRfUVEqZqnYpZKxBedAnpTwpNwSQifIpNS6Cy4ZtUdMtggdNPJwHG0QckD+dWLjTZDLEZrUC1LryeHLnTSwR5HxpgC5JwL0yo51QEcV15C2u7krCJKEV11lILrOlaIGGzTiT38YG6crd385bpRyBDA5UbFlFJviMUoSqIrv6DkUrzNGv0GNwWBHBzqonN3FBtuWgtSjTX1YnGA4tyn2mqRsKa2FnYEfGmnSu2S96JNaSKeTEhXV+KevbVIjXOmAqGADcNumlgUKvRo7aVEZL2ucPA9dM+4CqsfbQO2Gafb8lUukpV3nLv16Ka3UH1ReNupm1yeGoA+dWQLRpFnSHjmh+xgVrO7v7bwczoS3HrMMrb1mIG6v5OvAJy2bbMG+CIchpytn9bjpLY24G+YQpDa5oSwrieHo6Y1xpSEaSVstnU/mfMbf/t/zLd8x3d/5vP3URQBr+KaLikph3Vpt1x7sRJxxSdAsQN8QoTBs+1CbzauJJvc3uqsIqxkxV0PRU8l0mW7JmS51UxObsaA9/M0+Wdw7WCR5NjQHYrc6A41HLu8z1kBXFm6iWoOztRe+Q2Yu9t//nlD2POiQcYlxuitzQG2GO0wi291OENo9E5jNMfdqphlRL4yy7h63SjiSKNvnF6Fqnt2OsMuHB+QYW8A7B7V9GkTTbgLLnU4S6
CtNiYqoh46QpNihQNL5B+X72FxGKeYdmRoMqpxiuoAlFbFVTWl5sgX4iF8QirL3vp0W7+icDPOlmHnwfGzWXXwjgT3ezNzs8kGKzXMVUJtRXblRvJaTywXlVubplaoPTsALSaO1Kj+VdTlqqaYb2L1XH5Md8hRN1AkNZrwFOpci5CVWIN8naurkTbEhmW2irPO20F20siaWA6opYyGJm2Va5Q80TiCG/jhlHPuV771Z/8jfK7HjK/5/P09f6L/Lj4MWK/FlcYoYxNkTJY582hvWp0S7BnsjdrNczRa084zpXaMIaecYamMAEIXe3/QPJqn9+X0Gm3jBet18HLfemr3lruxDAEYvuRyM3RiUK8Cj0IpwEr6eIpdmrZ7tCuXMFEFS0Sco9mz4e8wmUWOOLyLrZvcldQjQpkeyFt0GLn2GUr1sWJVyFe/15VW0lfQ7ewoI7J9EO2QqTQnXh3LF7AlG7UMYhmLwzFndeJtDFfRnZ26dwQ47XrRKT8Nf91N+BpUwtWBoxUyBqlj1MqWUwenrddVnIzcLvBxJ9xq+a3ZjGGfFvyBpL1WS0VyPjfpRU5vM5mkhrIY3BP5lDoPV2R6esCQzm6JxKH3JmWv/tQJiIwGsV3uxZacasQujTqDPWQ2IhBVK9QRkJ9srl3UcvkQnAM28G1Uzs4aeOH44USxWsC1bQKDUy5DlRR4GCEtQItksVFKjK4kj3HShNHkYZcISUExr8kf+gO/Tb/nZ3x8FEXgh374h3u/Kv7IqNNEB6GztouxNtWKPquOZGrPAAFNWg8qmRh51qP9+wlxtTkhWe8xoteCfpIjbJxlRYyHWICdc5+l0wOb5AQ7qui5Xeq4k2+rSqczA0tz766pAuFad5678B0MgsEhEu7qddCRzc46k3s5FyEaSyrzbm0ZbFTdKnTng2yqb8QXeDRbslt4SnjDLtPqFJ0+VY7HYODEFQTvxRTEWZac6Q2OOeTWjr6KPZJxG68ZNAGPQYBtrtdqirOKc82pIJAC7t0notpgO060wWcMmWSUGz6elOXilVcy4MQQXqN+rJOcjJxb850PPVzlzAz8Du4N2CC5nk0Ma8AZKS1GKtHao3gMeO+CJ6tabJXeeoRsHdbgTHWgzCIuOmuyhWT3gNiUL9k1IKKZL2M/Aq+bjIdi2UIEH5+gmKdLnZsYvjoIUoG5gfOoIKp4BpEYz82UdR6BqRs5Ms2xF+dxA3vxIuICvoN/6Xf8Hh7x2TvCj6IIfMd3fAd/7I/9CQEnwtUxDhaLCum+T6sFFiIPjdPrq5umXmp15TN0upxBltR955Tos8NJJmu2ueQJtvfUa8EAWXAPkY/2AY5jXNTSXrwIzoILAZDDB2lStNXpjuS02zENPD4v5NAcWgGvDstEKqEOy5pGWrKVfrVFPIlB2whbKJ3sgVCuicUho0/nnY0ua3Qp2/2ggl0htV4G5xgnDyeT3cm/wgnVLluJGmwuinRaCuBM4+SD4cltmlLOGlw+OoyjFx4oISdDYBazDVe763nygp6OvHePR+JliBx0Iccic0XHp8Cc3vUnuwZVolMzgrKbnAd/J85CxGG9dICobUZo538Y5GjiWc/lp/kjInfdHPtA1pZwKlP+iAljD61Al9B68lBsLH5aLtOYsJCa8A7AYGkc8HJqadMgsxk5E3mJXGWVXKmE7Wcq8qnd95WzL+E/s8VXT51DPJ5S9QTkZfkakBHMc2Anmxc++cafxetnNwIfRxGwCL7nN/1GOPIPYOuXqiW/uKf/PkPst+Wd4+4JnysYaoeOFexX/GwuN95ZMTK0hqvFatUby6m7+hgdxDZydwiEy/Z6Tnik4Z8cTixO3Fp9pRD5KmRsamhvm1oJHk89QK7E29kV2RpQDA65jUixCp+0vDGGfBPYhA1qD60HkbLwmIxFpcZTroDZO7bDWqVk3KHCFmjd6RHCEkwGF+GbEdqzj5B/Y3p10YUrBJjdaVy58SnqsNl7nOD1esXP4irIeCHe3bxYQRRnOjwGjIGnNBoy3xgYQ/x64MkAOiVKtmTZpx8q48z5Jnd+yrBPjWbwyXPgAQo+Pf1N41DpsIvglZ0o6GmA1yHPlsFopwQxLhmNbsTI6wdQOsWHvBAdAZkDntIqr5KBX5lGv2tQ/h4vZ6RTQ7gIKf3+bjk180X+DPllBhfUQ2lBplwBP+KacC2yJIU+Hm+EpXhVZubL7I1KtTBrF7FM0fIupWHEQuY4h3pIo1LbmFyf+fx9FEXgh37gB/npv/llwFi+tWtZhT+caY7bVGKMgXu3SEs3rkxUpIs3E7/dIlhZvO5AuZSHyw7usHeRU+2ym3PWYs5iRoOIS/zsVQ0aVjGi8PNgoJgqv6QJPxR33tph8WgbKzEUbcnX7U4x2sISy02ZEdfB2ZKCbvWCto8ERaVtwmCQc5DLuDAeYyiabb7iAwmojjP2xh+yOX+3FfjxHAdsi8BTR7bf+k6jhU9ae8keRQ/LWU07K2dlCPMYWpUaxdhwxxBPwSfV7WqmTh2WZt/XJquIGFGk3ewJaf2164l+F57FNrXk4gxpL1RPpl5t3A/nOCvV1b2EHjgbr7yuos5F1eGufCPkXBa6L3r4snY1rTNgGWdoo+RLI+SHVT2uCIuQikxGnpcne2oksdyyVDsPFe8qxZ8bZAYMY+1X7ba2YTmFC5gx7RMRlWhPQETFXi2cuu930mmY7FsLmeScQN3GqV7/BpXGncFtwbJguYkibZNhyXwS6Qz+83/nexpP+tofH0UMWZLYe5OKrkavbRa1BOj5kLvslYc4wUnnzKKYAvG89XEJpJMmNlte2fWvaW67wye1eCZNnO89pKfPNVozCr6NRCFiaw0RSvJBxNb4ISsgFSa03rEoPA8HZ3s0HVSUttQSVIDcVnpspvzo7EigFNOIzg2ocXNqEJeRqiREBfh7ihesgss3rygYw1KEvl2JM8ENG0L1yxQNXkdUVsvCbGIPGiZPiov0jXNzVYghd3urETfnqKMaGGvKtvuMwyTamSngdvaZmL0qzu1R+H6nVWd+inbjUDaaN6Ddvywf1Cktpl5Xq/QsnXjc+BJXZL4GPz0Wlp/jchHyYyPGqZ03/oJUi05xwC+sbnVjVpy1dUXKefo78ixGnjCMPGqx7SBGJkmWQMBdB+7BXVvy7wssF3lQS99jj7s0GFWb2OpIy5I7YQ6nTGzPfQB7gX3ppB/PgiyZcCTNppQexVIBqTm+wtX6j+XRdnPqNNd54V19Qv6/7Z1LiG1HFYa/v6r2Pn1NAhojIcSgiVwHGeklhAxCRj6STK7OMjIDwYkBHTiIZJKpggqCCIqBKGImRgyI4hMcGY1iXoY81ICGmAeKSnP7nF21loNVfdNcb5sbQ7JP0/uHw9m9zxn8u2vvdVat+utfOgP7W+nPg63IBOQiTfFTrzbiqZIL2NhIG+HrWA5aj4W9VDE2ZBejVxKJ4gMD+2u/8YBKhPx3k2lWon2oFL9s3tO0yVBK5DMZ9jK0yiDv1k8GeWLSBuUJUrjYTmkVyrYKsgHzwmShFchTRl6wKpJCsYaFweiUoZVuK
ZUTZJFTpKF+cZ8jW/8VN6esQpEWe9o9CmEybLfSCrjOUKuTPTP2LZRtSkSFIB6u9RRLhNmElEgjIZbJUXOpm0arit25xULbwIhN3achi1FRaFQyEg3fQGnxy+QpMoY9A7PGlCpZE4XCmAdSC84TiqdojOzdjDB2dVEssgkvsRnIFNr7nBtVBioxtq2Ea54lpkEhB+/96qw5td/3cbIAAAcMSURBVBWGOsbyGN25wBK+abScSbaOqaQ5U41A3Sw4eQnhlhH6BXOwKaEKQ42t5+HPMBKbmoChksc1mbCjt96DIHcvNbPu85DE2sNBe1+BSR7IxcArWgs2hdFBbcRGQSGMRBWBbX+fgrmdnVZOaYqNTA3ChFUk32MwGEw0JkpeYWkPmuHVDn3+tiIInHzvSX708x+EYGcwpMSGqNS2IQxHbc/QmRybOrIwNVqroF087YbEn0LxgZUndrIjr1iuZIedlKDbedlq6gMxkJWopUUfwhSrEzstZLVNA21zAvqs2abCsF6TUurrxSFlLd0frw4VG4zVYN0cwikyxmQMTRRfxRKPGUMNEUrLFa0NW1uowaxACUvw5I0NwnLtfRXi/7XaW4EXWsnUAVrf5mxu+7d/WKnVEnNMb7RqeBWphfNRNFKN1+iVvMmYDzQcaWJnAqNQW/jkJRHr1wVESJ3zVMJOi2i0gsLyPVqvimSJ3CZUo/eC1ejA5Cn27GefMG/U0AChFrqE7LECtCK6ChffgInisOOV6hGsq3v3/XPS0Gh5TW1RP598AkUxdmgZGNEExRqxGbQrKrvZR0KUonBvttRdkgFlZJnBc3RxKl1FaIVa4zt7Ud2hsQKN5JSorYVtuRljeUusaKlgaaLW7iBZhGWLmk2LeX3eGNqIMqm3X0whdIvw2ce3MJJIrSHlWDLMIelyxZhayyTPTG2HO750Pzkd/qhvRRBA8MEP3IypMnnFbU2xNcVDLVd2wHdKLHOFZQ3UeEidEwy2YpWdNE5UTWy6O27vcM9mamymykii+EQ7U7DW2LAhVe+uQpkyCOXCbkpkDZEd5rAZMR/Ig+NDOAFpCENHK11QE4vitHWiWtzo5onqCZ8M3xhWDfMBS5k9ooFqmWL3WRmjUUfRvtjWyXlkSAX5cNaENA9RqKxT9JlTjdZYZhkvIymJQnQzHopD3z2XUrS8nrpNW19LBBqVuMFdlcaAK7FORk4TUo1pF6KaM3VxUZaHOCd3ObSHmo4icl6hoYtphsyJsFKi73Fmf/e3eYnVVXdqr06oJGyImos75HVm4ztRaFRhnY1iG5IKK4yxZdxbXKcuZiC0GXioPIdhxeSNjXZhDCmx9sB6VugALqyKqULNwkpMG6Uc3pZ5E/7/3ldPDNK0IdsORuIiot5U8pqXX/4HZyoon2DYiLYR1CmCdYrsbsgVV8PWq7Ny6HAVj/+PDVOXXXdrOoVwytWjJbGClHM4EnlX2lopFBXWAwxNVN8l5120LrEf5rDHz/+HiODNgqSXgF3g8P7J24/LONr84ehfw1HnD2/sNbzL3d9x7smtCAIAkh5y9+vm5vH/4qjzh6N/DUedP8xzDdsxHViwYMFsWILAggXHHNsUBL42N4HXiaPOH47+NRx1/jDDNWxNTWDBggXzYJsygQULFsyAJQgsWHDMMXsQkHSzpCclPSPpzrn5XCgkPSvp0d6W7aF+7lJJP5H0dH9/29w8D0LSPZJelPTYgXPn5dx7SX65j8sjkk7Nx/ws1/Pxv1vSc+e0yNv/7LOd/5OSPjwP61cg6SpJv5D0B0mPS/pUPz/vGHj35pvjRezV+iNwDTACDwPXzsnpNXB/FrjsnHOfB+7sx3cCn5ub5zn8bgJOAY+9GmfgVuCHhEbtBuDBLeV/N/CZ83z32n4/rYCr+32WZ+Z/BXCqH18CPNV5zjoGc2cC1wPPuPuf3H0D3AecnpnT68Fp4N5+fC/wkRm5/Bfc/ZfA3885fRjn08A3PfAr4K37rejnwiH8D8Np4D53X7v7n4kGude/YeQuAO7+vLv/rh//G3gCuJKZx2DuIHAl8JcDf/+1nzsKcODHkn4r6RP93OX+Shv2vwGXz0PtNeEwzkdpbO7o6fI9B6ZgW81f0ruB9wMPMvMYzB0EjjJudPdTwC3AJyXddPBDj3zuSK2/HkXOwFeB9wDvA54HvjAvnVeHpIuB7wKfdvd/HfxsjjGYOwg8B1x14O939nNbD3d/rr+/CHyPSDVf2E/X+vuL8zG8YBzG+UiMjbu/4O7N3Q34Oq+k/FvJX9JABIBvu/v9/fSsYzB3EPgNcFLS1ZJG4DbggZk5vSokXSTpkv1j4EPAYwT32/vXbge+Pw/D14TDOD8AfKxXqG8A/nkgZd0anDNH/igxDhD8b5O0knQ1cBL49ZvN7yAULbG+ATzh7l888NG8YzBntfRABfQponp719x8LpDzNUTl+WHg8X3ewNuBnwFPAz8FLp2b6zm8v0OkzBMxv/z4YZyJivRX+rg8Cly3pfy/1fk90h+aKw58/67O/0ngli3gfyOR6j8C/L6/bp17DBbZ8IIFxxxzTwcWLFgwM5YgsGDBMccSBBYsOOZYgsCCBcccSxBYsOCYYwkCCxYccyxBYMGCY47/AF7I84S4a9l/AAAAAElFTkSuQmCC\n" + }, + "metadata": { + "needs_background": "light" + } + } + ], + "source": [ + "#@markdown Now we can initialize the environment and render images.\n", + "\n", + "if 'env' in locals():\n", + " # Safely exit gripper threading before re-initializing environment.\n", + " env.gripper.running = False\n", + " while env.gripper.constraints_thread.isAlive():\n", + " time.sleep(0.01)\n", + "env = PickPlaceEnv()\n", + "\n", + "# Define and reset environment.\n", + "config = {'pick': ['yellow block', 'green block', 'blue block'],\n", + " 'place': ['yellow bowl', 'green bowl', 'blue bowl']}\n", + "\n", + "np.random.seed(42)\n", + "obs = env.reset(config)\n", + "\n", + "plt.subplot(1, 2, 1)\n", + "img = env.get_camera_image()\n", + "plt.title('Perspective side-view')\n", + "plt.imshow(img)\n", + "plt.subplot(1, 2, 2)\n", + "img = env.get_camera_image_top()\n", + "img = np.flipud(img.transpose(1, 0, 
2))\n", + "plt.title('Orthographic top-view')\n", + "plt.imshow(img)\n", + "plt.show()\n", + "\n", + "# Note: orthographic cameras do not exist. But we can approximate them by\n", + "# projecting a 3D point cloud from an RGB-D camera, then unprojecting that onto\n", + "# an orthographic plane. Orthographic views are useful for spatial action maps.\n", + "plt.title('Unprojected orthographic top-view')\n", + "plt.imshow(obs['image'])\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "b80e6ddd-bebe-433f-9106-39ec6c7a4545", + "metadata": { + "cellView": "form", + "id": "b80e6ddd-bebe-433f-9106-39ec6c7a4545" + }, + "outputs": [], + "source": [ + "#@markdown Then we create the model (a CLIPort variant).\n", + "\n", + "class ResNetBlock(nn.Module):\n", + " \"\"\"ResNet pre-Activation block. https://arxiv.org/pdf/1603.05027.pdf\"\"\"\n", + " features: int\n", + " stride: int = 1\n", + "\n", + " def setup(self):\n", + " self.conv0 = nn.Conv(self.features // 4, (1, 1), (self.stride, self.stride))\n", + " self.conv1 = nn.Conv(self.features // 4, (3, 3))\n", + " self.conv2 = nn.Conv(self.features, (1, 1))\n", + " self.conv3 = nn.Conv(self.features, (1, 1), (self.stride, self.stride))\n", + "\n", + " def __call__(self, x):\n", + " y = self.conv0(nn.relu(x))\n", + " y = self.conv1(nn.relu(y))\n", + " y = self.conv2(nn.relu(y))\n", + " if x.shape != y.shape:\n", + " x = self.conv3(nn.relu(x))\n", + " return x + y\n", + "\n", + "\n", + "class UpSample(nn.Module):\n", + " \"\"\"Simple 2D 2x bilinear upsample.\"\"\"\n", + "\n", + " def __call__(self, x):\n", + " B, H, W, C = x.shape\n", + " new_shape = (B, H * 2, W * 2, C)\n", + " return jax.image.resize(x, new_shape, 'bilinear')\n", + "\n", + "\n", + "class ResNet(nn.Module):\n", + " \"\"\"Hourglass 53-layer ResNet with 8-stride.\"\"\"\n", + " out_dim: int\n", + "\n", + " def setup(self):\n", + " self.dense0 = nn.Dense(8)\n", + "\n", + " self.conv0 = nn.Conv(64, (3, 3), (1, 1))\n", + " self.block0 = ResNetBlock(64)\n", + " self.block1 = ResNetBlock(64)\n", + " self.block2 = ResNetBlock(128, stride=2)\n", + " self.block3 = ResNetBlock(128)\n", + " self.block4 = ResNetBlock(256, stride=2)\n", + " self.block5 = ResNetBlock(256)\n", + " self.block6 = ResNetBlock(512, stride=2)\n", + " self.block7 = ResNetBlock(512)\n", + "\n", + " self.block8 = ResNetBlock(256)\n", + " self.block9 = ResNetBlock(256)\n", + " self.upsample0 = UpSample()\n", + " self.block10 = ResNetBlock(128)\n", + " self.block11 = ResNetBlock(128)\n", + " self.upsample1 = UpSample()\n", + " self.block12 = ResNetBlock(64)\n", + " self.block13 = ResNetBlock(64)\n", + " self.upsample2 = UpSample()\n", + " self.block14 = ResNetBlock(16)\n", + " self.block15 = ResNetBlock(16)\n", + " self.conv1 = nn.Conv(self.out_dim, (3, 3), (1, 1))\n", + "\n", + " def __call__(self, x, text):\n", + "\n", + " # # Project and concatenate CLIP features (early fusion).\n", + " # text = self.dense0(text)\n", + " # text = jnp.expand_dims(text, axis=(1, 2))\n", + " # text = jnp.broadcast_to(text, x.shape[:3] + (8,))\n", + " # x = jnp.concatenate((x, text), axis=-1)\n", + "\n", + " x = self.conv0(x)\n", + " x = self.block0(x)\n", + " x = self.block1(x)\n", + " x = self.block2(x)\n", + " x = self.block3(x)\n", + " x = self.block4(x)\n", + " x = self.block5(x)\n", + " x = self.block6(x)\n", + " x = self.block7(x)\n", + "\n", + " # Concatenate CLIP features (mid-fusion).\n", + " text = jnp.expand_dims(text, axis=(1, 2))\n", + " text = jnp.broadcast_to(text, x.shape)\n", + " x = 
jnp.concatenate((x, text), axis=-1)\n", + "\n", + " x = self.block8(x)\n", + " x = self.block9(x)\n", + " x = self.upsample0(x)\n", + " x = self.block10(x)\n", + " x = self.block11(x)\n", + " x = self.upsample1(x)\n", + " x = self.block12(x)\n", + " x = self.block13(x)\n", + " x = self.upsample2(x)\n", + " x = self.block14(x)\n", + " x = self.block15(x)\n", + " x = self.conv1(x)\n", + " return x\n", + "\n", + "\n", + "class TransporterNets(nn.Module):\n", + " \"\"\"TransporterNet with 3 ResNets (translation only).\"\"\"\n", + "\n", + " def setup(self):\n", + " # Picking affordances.\n", + " self.pick_net = ResNet(1)\n", + "\n", + " # Pick-conditioned placing affordances.\n", + " self.q_net = ResNet(3) # Query (crop around pick location).\n", + " self.k_net = ResNet(3) # Key (place features).\n", + " self.crop_size = 64\n", + " self.crop_conv = nn.Conv(features=1, kernel_size=(self.crop_size, self.crop_size), use_bias=False, dtype=jnp.float32, padding='SAME')\n", + "\n", + " def __call__(self, x, text, p=None, train=True):\n", + " B, H, W, C = x.shape\n", + " pick_out = self.pick_net(x, text) # (B, H, W, 1)\n", + "\n", + " # Get key features.\n", + " k = self.k_net(x, text)\n", + "\n", + " # Add 0-padding before cropping.\n", + " h = self.crop_size // 2\n", + " x_crop = jnp.pad(x, [(0, 0), (h, h), (h, h), (0, 0)], 'maximum')\n", + "\n", + " # Get query features and convolve them over key features.\n", + " place_out = jnp.zeros((0, H, W, 1), jnp.float32)\n", + " for b in range(B):\n", + "\n", + " # Get coordinates at center of crop.\n", + " if p is None:\n", + " pick_out_b = pick_out[b, ...] # (H, W, 1)\n", + " pick_out_b = pick_out_b.flatten() # (H * W,)\n", + " amax_i = jnp.argmax(pick_out_b)\n", + " v, u = jnp.unravel_index(amax_i, (H, W))\n", + " else:\n", + " v, u = p[b, :]\n", + "\n", + " # Get query crop.\n", + " x_crop_b = jax.lax.dynamic_slice(x_crop, (b, v, u, 0), (1, self.crop_size, self.crop_size, x_crop.shape[3]))\n", + " # x_crop_b = x_crop[b:b+1, v:(v + self.crop_size), u:(u + self.crop_size), ...]\n", + "\n", + " # Convolve q (query) across k (key).\n", + " q = self.q_net(x_crop_b, text[b:b+1, :]) # (1, H, W, 3)\n", + " q = jnp.transpose(q, (1, 2, 3, 0)) # (H, W, 3, 1)\n", + " place_out_b = self.crop_conv.apply({'params': {'kernel': q}}, k[b:b+1, ...]) # (1, H, W, 1)\n", + " scale = 1 / (self.crop_size * self.crop_size) # For higher softmax temperatures.\n", + " place_out_b *= scale\n", + " place_out = jnp.concatenate((place_out, place_out_b), axis=0)\n", + "\n", + " return pick_out, place_out\n", + "\n", + "\n", + "def n_params(params):\n", + " return jnp.sum(jnp.int32([n_params(v) if isinstance(v, dict) or isinstance(v, flax.core.frozen_dict.FrozenDict) else np.prod(v.shape) for v in params.values()]))" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "3sDqFOVUMV5J", + "metadata": { + "cellView": "form", + "id": "3sDqFOVUMV5J" + }, + "outputs": [], + "source": [ + "#@markdown Define Transporter Nets train and eval functions\n", + "\n", + "@jax.jit\n", + "def eval_step(params, batch):\n", + " pick_logits, place_logits = TransporterNets().apply({\"params\": params}, batch[\"img\"], batch[\"text\"])\n", + " return pick_logits, place_logits\n", + "\n", + "# Coordinate map (i.e. 
position encoding).\n", + "coord_x, coord_y = np.meshgrid(np.linspace(-1, 1, 224), np.linspace(-1, 1, 224), sparse=False, indexing='ij')\n", + "coords = np.concatenate((coord_x[..., None], coord_y[..., None]), axis=2)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "e5efc933-59ed-4824-b53e-d27c7bd05c21", + "metadata": { + "cellView": "form", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "e5efc933-59ed-4824-b53e-d27c7bd05c21", + "outputId": "060e1a54-b064-4ba1-bda6-83313b816370" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stderr", + "text": [ + "100%|████████████████████████████████████████| 338M/338M [00:02<00:00, 162MiB/s]\n" + ] + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "/usr/local/lib/python3.7/dist-packages/gdown/cli.py:125: FutureWarning: Option `--id` was deprecated in version 4.3.1 and will be removed in 5.0. You don't need to pass it anymore to use a file ID.\n", + " category=FutureWarning,\n", + "Downloading...\n", + "From: https://drive.google.com/uc?id=1z3C6bPT3hhRkH20qPrqc_LrCwZjq69GO\n", + "To: /content/interactive_n_train500_n_total1000seen-colors_seed2_epochs32_noisy_1000\n", + "100% 51.1M/51.1M [00:00<00:00, 142MB/s]\n", + "Loaded: interactive_n_train500_n_total1000seen-colors_seed2_epochs32_noisy_1000\n" + ] + } + ], + "source": [ + "#@markdown Load a pretrained model\n", + "\n", + " # Create clip model\n", + "torch.cuda.set_per_process_memory_fraction(0.9, None)\n", + "clip_model, clip_preprocess = clip.load(\"ViT-B/32\")\n", + "clip_model.cuda().eval()\n", + "\n", + "# Create dummy tensors for model initialization\n", + "rng = jax.random.PRNGKey(0)\n", + "rng, key = jax.random.split(rng)\n", + "init_img = jnp.ones((4, 224, 224, 5), jnp.float32)\n", + "init_text = jnp.ones((4, 512), jnp.float32)\n", + "init_pix = jnp.zeros((4, 2), np.int32)\n", + "model = TransporterNets()\n", + "variables = model.init(key, init_img, init_text, init_pix)\n", + "jax.tree_util.tree_map(jnp.shape, variables)\n", + "\n", + "# Initialize model parameters\n", + "params = variables[\"params\"]\n", + "tx = optax.adam(learning_rate=1e-4)\n", + "state = train_state.TrainState.create(\n", + " apply_fn=model.apply,\n", + " tx=tx,\n", + " params=variables[\"params\"],\n", + ")\n", + "\n", + "ckpt_path = f'interactive_n_train500_n_total1000seen-colors_seed2_epochs32_noisy_1000'\n", + "if not os.path.exists(ckpt_path):\n", + " !gdown --id 1z3C6bPT3hhRkH20qPrqc_LrCwZjq69GO\n", + "\n", + "restored_state = checkpoints.restore_checkpoint(ckpt_path, target=state)\n", + "print('Loaded:', ckpt_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "6a9a8217-6d5a-497b-a020-498e19430394", + "metadata": { + "cellView": "form", + "id": "6a9a8217-6d5a-497b-a020-498e19430394" + }, + "outputs": [], + "source": [ + "#@markdown Define ambiguity measure based on persistent topology.\n", + "\n", + "\"\"\"\n", + "The contents of the cell were adapted from [persitence.py](https://git.sthu.org/?p=persistence.git;a=blob;f=imagepers.py) and\n", + "[union_find.py](https://git.sthu.org/?p=persistence.git;a=blob;f=union_find.py) created by Stefan Huber.\n", + "\n", + "\n", + "Union-find data structure. Based on Josiah Carlson's code,\n", + "http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/215912\n", + "with significant additional changes by D. 
Eppstein.\n", + "\"\"\"\n", + "\n", + "def softmax(x):\n", + " e_x = np.exp(x - np.max(x))\n", + " return e_x / e_x.sum()\n", + "\n", + "class UnionFind:\n", + "\n", + " \"\"\"Union-find data structure.\n", + "\n", + " Each unionFind instance X maintains a family of disjoint sets of\n", + " hashable objects, supporting the following two methods:\n", + "\n", + " - X[item] returns a name for the set containing the given item.\n", + " Each set is named by an arbitrarily-chosen one of its members; as\n", + " long as the set remains unchanged it will keep the same name. If\n", + " the item is not yet part of a set in X, a new singleton set is\n", + " created for it.\n", + "\n", + " - X.union(item1, item2, ...) merges the sets containing each item\n", + " into a single larger set. If any item is not yet part of a set\n", + " in X, it is added to X as one of the members of the merged set.\n", + " \"\"\"\n", + "\n", + " def __init__(self):\n", + " \"\"\"Create a new empty union-find structure.\"\"\"\n", + " self.weights = {}\n", + " self.parents = {}\n", + "\n", + " def add(self, object, weight):\n", + " if object not in self.parents:\n", + " self.parents[object] = object\n", + " self.weights[object] = weight\n", + "\n", + " def __contains__(self, object):\n", + " return object in self.parents\n", + "\n", + " def __getitem__(self, object):\n", + " \"\"\"Find and return the name of the set containing the object.\"\"\"\n", + "\n", + " # check for previously unknown object\n", + " if object not in self.parents:\n", + " assert False\n", + " self.parents[object] = object\n", + " self.weights[object] = 1\n", + " return object\n", + "\n", + " # find path of objects leading to the root\n", + " path = [object]\n", + " root = self.parents[object]\n", + " while root != path[-1]:\n", + " path.append(root)\n", + " root = self.parents[root]\n", + "\n", + " # compress the path and return\n", + " for ancestor in path:\n", + " self.parents[ancestor] = root\n", + " return root\n", + "\n", + " def __iter__(self):\n", + " \"\"\"Iterate through all items ever found or unioned by this structure.\"\"\"\n", + " return iter(self.parents)\n", + "\n", + " def union(self, *objects):\n", + " \"\"\"Find the sets containing the objects and merge them all.\"\"\"\n", + " roots = [self[x] for x in objects]\n", + " heaviest = max([(self.weights[r], r) for r in roots])[1]\n", + " for r in roots:\n", + " if r != heaviest:\n", + " self.parents[r] = heaviest\n", + "\n", + "\n", + "def get(im, p):\n", + " return im[p[0]][p[1]]\n", + "\n", + "\n", + "def iter_neighbors(p, w, h):\n", + " y, x = p\n", + "\n", + " # 8-neighborship\n", + " neigh = [(y + j, x + i) for i in [-1, 0, 1] for j in [-1, 0, 1]]\n", + " # 4-neighborship\n", + " # neigh = [(y-1, x), (y+1, x), (y, x-1), (y, x+1)]\n", + "\n", + " for j, i in neigh:\n", + " if j < 0 or j >= h:\n", + " continue\n", + " if i < 0 or i >= w:\n", + " continue\n", + " if j == y and i == x:\n", + " continue\n", + " yield j, i\n", + "\n", + "\n", + "def persistence(im):\n", + " h, w = im.shape\n", + "\n", + " # Get indices orderd by value from high to low\n", + " indices = [(i, j) for i in range(h) for j in range(w)]\n", + " indices.sort(key=lambda p: get(im, p), reverse=True)\n", + "\n", + " # Maintains the growing sets\n", + " uf = UnionFind()\n", + "\n", + " groups0 = {}\n", + "\n", + " def get_comp_birth(p):\n", + " return get(im, uf[p])\n", + "\n", + " # Process pixels from high to low\n", + " for i, p in enumerate(indices):\n", + " v = get(im, p)\n", + " ni = [uf[q] for q in 
iter_neighbors(p, w, h) if q in uf]\n", + " nc = sorted([(get_comp_birth(q), q) for q in set(ni)], reverse=True)\n", + "\n", + " if i == 0:\n", + " groups0[p] = (v, v, None)\n", + "\n", + " uf.add(p, -i)\n", + "\n", + " if len(nc) > 0:\n", + " oldp = nc[0][1]\n", + " uf.union(oldp, p)\n", + "\n", + " # Merge all others with oldp\n", + " for bl, q in nc[1:]:\n", + " if uf[q] not in groups0:\n", + " # print(i, \": Merge\", uf[q], \"with\", oldp, \"via\", p)\n", + " groups0[uf[q]] = (bl, bl - v, p)\n", + " uf.union(oldp, q)\n", + "\n", + " groups0 = [(k, groups0[k][0], groups0[k][1], groups0[k][2]) for k in groups0]\n", + " groups0.sort(key=lambda g: g[2], reverse=True)\n", + "\n", + " return groups0\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "328c2eab-74d8-4ebb-acba-5636983edb47", + "metadata": { + "cellView": "form", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 286 + }, + "id": "328c2eab-74d8-4ebb-acba-5636983edb47", + "outputId": "f5140ac6-9e76-4d2e-f090-88177d6ab36c" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Environment reset: done.\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "
" + ], + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQEAAAD8CAYAAAB3lxGOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9e/Bl11Xf+Vlr73Pv79cPvVqSrdaj9bBl2fJLlgxxJmASBxvywJ5JUeMk8yiPYxIqU1CFbRImzAxUIENIYDwwMxAqwB/MTChmyGRmTMBgMMYGZyzLlizbsmyhbkktWc+WutXdv3vP2Xut+WOtc38/CwkXYVR0lXtXye7ue+957LP3Wt/1Xd+1jrg758f5cX58/Q79s76A8+P8OD/+bMd5I3B+nB9f5+O8ETg/zo+v83HeCJwf58fX+ThvBM6P8+PrfJw3AufH+fF1Pl40IyAi3yYi94rIfSLyD1+s85wf58f58acb8mLoBESkAF8CvhU4DtwO/E13/8L/7yc7P86P8+NPNV4sJPANwH3ufr+7j8AvA29/kc51fpwf58efYtQX6bhXAg/t+ftx4Btf6MuXXnqpX3vkWpAX+ILzwp/9Sb7zPD94vp/N2Ej+6NcB47m286uO8Se+jq99jV/z3x1cXvib4DiC/nte3Nf61XM//+q/7/7tud974oknGMcJxMHjsw0uFUERnjl5kvW4RtzxPMB8nIsvvpgj1xzZPeheUCuwXq35wj1fmA+3+cBx5DkTFjP01XO6uWbJA25OMM+qIPN1z9e1WTzOoUOHuPrqa/7ofLlz52fvit/Zc44/36PHv5dh4NU33xxX9u/1+HYn7Y5P3/Gku1/23G+8WEbgaw4R+S7guwCuueYabr/j9pgRExCJidlMTkyK4bn99mzEnBTvIArm8emgYHTMheqFZqDK7lOS+Y8do1Akj96hC2gHqfPxHXC6KzgUBMTiQ8sLUM/DOqB4/iYWkcRDzUtvCkMe1bCcEc076kABJO/XcWlgQ6wRnVAqHaFPzmIwRAqO4bnA1RUBTARx350vB/eGUDGFjlNcUM/PJedHNG/ZEXHcFcziPtTpIhQX6HFkx1GJVSsGpvPTie+CUVA8d4vmb372X/wMDz30EOKOeaFWMHNsMsqwYLEY+OAH/y0PHL2fdV/RzUEK6pUinf/wHW/nX/zsz2EKdEENGJyJOMfxB47xtre8heOPPcOyFFxGKHlvVlDtCEKTHvNnBTFBi6AYrSsiziTKAqdLB3OG2aJIrBUxEDeg4hLGxNR45zv/Uz7wgX8OQANqdyjGemfNtVddT5cRtNBwFAN3JodiHZOBweGyl17Op/7dp3JCHcQwBO1OV0FF2GtDnmuM4w9C685ioQ883158scKBh4Gr9/z9qvy3zXD3n3P329z9tssuuyyvWBHdtcfuYTVn3kKJRYxJLnqf9yfmHTDUw7KFtVcKBZd49iKOuCMYsdkAr4gJ5honVKH0BiW3sPc08yUmKzeKu2DIfFFxLGm50D0XhqAe3sK7z5aMofvm2tUVsUJxQzrghfmmwv4I7jWNDuADjlDcGQYhLnTXKMb99VyMjphBV7C4f1UFddSdwUHVYtNvDKTQLW7ZRMKw5AYPh+IUh7x4kDAAQgc3jB7/HCdEZqMZDxP13Xt3lOoCFVycPjkisNhaUAeBYqiGIUWEpSheFGi4Cq4DrhIevHo+MxjMKDhHrrmWf/mLv0iVCaShRdFaUQSTztqgiyOtoFaoeY3dYs2VNOnD0FERVBSVkoYNTBSd1wWFrk7zjiH0SeOZNYGW3lbjeX3mzruZxDAGeol7dxekxfPTPrDsxlQ7r3vd62JJKHHPKAVBVCmS85xLw7DYL5bT3djAkzq88GZ9sYzA7cDLReQ6EVkA7wT+76/5q4R8uXwJTztvsNmzlliU5MN3YgK17sKlPfjMLBZdrOGw/GGrwzOJAJrzmBBRaniAGOGVY3PlZeL47AMt/+wAFZ9hpqZ18oS6qrHZKlBig/ge5+u5yeK0touCcj4Eib2qkptiPifhqfM4QsWpYaCAbnELXuZ51TCqia3dS96VbY5Rm6NmaN5lwB9Pw5JLxhykx/Vb3LsglHnBSglM1HdxtVggCYS8d4lF20Fy43ZXeut0d9wLU3HMBe/CVMExtAiXXHYxt93yOjSNujPD+zDkGJx+9lk+8Yl/h3lsDmHALY5dOvhqhGa4x17tIogoRRzvJTZfAUzo3kDiXCJCU8F7Y+0dM8FFEFOstXjEkshuyGPsCWW+4x1vh5LzN0LpindnauBdA6UV2Fps8a9++ZfDSMTTCjviM4aUWLManxWRzbpCJJDsnnX7QuNFMQLu3oD/EvgQcA/wK+7++T/2Nwgmga3mmwZH5rjA52+Fh99MqoYlDbyteI/NGUNQ9YRqbAzEDNXVdz1ukdkiOy6a3i9tycawBDwXwngojmouOnZP6+7pQTWst8zg3um7txLfFcd0NwwK41F2zyWxXtzSm84/tEA4QGysuCNmE6UIYkIvsueaZi+cRkgcsdiE7un13WMTu23mZvcZzeeZvVIJOFoTEakCNc4yo6O4pI0hwxOVOJgbogXxOFavFUfpKGrw0IPHOf3MaVqbsBLXNIyGdecV193Ie97zXXgipJIXFgZRWE8rPvDTP8U//bEfp9SBUgX3Eest7l+dxaLirrgYYh7nKD0MXzFEDDWHKYyPNaElMpUeRnhwBxfMc60NBeuClMYXvnA3d3zqjlxLRvpqWuubuZX8TBS0CEXC9DZVfLM9fa8/Ic1BchhpSBMFiG++HsslrMbGnD/feNF0Au7+b939Rne/wd1/9Gt9P0BUxKBCD8+b+3m+cXFFRHfjenfo6Wklb1OJI0mn6fw9xW02FKDyPLbRw8CQcAoHyR2r8+YhUTMJw1F6V0x0jyFI6OxzkLYXyYB7Zw7gZtulaYBsY/zyUHv+sLli9/BUDtry0Ca7Zs8i5MGTG8nrmBdMAJ/Y8OZxfaK2S5wpYXVUw5vMv/I8h8Xi75sTEuFZhmikwQ2EI8i8CCXQhMzP2QkUVZ3inaJCEafgAb1L5f4//BInnnwMk8A1aoKIUKXteQ6KBfQIKiNjtp0za37iv/9JWptwCIhvFuGmxn2qKl5i/kUkw7fYqkasP++SoZygkr+xOK96RazQN7PfWXg4MkW4/Y7b+fhHPzrjOASNUKkMGdLm7AYoIFivCKu0C//dP/7HsS597zaNNWLJm/VEj+4ljXiuvU2YxlcTBc8zzinFoJjnxASP7XP8TVjOvQysJEQUJBaTSTg4LVhGEWIzNCcii650Z7NovOfC9s3j2NjXQCa7hgGbYN48u3EB4h21vjmG5vFsfqCbEWRdRRPJyIxHZmycz2p2nXkSyVBGweN/4rwJuyW/0/O6g0MIy7/77GU+VH5fkYxxEYljzZ5a5uuaryH+M8t5IxZfmTe/k964z1MS8yfp7jcPTJjDKBdPdNTp1ukyUMXoPXmA6rgUrDvmRtAYRhHBdEJU6YWwcvNN
2jzL0GziPX/n3ejkiCpVStyPJo/rFWGiu2LmoLYbxgksA+NEwOcg1RBKPKMuuWGFns9k5lFxoZYSoUz+nZKbNefm7777PVTrgQA8jb+BieGVcCBJLr7rP3/X5umJzw4yrnMXI8STDlTRN6GBmOPSMe+xTzZh3x8df2bZgeeOagHnZyJ7TuHMbOs8ZM4TpZUTTes6k1oEcSXIrtGAWIRlhuVpFbQBw2aRzl/OrU5onhLaq+ABVPfA8oIWQMK4BACQ+ejpQecPgvU3lNxzeQ+ZnPJdltfz/khgPxOP82IzwmP5TOZ5LEzHI4412cyfiOO9Q6m7qSiZDYJhItgmM8Gu8UHAwMuMZJLV1xnHgOo0P724hgZWki9MaNEECpm2yRBivgz1gqA0LwGvqyG90XVBkTAANZ+zWxirTuHIFdfyUx/4H/CSWZcJ5kmVNfz17/iP+OQnPooPylIH1i2I0t4NuuDN6NsaG1OgaI3r8gnDwXTj22sp6W0NN90TfiY6KeF9LcOSJtCZEKuoFAyJ8EABjI985MOs2yqO444a1DqgrkzmDC50U6iRphIcF8M6kCSo5vm9SxjI9A2K4kwgNZCClU2WbRcr/tFxbiCBXNA92fvYUXvDgb14JmNZCajpNn+aIN2CrApA1oMdD7OMioY3NcDH3ORx/O7xn89GZrMQBGhIHzZbJfBCSSOlGWJAowWSkN34VyyJOIxGR3HaHAe6bexPcCEZywGbpwcZaghiFgy/7HmgLtAEUUfymLM37zhjk0iNMHvsNCRi4cWaoG0OOOezCzbt5sDDyzidXY5GOwQBqVjCfiuC5Dkk4VLJHS86o5HZwIEX6B2WbUK0g0wR5lhDxaAIYy9MSb717mgXhqVyw8tuoEua8yHocAfe8q1v4e67Ps2BfQdY6IJxbXQTbIqp6kXDcfQC3igWiGODnmzeaIKp0BaOaEkjE5kWcUW1oKUG2hsabkYvSus9vgO4GR/4iZ/k1z/4QWTKkKpYzHDcLpkExrxTTaiiNFU++cnbYRA8wzIvoF0Q62y4HSJEinklUtiUCGVktueyh0t4/nFuGAEBmdlUlU18bRrxs7tjDm5TpD0kY9DnxDpOGBMnbsxmWnyGjW5oN1Q665YxFBFuyBx4z8GxbhAmUDaeJkKDHuvYYkM0T5IIAprP4YOvE6zFxqpj7PLSgAa95WLYc8V9vqo8vqdHpnuw0BZPN5IJYSV7nYLklCAjg2FxdIKhbKY4DSpxvCZx/QWkbKDBZh7L4Lh2MNtkQAsWBnpy0gZGFkEMxzIGDaM4ftXK8k2GUzIdRnICVQTftnACCt0rQuHOT9/J5+/6IlPrAd+LQnEWqvQSBmoXxipW4C3f/m3c+8B9yMJY2cTYztDFKLUhVXAZGEzCALeeeZSOSQ+4bAtqpuDiiQneO6Vnqk8VqR2RHkjRGi5CXw2IVpYTLL1QhjA0Lsrp0ydZTTswxLZtU6XnNNY6sBgqlhxDx1hJx0V4yWWXbZ4ZbZeX8k3sMa/TvpuhdmfMlJe7o7lBRISR52yWPeOcCQcmJgYGvFtwSzVSao2OUCLvrsNm8XUFd0ntR6RtcOi9UJMR1+4BmTU+065QYAKGVOvIDDd3gwTcDRENezDFeWQgf6C74bps8gwEd1ERH1EWkESN5PXSa3g+gVLjqZWmCbclryVCBSPgvaUGQVQy1rMkO+bkXWaoe83FkHGrlPSQwBrcFdkC5tBCO6I1F3p6/j3+QBuJ8ANTiQexpz2+Y+Z0lwjhSoYyc65ag9iLtPScExGKZWiX3mlaNXpzvK1YyZItgFbjF0U4O51hsrMMoqgVuk5Bfg1wYGuZVzrPP4w7K9anV9hasD7x7LohLFjieHNUasBmMyiOSaWNHmulrTHpaFPWKqh1bCFIC/K0UTjrnWWSwFvqVFF6FaoM6ALOjhOtC7aliLTcmLv8FDhvuPVWVk8/Q2VCUcwrpaSx8TCOg8dxd5GZYbUEozTDMkuSocY55nVqg7CIp7DHicS5Fy9sA84RJAAMPoRjVcFK+EQITxFCi3jgU5JKKqEPEWejtoKMk2enVsjNmWx0EVyMCugEQcRKpNsydRVOPbQBSofqsZlmZmePMs+x2JPpk8OYLOJPqkipG+/sBmsJg+Utn2ZuNIcU9MURwhj1zBj6xtJDeO6g4MLFzzoALOSN0iTVfRYhxgJsazewoc/CqNjc0gW6xNF6QzsxsUYKnWJCxfM8RSjLEBo1lWA4JOe22G4KNbenAEUMq6nNIEjJX/3Xv8wTTxyHuqSiaIPiJXmCBUJlUbbQUul1DaUj1dm3tc0P/dCPgMaZ3IUTT5/gnX/7P+beuz9Dt7OsV2dZFmMoZ7ECRSoqcOGlF3PVkevRYUA0rktUUN2mSOXgxRdz/Q3XceCiS1iilDow+JKy1TkghVIWHNje5oJLLmMlFbMF3TxQbBH6IuJ0m5TSNDIpTWbyBlVhlWjHqbh0ZA1mRhEYutJ84uqLL0fLgEsYCpEQIUXWajYGM05t8VyHzXJPLiGQk5ZYQ8+XEJvHOYMEOlAyhVcynoksQIHmWBWKx2R1JHP8sNlBAk5jkBpiECe9XkRdweDFBhdxTOKfZlGQQFiWQMBoCbmsEnAUEj1vlINCEstIcRrJT6jhUjaGepYL2xAynoLufpDpLDzFLh4mxkQYSgkIOGsIMrDQDJsMRz3DfTfQ9AhljjPjFyFjSE7AHSmeEt7IW6Np/NzRUmZKACk9DOPMBKbAWY2MU52F5oNK2kV0T/qWNGSuiQDivjQD1CYliCx3qnbaAHW02LQY7p2exkxMaKqUrlhRypzCDUfJj/3oP+H3PvqxyF5OK6oM9Aa1HsRlwrxxxeFr+Imf/Ane9pa38u73/B0efOgY6jWkym64K29961t53/vey4//83/Gh3/7Q6l/7mgXRlEqxhtuu5VX3fQqvve930OVioowJUkoHvyRuzJJT55GEA1VnzdwbYE0WmGxX5m6gxk9IByilV//8AfZt70dqcncxKq6oWy6gFbFPShd6yBll0vahMRzalGNP84KnDNGoLjnJtpNQwFxNzUWM93wqmkcfM9GisVkmdcuqRkoG0OhG8gfjtPRGhs+jpeEZHICJRlg7Q5aw8ZIeMaNkMktzlPDuBSgF0FDmhcSVHalv6opytGehGTG9r6b4pnVYAXb8AGeoQx7YGM4AtsQhnO4M0/YJg+f4QiWsNlzg2nCpdlva6bsLOWrIlhGxnvVmRorC6zTBaoXQmeQSIK85gzP3ApIA6+YRLgiHjIXnQMwMXqPP48FFmqcOXWGp556imZTpjIjk1Co1Kly6UsOxz2588V77+WL936ZySYWwxK3CVPNuZroKEeuOcIP//AP8ba3fisO/MIv/GzMuRcQi9hZYNZ2fP/73sf73/d9xOLas8MSlf3S//pLkWruLTahWGZYOrVobFKPOe1J6Xz8Dz7OztkzmCmLbSjTgLnTvWHxiCgFigWZTOnIuiLFcRriA+HkfeO45uvu0jOlKfN2CJHTvCzSiL/QOGeMALAJTmadzWbluodHqpU5K7bLOCdO911
oinkWu8T3IOMycYoFCSVSoPpG3QuhM5hDL/MgudyCHwAytNgk9TYeN3+9ia3CY+fmiUtjFn6qO66zcSCccfHNNagW3C0UaCXj+fnaSNaXnvc+xwKztoB88JL3SxT6zIZDZ6gw5+lllzGUhK2zR2F37pD5fyKfvpsEzec1/2IT/wZHYBgl2WrY5VbT/kXaNecgHmEs5OMPP8p9Xz6GZnzuOqDmmHSW1Xn1q18JOPd+6V5+4Af/Kz7++79PYcCl41op5jSPzXjV4av54f/2v+Ed73hHajdiYopHWnfOpOy5hRnW5BIsm+cKiqvz6ptfxRvf8EY+9ck7QA1rmoYNzAriDSsFMUXN+dCvf4jPfPp2nnz6abYWSxYqtALaQ91R5vVuxt/4zu/kggsvinMt51mumzBAcdQk1ohG0FqzQK3MF+9zdmAOUnXvQ/0j49wxApKklVjG2F+tdtpo5o3MN8+pwT0Qdv6BFaSE15cekxcb33OTa87VrvAnk3q5F3fllyJzXUEqy3KDuwSEnBnYOX4R74hXpGQ8LZ1dkOG41Axl5hvL/9nLzniw3QlM4ppnaWjCkjm374D0GRHFcT03axTKpExpsw4COczX7UKISTRCAMlc+GxUZjzzVeSRhp6fzYzl5rJEXNLBaqCQtIJlKnidZWBJcGIUB2MBshOEKoIslGEhtKY0E66/9noeePAY1hrr1Q7v/773oRXuO3qU3/u9j7IcClOzMDxdmUpHi3Ho0KX8+D/9Mf7KX3kbuO8BTGU37LTkZaShroFWhFCY6h4ULWCZknz9rbfxoz/yT/iBf/D9fOrTn06nNCF1oGc5q/cQQIPwr//3/4M6FOpiwLrRx4ZZav/NufDiS5AiPHPiSd79nv+CCy+6kL0ocjZU0md0kQ6KjnTHSqAl91gfgYJsE7Y6Qre9wrWvHueQEUgImW7CiZCsdhCZS3NzQ+OIh7eLmomweC5R8SdFdxe6esaPiSY23jKNgiuZNCAEIDnhe5zjrjEKSbPnxZlIVnKl9J9GyEln8DWTb4CEAk2dINHYRfEOaEtYPnvw/LK4YJoQHw1P7Bl7y+51QizmeNSZQpyD5py/We5UNpOwawhkQ3DoJu4MJf/chWCOK+O/8lWxf0q2Zab+LNBO1hRrFrnkjATI9iy40Y72DgxRSzCvSI9Y+YZXvILrrr6O48ePYQKn+4qf/8WfD/groY3oOCPCElDtmAoHtg/wCz/387z5zd+UVxfZFCGrNbOgKqTRHve9cT6yETvRI+wyEhE6iBtvvO02XnbjK7j9jjuBKbQSllkSyQyB1JBmU0LOPRljW0cqVRcYzsH9B3nVK29CF8rn7/58poRgTvTHMohV0tP5lUS7ima6aV7Wsru+fa/Ohby/5x/nRHbAIQQvFgtHXChdKCY0TV4sDYBIj+IWcUQDOWw2rcuutzcn4rkkSnJTpLaHvgfpau6VmGrLBU96xPRuDbpohgBRGBRyhiiqlwkCtoWmQE1C695DnC7JrIf7IReLB1IQoohoRm4eBmWG6+JZl+/BrCdPSPfd67RN+BGYZjfXHbB4Fi+VjcHoWK4ZlXmTplUhUMSE0MViU+NzWQA2CjrKJswJFaBmrYWARIlvILY4ppc5lg1GP1KhhnTBs4quUugNfGrY2GjWuejAfoZt3dwTXmEoTB1GSzFYM6pHalHMGbTymx/6EG9+018I79g3YH1j+DaeYApDO+vzFUK9uEGaAp3s0bAJ6lCBH/zBf8Rtt9ySrSSEYo7OClQXSHKwVKdLVAlSKhMVZ6LYxGJ7wcH9F3Li8ad5//vez6tfvaeBCB6iKYmrKS5UTSmNCsYEgxEELNA9yuLZU0OAwbTRaT7vOCeQgECWus5/t03MGkVFAnS6F4pphgYRz29SbAIbzNuINNe8oxIEdKLxhHuQMEBCfsNkL8DWDSIwDyOhlcCOlI0qLjZd2cM95CZyD6pCddezJRG/hwhg5hYCmvc4b1dQp0ulWKADmSF9XnDW5KAbrUAs5EAyce1dDDVJDz3/N1uAWUhlWUwV9fWa2gbLkKyK4CJY97iumudViT8n+RTnJ/KuUjbedJZ2z2hnVoCissmlqxm1QpPwpmoCmnemyj2fuZsvfO5u2tjQ4oh1tFd80RlcMQORRnXFJ8O3hM98+jNcdeWVwZoLURBFhG+W2hKZl8lC2BSPJXLZaDdsd+5Ci29gJfGecM3VV3PRge0I/9RZlYmFRWORpkrVgnqoRPEwvOqWi7HSvfH0iSf43Y99mN6Fw1dcwf79+0k1Fyk/ywRMQL7iM7/VUBkyNExHppECF5nXWhC/Muy2rnm+cU4YgRPPjvyr330oq7gyJ5oQvVsJtl47A5Vvfd0lqDWs+qYvCA61QLNovqE6IaNi+4SHH2/ccd8OWqKG3EqDtdKXQfiUFt6zKQid17x8ydWXLPBuyFaBEfBG10qRErG2dYpLKO2a8xt3P8NkiofAm8SMiAuTZ4rHG1IKbgNvfeNBltIZLaiqSJYJqor1DoNmtW/DbOCpUzt88r6d3OiSfUcGpBtNVuhWxVtHWqWL8qqXLrjxyiFq4jU5FlHEl0gfgfDKrVoQhwK/+7lTrNZQbGSURcBmctERm8kGQVZAgbfdcjD4Bpesl1ekNNyiSYpbpv4ICfep0xOf+OLORiJ9/ImoOzCVuE4CHqvADa94Ge99zav57d/5MHfdeResQNVpNmDeGaRSa6GtO9dffz1nxrM8+shDFKkMo/GRzz/L4t6HKb2zEmUQT6VlbnDpDKaMg/NXX3/RJjTLvAYqMPZQ3IUAqyFeOLNufOyLz8Z105EuPHxqwraMOkItlcmArgzuiI5cd+MrOHv2NI8dfxTTlsrOBaKrqEswoBvve+97+eZvfjO/8cmHOPGshQ0oYKoMRJiYGDXifSkUNcycb//G/YhVrM0ooUTKF2NdInPQphfuKvKidBv+k47rbnyt/9D/9P+gTfHquFWMzrKGV3KbsjBm4NvfcBAZK1YjLBiK4sUYRGitoLSwhNXRXpgsPMrdx8/y4BOZrhpHRAfaYk09K7QFsUBMI7frhrmyFOMv3nIBqFGshu7bdvXzJjBIQNAZyhuBOJ54qvH5Y2dC3KXOYiq0pTN0YzKjUCM/PeeIe+jSv/n1FzNsO1veGSW+F7qJNBDeMq5eUBYj0ivelTOj8ZEvno7F7CRSimo1bwtkCaw6fTFQWsPE6C687voDXHGwhLc0pZdG0YJMQe75LGKSFgitLiij85ufO5ngJWs5ehQvaXE8SRaZwKWCN0qBbsqRK5bcdOWC/+tXfpUvPXCUoXtWBEaWo1ahS4iC9i+XLC/c4mf+x5/j/qP3U11DVm3CVilcdPhSXvPa27jn7jt5+OEH2b99kP/6Z36NgxdeTO0F086lB5TbXnYgeZVI3fbmjKIBtal4FaSNmzhcET557w5Pnp3ozaEKpc9t3IhaAg2Rjgr8w7//dk4cu58D+5asvEVPC4Orrriam197M3d97rM8/pWHcRW2tg9ibeLyi6/gox/9LWwoGIUvHd/h/sfPQFNqEWyqeJ1wGYK0VA
nH5IV9i0JvfUPJftttFzCnMyOr5GiLGoSsNWLLRg5cftkd7n7bc/ffOYEEisD+LaFMjaZKa50+Oc0N8Ql6hbGBNn7j9pNBKuGodF5+eIsrL98KZZQ0GGMDexFqcZYCZhOvvGrBTYcjz2zDQKfgfcnHP7+TocIUOMtj4rqEJPRjn12x01fBwEuhJaQuOLfcsM2hC4dgaZvRVVFRRBsX7hO+6ZUHElUa24NSSsG1UHtnQjlx2rjr2LMBlw1EjU/ed4aqwtg6pSidkeKKYqzMsNExjYf9La+9iIUbrkYdOn/xVQcxFxYWWnmAXiqymmDomA9IcfYV4YFnJo4+NnL0kR2+PDleOtgC7Y1SljRt0TDFkuNWC4pFT2NaeNs3HGB1VlALshENA0mNaRya4mo4S7Q0ugWvUa2xc7rRi1AodJ/wllmCLZCuVFPqIoRNfWx83/f+fX7yAx/g+CMPY2eDwNyRzsXdGAPaRnUAACAASURBVHeepbc1l1x4Of/LL/8K1778aoaxsJLO0KNfwE6LlnRmlVaMT33xFOu10CeoC/DkC7aKhi6sx5rcWlSGRae0wnoBlZFO5fChwisP7wOCi/mx7c7p7VA5VjFsiAzAKGt2dnbw3jh48ZUsFwP/5oO/wWeOTWDwa3efyRg+hUUGrsJiUWBRAyWKhXx7VZBF4YbDC665bLH5rrghk+C1Q42UZKpO6GNjbU5dFNb2wqXE5wQSeP0tt/jvfuS3k3sXHnm688ATq4jTepBsgbwjP97NaV04s85iHVL4luo9bcEcH9xa8NrrFjPThmo2gOgdKYU+GrVAHeDAtkL36OwiHjoBEaRXfv+eU7Qto6L0SaBnuqmmHNOV0p02GDYJp0+v6GWJZYsuxbntFVtoDVmsWnSiKRtiz5Mxd4rWMH4ZVVy6fwuRjonxwGMTx58cZ/YAcIpXWhljg3XNGN/C85ljqqgZMghPPuXs34bXXrsMY1UkREma6sBJqBKezybHFlBadnNQKKNBVUQ7F21v8fv37MQT0xRTK1HZ6Q6qlHWnly28TCHTLlHRVxDu+fSvceqRB1PQU6g4rRTqUNgeFtQ6sL3cYrmAsm+LRd3HT/30T/PFe75IrwHxrTl05/Irj/Cu7/1xrn7ZzSjOoQPCtFB0Cn6mmzJ3jS10XI3ek0tgCQoXbBVec80Si1ROPA+Bk2cMmtKkMXhUSs49HaMjkvDd3/PdfOYTv4Oboz6ynjwFSxN05SVX3si73/vPuOmVN4IadUZYLsE19c5cYdgbvOnGg1HbYp4kc+yTudNeL8rJsx0bM4wRxdmhe83jKWLClx45yxOnR1wUNeE/e9u15y4SmJrxyFNrrCsyKNWNW65Zsr1UShcoihVnMKF5hyo8e8r4w0dH5qSeq6PNoNTo3lMAdx54YifgqinPnO6czk0cbGIU0uzfdm68cgkW9QJBxAtmndpHjlwxoFWxJlk0ZMwtzi6/pFJU0AhEaNPE54+D20DJmN+L8cjj0UZMFg2ZUozUoGgcZxI24qdHT5xFWoUtuO2akDiLQx2UI1dsBXutBesTaKHqgm7J6UvIl02DEbYK2iuHL1buqGdxFx54bEJN04h5iHbaAnSkOEyilFQPep1JtSm2qgtVhQuuN659qTI3KBGf1YbBQ5gFKTuIsE75cmQohPHM0xz1HZ4yZSmVqQqTZcsSi2OUWjH16C3QK1IGvv/9/4Dveve7oqhrgIsuupArDl/FO97593jjN7wGJDIhtx7Zzv6rzqMnbEOYIlAotFQnybBAx5LNY4QHn1kzuTNYoZRwBHcdHXGP+TI6w0I5dMEyxWCOaeVv/90fYMkOq52zaFXGsyNPn3qWo3/4ZUTgP3nX3+O2193Ea6/dx0IdLwOuU/RIEOXsTuPkGWOm7546OdLE0D7gtYfXN0cGz8I24Z4HRp5deVQyoinznsl0uGC7cGCfcuWhAWfWpjz/OCeMQDc4tdPDK66iO8r2IBxYlswEhNeTzEE3Cgf3F95ww1ZAUAL2SXfue2rEW0ApNUesBOMryjOrhq4CTseGD5HM6R3nM/etEbfoUZkxuJtTPEKU5VC55rKa0tqIuUXgYi8UojFp8c5yCbddf5AmgUgcQYdoVe29IDXKk8aeBVAFnjjZeeJ0izp3nCefXjBtOWbO/3v/2ejjZ5HrNhWkK71G2qqzoorBJFli6/RwxIgVrn9JpVbjyosH3nDt/tQhdGYpNeKoGs0qtWxFDz4yxeVESSwKFuWzptHirY+Nzxw9SxenWsWIcEg8jLi7RMVejW5AapVZ/P7U/Z/lmceeQmhMzOrCWbrtEdMmcjOPQqxSCh/+zQ9TKBFymHP48GG+46+/nTe/8eXc9LKDke2oYGPFvOFj5dSZM1SB3o3Hnu2cXFnIfVWglNAopFDNUpqt7tneHMRa3G+J+SwL5YLtCM/wqGlY7hzlb73zb7HuI/uHJY8/+ST33nsvR+/7Mn/uG9/EX/7zN3PTDQeQaugkrH1iyyRqi6yz7sbJnU2FWrZt79CCHzn2xIg3QzS6LI0uDBZk7yKL7ZxwUJKy131bcMFBBQq9D1Q/x8VC7jBmM37N/vEPP9l48ESnNqXTQGORVZzFsvLyl2zFlCV1bYSoYwm0sit/8BLxlopz3eULyuWaJQeW+vFZiRgNMVyFo4+vObVuUXrao4uxeWPq5G80NA3qfPmRdcI2QecXFpjSS+j/Z819aF8cFLZq4aartiI1SaQfhyGuqxZ4zdXL6I/okS4Ui45JkiFKvItAcHVGS/nB2tFF9LAXnGNPrnnmZGPdKqadO4/FYje3qDLTTCuhaHGaRNYgWqUFJ6LZ03GDtqSFFFaUm49s8YYb9tObYhYbwqtkxVpmJEywatS+FUaVML53Pr7khJY4nhLKqF5CZGVRnTioUGqllsKiFMSNX/0//w3RxbghzcGERS08+vTIdOwMm5ZqHrX83pyq0cRlsoJZNBGtRJ3J669eIu40jRy8mVGiLVI8F4t1pRKdh8UtGo1aZHNEnGP3PUBfr6JDUas8+swTPPaVR/nYxz9OUeXlr/sPmPbfwGcfOIvohFCYcAavmBlehdIiJUqJEE6JugtBmFoD71FTk8q15HB5zVULlik8M/F8gwYp8Cq7gjU8BHQvMM4JIzA25+ijY/SW8/D+Rw4NHD6UltoKTQaUikljWQpVKt2jAYMI0TWoOFdduogHUmJBYRWXgpptDAUYpYENytlV53P3rzL7EJ5vtc62Yt4yxy1MY+HYVxqmEq2dPRK2ZVBaoHKKC6OMMBVqzfZVmWx+/ZGDbA+VKiPdg203CY7iwv2FC7YFqY5amaUK0Q8hyiZj02ejzmDqamgBGoj2zFgUvvL0mmNPjYwNqggPn1ijRbARSsnqQQNQRIzSBdNKo0WxylxA4YZ4jXp42CgEtYCPcLY33vTyg0EWevYNUNAeKsCGU7XQaCxMoQq9VRTn/u1gsrVIkNo2sLAereEkLMHcbq2IRrluEdwarTRKMy48dIhb33grBjx+snNqGCnpvbuEtKh5pwhBXmpBpEdj9GgywVdON2hxX5ccW
HLjS7fDgM0pRUl5ujtGNP2ogGgYke7wyP1PMblFld/C2Tl9lnE1cf999/PNb/kW3vk3/hqHLknhTlnwqS9NtD6CxQtPdIi6EGlpzb2gEmliYuljlsI0meXamvcdHYYrgSibpGhYnWsOwRUHa7bF231tz/ONc8IIHNxW/tLrDya0jwq2fRUWgyMsEDWaSkgl2yLIQQw08r3mKYhdBcmFLpgslFQqwlx7/dljZ3j6rAMLindYGuuVsF5NWE3dvMQisdGoQ6FroWiPFoFSsjFFj55z2U6qZNOTqTuUmi/nMK69bB9HLg8BzgXL/K0Eg0sJMlESk6DRPKPIGJkLg6aOtqikUCrWe/YEEbxOlLJASqNJZbXq3P6HZ2hTYz3mO3gkNufQgnSymsfzaBXmSW66T6FBl1A4qqaMt0C1nqGRMBfLUI0TJ53JUriUpaqxcUaghiKwG6LxfIipiToFicaszY3BBWEK0rcUGLKngIN3py9CorxchDEYvTN4ZWv/FoevuDLlvMEnTHVAiQ2GCrUN+dkUPJErRYwuiplw4ukRt0KvxlNnOo+cWNFdec1VlcMXDDQxZAzXa7XEy1qIPL1YCs6EWHtaswrWE7nC4csv55U3XY1KpExM4JteOaCyHQhqYZRmPHJi4ktfmTAi5exV2HR0yk5IBQcfkhw01ODJUw3DI+1ZK3WUIGFFOfnsxL0lUIy2SE+/0DgnjMCZlfOJe3YwJqqH4qyZourpxTpSB9yjtZMXpXjo6KEHDMqqLcQp/QzD9pJvfuUW1i06jIlx01X7Ay4uw5u5FZSe7LzQe9QfaJJ1pXd6DVgmHtbaCrtWtabKMOXOqyb8zhdPI2VFacqjT+zw2JOOlAWuK9QGuoK2jpVQk8UdSLDuloy+g/oSdNr0iZMWJbtNc+ONwPIsfZx42y0XsG8x8Odftg9zo+sczwsTxpYbkwwMNMQK6hFeoYXeFeoUYc8osMzOTlkzUAi2vxjoYHQR+iT8zmef5iN3PZt8QQsP7KCU6C7UO1MfqLUzSHAYTKF3f/ipERC0aMrEGyKdoQ60PtIW2xhBbsY8O02UpSyYvCEFlnXJcv+SySZuvm6Ll11/IAxMOUBJ7YVSmWQKdJEvCxErMHTQGjqGCkzLNGRRhrzssWHdFBbCRz51il5PYU3wWjAP/b7u/CHTyZPItA/XiaMPPcJ4ZuJ/+6Vfwnzi+NON37rjbGSkNArL8BaiMFEYHJ08ob8zlNj8rcG3vWF/1DhE2ggVoWVPjBCeRiFbFLc5XgvSGuh2IsZCtx4vZjJh54/Z6eeEEXB3VmPHZeBssZQDG6Q4QhB02ok8twi9K2XVKVvhod9w04W89AJBu6JLo622oUyBLGQIZlSF5cLy5R/OsgqtOcNQaF4RGrJwig8RazdDFwMLB/cKdR0iGIyulrGb8rF7TnPy7IjKMoxUF4yKDUpvinqj+kS3ShVnknjpiRTHAuvFu/3GUJSPJWLuyg7TFGGJGtEjD8OnlBmb8dYbL6BuVwoNn4yhRrqIbCLqOAd9ABujuWmpoS/3AVFj8sKiGrUtsYVhQx4rtIKYhd7PCbY8Niy0LeFbb72QD9+5pvs6JcORs54mAMNKYfCRsVdG6ciKlIZ31mN0/vVJsYWwEmdZF7QqWFd0FZxH7cJg4JOiFUpplNFoA6x9hPXIG19/K6+44WYGCYGSL8DXIfrS1hhqSJ+zjBSEaA0GdO/opIiscn4j5hbAROnqDDbx5lsPIECTyrPPGH9w30madZY7U/Acg1HoLIuxKp3TZ0defetf4Dvf9Y9YTxFQRu+WELiJS2ggWvbQILioQFKF4o3fvHNFFcEmQ4thWrn2JZWbr1nQTami+AhWI/ov1vAaoa/oGG9M8hoS9y3jwB+jEzgnjAApP/Whoy0gfbR/0mBKK3hfsF2Vv/yGfWw61vZ4d6FCeI3aca9ITYuvLUIBjQKLgiY3EJV5Zen0HmhgrAG5rAkygRahMqBMjIuRcRWpnceeWnHXl88wN21ZmtN1SZHoNRza+0ZtRvEoXOkYi2UQTy4erzAYPZtaRLPLUkKTMEzOVDuTFsycohMuWzCNKEHmvek1B7joQGVaOMM6PL9WSQIztBY10xwFYRKhZAYBDQWje7yI1JpEU9Am2QGVeGOqTswlNaFsFVzjnYl9Dot8pIkiHap3JlVkSMmqGFOtxHsZJDxuvkVJpaMaTP7alSoDhlGssG+haG3IcpsmYQT2bS3ZKdB1AbKK3PkkiAyYDpQaxrL1KfQOtVAnkCGCapdYQ4YwTFkEtDaWi8KkikoYpZg5w6RQDGrveCkU6ZgWio5ctF/5q2+8AHfj7s9WHn4oUtjrsUWW69QpZDFhVRm00iXWc8++EENzWlG8K0U6aA8+CgHveO/RwwKjeYiX1ArWJo4+3Dj2yCpSuETtRBfhrbccRGwAWQdxbEpVZSrgpVBN5iZkzzvOCSOgEA9j3aMmQArVOjdfc4BDF4Q1a1kJs3M29f9ZdFfJwpEa2YNSoY87FF1SmoWgZHB6J1RjtSCMDB4FHGH/hbHEYkUcKZV9DpSRVXOmyfmDz52BsoM12Lc/SKssY2LROy6V7a60wdC+BDwteKc0wbXBoCwnhe1KtDv1SKuZUpOkcyJL0rti20JZx6btvTA6vOrKLWpdcmZlyBnhrMKg2UClNbyS9e3B0ruNeNMweCZ4C/WfeaT1iihFI9fMFB2QuvfIxQuUufWZOGgIdMQq28vK9kLZZ+BFMq0KhZavSSzBQRgRPDdBhjCuJ0sUKVtNgdPU6MXx3ih1P1t12AiOrMYbl3/w+99Pa+vAJTvO1tZ+rOcblTucGRtelHom+veLGMWE3kJGblkjYUK+HETwHaeXaPuu3UNTkT0mikTd/jA11igDQpRkrykr5ejR+/nKIw+nHgWOPXQUWVT+5c/+zywO7ufQRZeztdRUjMabjp2CqlJ6D46rKWjFtaTAx+nqlBLN6TbvoC0lNAAe+ovFkGlZhwWFj3/ubKgyU0Nw5NIF118epLoLWBGW7RznBLaWkv3yPMgyEboXPn3/GWqmZnpRZE2IIsRCuWWVuTCvu8frviWUf1LW9F5R7fTSKdna202ToGv0lhp3lGEpHFjEZ81XvPH6feyrkZNeeuUvve7CaDohofHvYmgNpZe6MQ0LFqsJWxRsSm1aabiEyCnSeoXRO0WCES4Ca5SqcQzF6WYUFlgZ8V4w75w44zx+auK+R1d84YGzdBuRYgiVeNHEmrg7cO+oeIhbeqgCW5koFuW+Zo5ro5Z8G3OPIp5BomnqZD1epilzLaDhXkiCn94Ehs7bbz3Et7zmoihiQZnrjF2ir2CZPRslGnWUeBOOF+Mjjy557EFBuuFirCugMMgyG71UFsuBWoVBBG2d2h03Y1DhwAUX8ba/9u1YCSa/qHH0iZFTZ5wTpxs7ZiwU1MM4M1cQF4FuaAmN4pi8k3vco0oUrsk0t1gTsE4twiQ9Wn9LCWXk2XxHQnQgYXtrm9V6pNYFL7nsCN/9PT/Cq67dgmx5d3ArQp4QMY10j+61kvr+iBccc0VL2yBW
QQJhCpG+Tb5KASzUs6USaVMp2WcjyN8gcgRv2Rb/BcY5YQTOrLJl0uB4D4apaxgFiB5utJlJB+lCpUYMVftGqWbNg3muSjPSw3lEBRJpNjSIPRPliisW1AYgHLqwcNWhZbZqCnQQUniPjTKHEB4LvRSJgiOPfq869UDUs7az9GgMIgWX+ExcqSVeqBleSRnmJiVISKBVaNKRrjz4xJpxcr70lVUQSUmazm2lpPTsJWdB7NUR6Yp4Qc3xaojWiJelgLUg7ialZZwa3WjBxwaLaG8mpjR1imu+NDTy79YzB93h4SdGDl9aQQZEplAMpj4/Xk+evRMl1JXBp/RQM/aenZIgnkbMUReHEgVFIsJQ461JTWHqI+PUoieBChdfsJ8D+/dz0SUX0dx51VXbqDh3HjtLl8qDT+2k5iTLqd0i5WphjFuWMJvprIsKFWXPV95poVinERwUYknmOkwraKdTbKWcOnMaHD732buAyoDzzHri41+OSsmC8oqXFvZt5f3hXH1oSPVxFHLNPQnV+vy6R+ZmLSUL2krNd2o2ydRtYShjNrcd4s1KHiRidtlAMYoqnReuIjwnjIAAvaypolAjfTM3cFBXxHvIeCVZep01QpYNNpxaPIU/mVdO+Dq/BtgSXh65fMliCTpVXn71QCH7AVhq3iM9HPRYKE/QwSOb0KNjDiWbZM5Co1JSbJQ530CBQZblZo+2Yz0hWgg/OtGBWFz2tCCDLz2yBox7HhqT8yjZ+CIUdaVE+qi36GcXqf2OtBSaZwoQQJmwRsJSUpwTqbIojI83JctSsQlKteBkPBtTSPx/vJ0nUIOMwrPrKEhKC5lkWuQUzDuNRGlSsRICpOhOOHfyCbGNEcTolnssZs2Xks7t2rRTdEGfJmqJt+zUIgxSuPSlL+HaG64j0sqRY3/dkW1chX0FJjeOPr6b+vWe2abMuXubG5FIFEi5bDypS+hKdjUQmo1qFG3PIOOJ1J8oz554Fip86Dd/i2FryZ97y3fSTbP5TRCmX/gKODuodKwrO1fFJt+3JVxxaIt4OmGQipK/l3ibshWwCcvXlrPwaOBiFo7HBbPQECgp9pTQt2jLx8w5jgSAZLzJ9MYc18499Uqka4pGmseJTd6j3j1aWRlI5EvF+yYOvORA5epDy2BJi3P4oiWlRifhuR12tPWKXgTRQ6ekGCgqGochsLuTi6TapomkZUtscpGYkRJmQu2XLcxc86Gq0JsxEP16TOKVV4Jz9NHOs6vGQ0+NmDtaBxhbdA0qu31txEJEpH3EtUbTkzRQ80KREvfvGWaEZ84XpfaEitmUQ1NQgsZ78zw3azyCaIwiWrOHXTTz3Fn3gNslJNMYGfJk6ywPUZKbZToyUNNDxx7gkUeO01P96BgDhe4lFmOP+XQjqkFdGahsDUtOrVaRyjVjbFGeXCRYdsn3UszveHjVVVvsmHBwe+Lp050Hn1yF8VXPN1SnAIueOhJlolM0k/9oeNuqob1TGJrli2/jNXTRAzc0DOs+Yi4sl9t807f/zTSi0TDWIg8cvSk9sM+9x1c4ja3tyuPPzkZSedlLKwfqgl6chURloVmusVk+XojOVgZeFOtT9pSMzEfz7BXt4ZhUlCbnuBGIHnclFqjHG2AlVWuu8fbYoRtenT44jNnKShNiShJpxSil86aXbYf3FVhU4cJFYbfQPuSwvcQ73QSJrtj5sg4zoZSEtuIsBkXMGDWkoljJdmRZR68aG9CTp9D0PCUXuUXWoTcwNyYdoLTYfCZ4KRx/auT4iTXPnGqMEp7Ji2EyIXj2rR+A6HYrPdJOrp5S22hjHi/tjNe2SPaYUw3oL82RSi76Qmi0o5AG8o1Catj/x9zbxuqanfV9v+taa93P3udlzsx4bI9tjG0M2BhcG4KIaSEYWl5a2lBUNVGSRn0lopWQqkpJq35oon6omq9VvzSRGkWqWgGJaAhJHZCJIRJgA8ZgE9vYBht7PDOeN8+cc/bez73Wuq5++K9nD209JG2gOvuLPW/77P08z73Wdf1fx6YIawMs8JHkcGIreAlRmiX4/AuTd3/NQbcRKzHXNIm5BWWK4fFYU09JmPDUc8/yzJdfuDbtJA1HDtFt1uvd2GNlFLgexnT922FJj0qYE7lEsuv6SxPSLlypcObw1Y8deM2dyRseqXz6i0e+dKE8hrZWnBFC6dW25C8ffsSyrC+FYEJswMWL+P6kwL5InnnuSa7m5Cf+zt9nK5X/5Mf++ulOl7zckjTXmnra+2MqKMSM+8fJ5dPS/nsW7l4MSgmYR8ph8p43P8opbCWQv0YJeql2q440NDPWt9drNsl1SQns9f0BZwdunznf+a5zGSQAH0WIc0nKWE3D5oQb9OR4SNpwWjqDwUd+/z4vvLQkk33ysc90jfiSqOGpyKUA5vKRNV/5hX1NAjiUVVoSKyLLEwYU3RFQXSyDgUWIqzWNjIPU7rXKVH2l1gqyUf7ADDXCGLp9Hzk43/iWc179UOHhs3PmG6B4YfZdWm/bMK4kV/WNLEM3/Qqv3DpEC2pWenbSZSVW0m1huiLDwjp1KDPQysTmSiC0FQTCas6tDY8rxjDcG3jHUs7BmR38ACVoF4Wf//QFv/CbO1GHbtiVtnSi4zOX2IZL0kLW5QFfemrHQsEXdhxgVfHbW2McnbM0pjm9Bo3k0Bw/NP7zv/Y/89/9F3+Oy7GjRWrypeeP/OPf0m6eIZGTFQmcQuH/azJQgvLxKDv4t775Bue3Kti4toKbG8yuycc1IYQVLAstjxzHBofkxecu+fhv3tMi5wpVNwaf/9zHqcX4kT//Xqa5sINYpq4Chwj6KPQy2PYNs8n0KezBBoOiQ4NJ+oE8bni5EtCqX4Unnj3ymac7Y61nHkYpyhpMq3pPw1amaAHmy/6Yk/DqK3w9EIdANXikNbJpt/EqEUQAtYQE1FsjY1DOjTjCly46H/zcJVGT0aFWsKORxXkhpuq0dnHdhvalSKfOzmxtuQPlAEwWEoNuxggZLqrpBh1p2FlifTKmdi0L8NCpPeY68b1gU4m/RVEbTCojd85I0aASenLnzHjHm2+RXmg+2Q4yE43i3LxsWJM02djImBr/80BWWVwxiDahirtvtOUcTCXfrFDKPo2wc3Ls3HJjJJRSsdND486wpC6L8OyNvFHoIN+7IeOS+DMdxDeNH7h1k/d95CWyaxf3CTDIdgIfDJWeKN24E5Tu7MdF0QWUbvQm4Kt6YbvRRGfO1I1ZVBZaivHIG9/EX/0bP8vdT/0j2s0DcTyyZ/LScVJi7e3Gconqc9SKKu28B+bJKImb85EnLvnub75Fy4rNKiwig2wHPFY5i0lRWkph7I2zGxWbk7loP4vk7sUVYw7uXV7x8x/4Zc5Kcn7Y8HX46g0ZK8+gcJ6TScNv7eRlYdo5ZRU/ZjVKaF0JJralDuCiz/305NWPbDx6Z6Ng/Mqn7nFvnwK5M8mpz2GrU0W32RVGugJSTy1UX/H5+2N9uv85vxLIA8jlNqGJhd8YHA9G7mDzyPs+fF+7vOlDNBZNWEpgfbIfYBuG9Y5tlQ1jn7bss7m
mA1Es3Z3CWGIS6fc3N8YJkIpkbAahETyGsS3wDtqybQ726asXcor/JZfNUwimuSzCKgwKSmx8/3tuUrup53BOqfnCsG60lrDBCMlCLbX2ZDdmkTrMDwlXTniBqa66lkfojWi69XwE47hTSl1goslgUozctQJY0QFYqJiN62xHolOtrtchKJFEKexR2SrMHd7/seelkKQyCLLpMIqAchS1amVwRLTXtnrZ5tptiwE3zigoA6+NQj0LtvMD27ZxOJyxWYWovNTfSj8YNx+6w413/xD3PvMB+o07PPKmb6OmCfudGgPm0hO5I/rMBWZW9GDOCMaEn/vQBe6D7/+WOzqWTUi6L2zApvwHZV0IMyajJI++9nG+73u/n0996tP86gd/lTmD/+Av/nkeefhRfcaqlJCDiceGpWLeSCfYsLpWrrNkm4OkC+8xuS7JitvOXgsbTtqOp8JYamsCksP4jm+4o2liM/xq8g8/fI+D60DmpBWJpNZCHuP/EhH/f/96IA4BDCZJi0HuDmUyLfjQ5y555tmu0+xkbXWl5kxzKQwjyWEczfA5BDO1c3wclwNvIf6l4N0YLcGPeG/4DcevBO54JH2NV2kwOhxygjWN+CUZQ0m118o6NIpnL7glRyqVKyhLpTYkRDmY4R78wDvvUJphXWGqPs+056USkspZYPNU0FWYNfGjLlXRbEHUSh5lEnEfCsyJWIGYkzAn50KJi2goucuCnA3vK4bKtOeWTKYPpptSiRO8bwAAIABJREFUf6xIepwdK9Jr9CqUpniXwjI2vufdN3nfhy7BhkwvY42e28RXnmBEpbqcd/NsyaVtkDmJEdSidt49F35AIXfoNak5wKuAwC05S6cXWWQfetsP6OKIYNjiQaxRvFM67KXC5sy+0yKhTEbK6m00Kp0ZnY7xf3zoBW7cMr7zbXcoLsZiSwi/wSQYXEDcxragGcSuvINvfPvX8g1v+1pNie4QknOPEdhMDu7sLlpmzqKay6k6sbCd1o3uSXUjjw3qpNugzmT3A2dMdk8Vlp4f8LH8HqfLLMCzKji2wQ9+6y2y6rb/vad3fucLHXzgyJE45wM+CcyAi0vjpYvJr3/mnm6YEVAa0nhUcmo8nJnYkFHIj0JyVeks1HqzZGeIc+6J0QiXtt4NPSC+qq6PVa0ykVRvTAYRE7fJmblkm3aBR9Me6olnhTE5cZVhQR6CUZwSmizMnY1JPTNKVr7lrQceuq1eOYGd+jBmdoZDy6Q5MOTdj+iSPc8C1WSzrSvg1AObBzK7Hrp9CumPotN+RVwb4utnWfzyKBxqMo7rsMgidHlCKdIomKNQihrkDrit77UMVumUlngeuXd/Ie3uZEfj5sJsOqaYtjkpltIkTIixE3a5wltU0BFeyH3trVGxczhrE7fKjNCEmNLxy2BV5WMYwTxbugk0+cxR2X3ixFIXV/YyVKnuRmQoMKRpsiuZeHMu7hvv+/BLPP5I4+1fdc7ZZlTrAg7bDRqK8sIMq2OteaKr3WR/zlKYHCFVeDvbZJtwTKglFYnWlQe5WXJVjM1MoqFNk2BRRRNtKqzEQuue5VFS7SkGoaQxrZNFxq/CpLvUscXgLY9tvPmxhqGJ5IMf78w+XvH5+xc6BMzss8Bd5HAfmfmtZvYo8OPAm4HPAn8mM1/4w77P3cvg/b91d4lO5BdwK+ATpjEYeqEl+9J4ZArHNKvY0gHMJd8NglpcuaEIpCvLPBKW4pqrE2MQRWBan2tHRiGlVF+ct3z8SdWOXlfLTyx7KCZ1XW9k7horPXn4vPKm12y8/lUHhYkwYTam7ZRsctqRtBANlkvbUDC8bAwZxNcoeEaULoB0BnMeNcovkclxGpt1UZumGCzLTrDSk+8XchtiKEzU0rCBNzEoSS5EP0XDDtmfrS9vfpuKUy8SZFEGv/Q798mJmAxbEQcBLiE30yY04SdE0kfwwhO/zXOf/03kiy/MYXhzap1YaVh05u4cKzScsQdXl/cxf5az8npoq3wrlbPnIa9g2YNcdl8fDiWw2iE2TmUxEkMsxmbhH9Fl6KpmRCZPvbDz1AuDt76+8dht55Gb20Lxk1jYTSxtSDic/CuBUXaBtPgZ7kdilOtsRrmvFSW22ySn0zyYWSlTn8FwiYD8ZCZqDqMsU9oqzYWVfJXYrDg7swZXR6eUyqGcSmqWonDIwfmn3nFG/2NuIPruzHz3Hwgw/K+A92fm1wHvX3/9h34ZUK4NK0t+6rzs7y/JMFlU06RyEw9br8cwWTGL1oXQh/XlpiCJdsbCBhzx6ZkbDL8WdqTnGqchUqq3zMqwKjEOvlRxYAxJd1N6BUOmmNe/ynnrVx34k2+/zRsekYfATI7BNNWWc03xKNXHTMGUQ9TIOlx2qnesTuXMlSX0sYJVYQ4nlVldLEZOZKBBOlx3kwuz7eu/NSi2MATHxuKblngHBkzRmpLaT2qZ1Mwlx+lMC3IWXne7cVKmuNuy/aZoOpfffVoB02Tz5Sc+wnOf/XXpAEzaDi9ODmAm+9y5uLzk3r27vPDci7z05UteeOlFvvziPe7/3ge5evH3lLxLrE4KKRo9DZp4e10ep+mlkevAUEvycutZYENyW5q0F1Pvhg4LjE89eeRXPn7J73/pyBeeOYomLLtkx9rNpM0I1+vqxjicJDnJvoJk3QotjBaNHFIilpQxSNOYLOWS+WqSM1+pVDOYqS4D6Qv85XSqBMqUC3U6N4pRFo2ZYZwKeawKH4qMlXHxlb/+ONaBHwLeu/7/3wY+APyXf9h/kCir/ro70BGynAqAKBmyXkr0umSz+ldFR811kgDhVFZGfkpNNak6leVsobgzXAo49cKubMGTxtzFR2cC02hL257Bde+AZJlLax6Fr32j0Urja153JnptIArI9LuYLb3cUBAEGcQsS7QS17XqmUa6U3JSCDKKuP1cYhzdOwpFscX9hjG84aSSlP0kFIETlJkhxaF7X5w466FIpRFnpa8iVxsDs3bNDKi9t5IxqBh/+2/9LS4uj1I2GrSmE2rvIc49g2//3j9L9dtEJnE88uXP/ybXnYBZKG7KcGSJdkIdCbMPZhSI+5Tq1K2SUbj/5Ee4cectRKl61BbzYPayulSWXN2oVC3Ohrh5/TK6ceUp0IFp5pSYok9X5VpYYFb57S8cqZbc34PSnLe+VmuPLVuuIt6QspCmYKBcsu4BWCydg8JcxaTmAqT9WjnpKezAXcxRSbVnxzylDa32o1ydkgWtlSsAR9fnIL2tfktNtbHCdhkvqx++0te/6CGQwM+aSub+p8z8G8BrM/PJ9c+fAl77z/ONiol2myuvTzscgCyjGQIBvWlE8jSmybbKChjRbVXXKSvOP0IioCzlOk+wp2FzUsOIzVYbrRgJbJAhFDaZWFVE+EiJWjJ1kIQ7mYVaJ+98c+VNrz1TQm/oNihI8aaTapHnKBySyYreinUJa4UwlK/n3Ulr2tPj9OF0RmoUz7Lhuz58c/ULThOYuGp212GC7MJZhJwT+msXziJGdOU1pAwonuv1N7UMhjk+nP/tx3+cT3zyYzjGP/iZf8BVv1
iYBVgtFCv0PhcACc8++zQ/9Bf+CmWTsKek/BxkSLpLXeafQT/1AFrFfDCRk/HG4RY3b9+k2QEeeSMUBWRkccWcoWd7rs93XQKFcK1ExR1nkisJeQn/JGpyo3J6wPT+ZOj2dVu0MUan8sknLnCvxN5421fd1PtgCbnCOwZUGysEaGq6Qz9cIK+IpnR1KqTre9dia+WEaZNdqrlrg5nZFJjrtgRqJ0OQtAxWRAGWLNiYjDavU57dWHJx3Y42//iCRr8jM58ws9cAP2dmn/iD/zAzcx0Q/48vM/tLwF8CeNVr3iDhOQMvyg6MkIil2GBkkYNqWSpnVooNis1VmilENwOyDWxFB6VrnXA3zKc09jORjXcy84ANOd1OnX+2TvacC50vQY51S9uSp65giHd89Q0euVV55JZTwrGa5OWktsUHn5bHlAaghCn4gc6IKjYnF0c9dJNPl5BopqhCuzZOmbwB8+RIlJ/cJux16fFTtNbETlGKKiZZfxEJFMdmEEVjZUF25lyocxSjHsVxhwU//3Pv5yd/4if55O98kheffxZWpDjF2WrhmEH0wFK3ZYZx47zwyV//Rea/95eFeVhZstWUzBVbOoUlpgmY1Xn4zkPMq52sya2zG9w8v8F29hBtc2qbXCVofygCH0NaDNGABbkW/XpNISHKSS4rCboD6dIGzNSEYKeJAU1CuRyE1dGlkIop+91nOk/fe4nXPrzx9a876HfIuSLnBSoXNk2X1+3KysUIC2aRwcx9EOOMU7l0uEHu2ITYlDQUJchsa2qRMC5cE2PMUNSZwf0ZsC3D4En7YVqXHcdi6FL8Q570P7LyETP7a8A94EeA92bmk2b2OuADmfm2P+y/fcvXvzP/m//xpyjFqVZOQyNlaqTvRQop7YB6YeeiC0EjfT3t2S6lWqQShp118nohpjN8yDtwleQqkB1pS19fKJ7kTAVBOPhJRxAKzvAJb3z8wJtfd+D8zGg0empMLl6Z84pStZCQCg4ZNSmji/6rAWzE6AvcA1t8tMLGEvcq9LuearSTjLri1WxVpUF60fhKMvT8L8HQAC9yZGaQvlFjMLxiHPGsWglsikHNwGqQ/YDT9SCZ8+sf+iB/+a/81zz3zLO460NchzMKlE3ONuvHZXAptPONmPDn/rO/ys//1N+UhbUmf/pf/0Fm3Gcfej81KSU+C9YGNgs3zwvF71DOoVTndmu0esDOD/hWeeHJZ3j+/iVnZ+c8/s5/S0Mbes99QJRcorAGFXxqGhwOW0CfDqWoeuyo15SaxDSiTloUbJYFOGslVW/gXMKlymFqimyenG/JN77xBq9+ROUwGsIPlOhMk1yXNZU68lFM1OfgkUQ5w2ysSWS5VgMFqvTCKIOaG9N2MjdaONPGokOlF8lMrtI5S6EgbGNR6bKIjzVhloA5jUcfe+SPtnzEzG4Cnpl31///PuC/BX4a+PeB/37979/7Z32vOzcqP/CuV9FKoa98OqnCkfrpHPI4dbMXUTMMrnsDZ21rtN3xsjEjsKLbxuIMy8m0oRWiNGm3s1L8yBVtFT52jYQppWDMZPdJMSnAcjgv3Bt86NMv8flnr3jyuQmlMvMl6c2z4dl1e0+NgQXwUZk1UM1ZZToElUMO0W629llPxlj26eV45GRSKlMPN1VpQ5nMsuKocoo+YvkHpuOuY1QGnIr5S/i6GSISW+1DMwI88ekKaI0j0zSOP/v5T/E3//qPce/iErbQ/swyTJlB10OV9cCG4daoh8q+T97/k/8Dz77wBa72Azda4fLyRUZJDsXxocyE2jY6gcVG1mDHuVk7lE2Mx4raOjhsWTi7fc7+9NOUecG/9s6HqFXTjdlK3U+N+HhivVBSiUA9BUKaOdDXLKDm4D0nFd3yNrVCeJH4qZREee6NLIPc9SArC0QHfGmJ1+QDH73PxWWS3FfrUdMaFQuoLITs7eNI843ZDSt36Ti1DgW9pLoNmKx1oIDflb35xj2slzXJDtgac0q2XmYn68bILsFZyqyV4bSZi8UwwvorPn//IuvAa4GfUtQ1FfhfM/N9ZvarwE+Y2X8MfA74M/+sb2QGt88hbFKHYVVv0umF7sB2KDTW6JtK4jUqNmE7AXloDDocjN0C9oZzpBwUTpGhsY1slLJj5pwjM0/1KsBPIQCQhfNi9Iugl8r7f+t5bCae2vkvHSx2HKM76vtz0KbaGVkWYLVjvQnO89AB0TvHJn15rD+ORRn2XeNjX9XdM6BEo81JesetcBxFNNhUwGSOSZ7poW9zKCAjRH3uc4eW1DDVb43EixRsakHJZWMOrE5sKB24XyVXl5ectY2IYD/uDCrVTXjGRafdEppdaLg5fXZK3Xjp8h5E4ywHI4N/+NN/j3/z3/nTyolYoZiz6wFOH2zeqKa6sC0DawVrBYqLKi7w9O9+gSg7wzZunvmySieWm/wmiRweCV5dt2A6h9xJKwtY03tMFYN0K5LV8022WC3LQ9hOKdRNOfQ5As9KnGlCVJYyEPKSfMc33qZm8Hc/eJdykAGMqYkSR7PVMGbdVujoEayRvdOnMIliyTw6sxl0BcNEGOeR7PdBIJBcgXWlSV/W5eCcQ6nIC3oayN7dKcQY1EjtC6/w9f/5EMjM3wXe9RX+/nPAv/r/6nsBfZ+UQ2XaJH1SYsNzB3NKGFcUunfoaxxGAIgVeapxoFdltk+jUsg6sIMxpiuxFSHZxhUzC8Eki1J4ErCFVoehhGML3vfb9/Sw5WrGodDTNUInBIXsMg9FHasOXCaWBejiofxB0mEbHHZnLOUeBSwLMwVGZaobscyNY1UlVgxNQbZ2S9smlkWhITEUEJpGHfpzcwbRULDKBrsZ2zTGDGqZjFWy4KFA0hHBeVUgSNK1/lgwpvHN3/mDeAa/+oGfomVST4UZtw+UciRp7GPHqZwdzjFP7o+79F0AXjkOxs0hRWTIt+lT4IuVRQWbWpal5NsoRRVkxWT7ferpL3JRBzdvvBo7HiXQwmmzMQvKpyxTCjqbq+k4sV3YRRLUkiQHRoeYheJdFBsJ5868Oi7uKUk/QFcrdAOoba0bzhwKjM0IZkWU9LKl/9C3PUzvyc997IKkE2Pivi1bN0CQV0mhMjeDbCjHTQi/W7+u3auZ9BjMaIocC8nEfQY95FOpx6C0M/YMScwt6EPsiddJlknNSjkE3V/5UX8gFIOWUDZlvtXNGKFa6cxGAYlJRmDWGH1Q3Oi1UqLrtF2jI0s/ECw6ZQr48SkDRWZnj6RlkZrQArcqsMX13170YAb88ifucnkcC9lfHX+ppKLCUUo3xDKc2oCUixfij01yXFvfW2m8js/CdCMZXJehxKDkopDcOWZQU7EcHnMJp3TzhbvwAltCHZwNiW/C56LAlosO4Bhsm5Rj+hA3aJ0tlbgzElqVbmI6bFaU918kQ//gL/7vlJXVoGpyZ2miGblhGdTqtCJg7rhfMWZg5rImt0qOQh2mgzJXnBeB70UHwQGiFK4MNnQYewrD3cc9nn/2eXJPbj/cODxyBxtGXWEouXIM6kwohRFHeip5updCNYm+RleeAm5sTI6z0
tY+Pqc6KT1M3X51J3BKspx9tmLcg4aERVkEqmZXQtNEtW61FP6Nd5/z/L2Nj/7ufXokxy4bt3UYNsGqlJR1BaMaEFPRdyIxGCa/66zCojApCM0KNCdz6OKIoCwBUYBufLOl89BquE//Y2UH/ki+lITSiEMsf/5UkII5UbT/eHXYg9oaaZeS71ftWtZ9AV1B5AZcUS25CqdO2WfH2rPbET0o58peEzorRPhin3zssxd86UVVkBX35TeXIq5StKawgiemyFgryWkWM7NlEQWLJtTYVm1VauSsLZlDvQNEk+zVkkgFg7q7wJyhn4150j3oz7UiajL3IK0qvgwJl2yqNSgHyhf0SjvJhEteC47mQsvVvCzRizWtI2umhISGotuSSpZBTz2AkOQY1LMmzt9gjM4+A8aK7OpJ2ZK6MgUMCbcGgdu41nx4BtQjZ5yv1aJq4jpecu/ynvT4Xnnq+Wd5dbxKuoo1vUVJJEpUMlSJbWkGTMh65nprFHqauNgk9mtWqI7EmESpgixTB4fs5x2vlbmDe8HHvrIiisJdzFZpTqzuSl0uj91pvPedt/j9Z4JPPHXkqk+apW7/Kr+L8gQLNmNJXAQQJqlcR0TVVlNqsU0JjEomtSDFqy/58tCErAQusCGKuC/mw/0B9w4Y4HOsCCgwk8xX9d3GsCIpZTkl+VRRfqYK5vAgd8dr4BaMBbLVpS6DkNc9IJooIUbBKIww9hg889LOU18Onn6hi4dGSlMsKFOmJiXJmLL5lgBFaH+C7wokyWXbXMout3KdkjSB5oXoXcBfOnPdIcYajVOzYz9NDqwS0hUSclLcqedQxathmhYECsLJn4AtGW+XjDdmxS1WXuEC1lA4SqQRRyXd9qsjn/7oLyn0RNwoMwV1qq3XdEgddIiMqTiuOXfsaFSDK4QdlAHjTAUvw1/WWUBZH+yh9J5p1DM9aM2NkTtX9y64d3GXkZOHzx7i7vElXvP6x+UGXKEkCnrRGjdtQMQCXKGyPBGpfTlMeIjyE1RuYqdct6wKa0lWzJxDkdx5C2ieRA5GW0Ej6987/RQRiXnDck2P4mN506udrMmLLwZffOGK2AOOrpJaqurp179+kiRzYpaowK6keF/8NQKtmQblZW1I1tXGjSjTU28haxLyBz1PQLSRXkjiNGopaZVQom+cdvK+9sm6fsmph9DWLTfZqeuDDUOgEH56XTHUgZdDgNgnnjpyOTpPPLlTkNtrrJ+HGNTlLEvASlCynMRn+jC5cWRwmPZyX19KzRamhKJMAZeOUoi664XPsvhOF8Bky/GYODH0wZshqXEAJ8nzDI2e05OCwzhIeswSXZ8AyjDCpgJT7SShXbf4ScG4/ppQx2KS3L+44ud/5n8hqm5JHYaiUUtIT2DFKUNRa0xf6VBL0WiwpXINrjJp5ivCDHHXpjEcS6iHdZg7k4Jlsu+dHp1jDE7qSCc43za+8eu+SdJx1+EHfi0bt7Hkzi7R8Ex1COrJ1mui17ioYVifBpJdugigLit5uh7yFhKtmSsxCVvrTJyObmgrSFYr27IE2+l1Dr7msY35WHLrCX1cf+eJziyGd3lgWOeQr66Idf4woyvctSYz9TkvY+lOCgKpU+/biFwuQ5f025ScJM1AMvdXBgZfeUb4//NrveiGYU03rCqr/sD+OJMYa68pttJ1FqgUjrcEb0xTyEgaC4zTC3yiHANFkf3GE3f5jc9c8JkvXvH5Z6SWw2PRjkaNoLtu75ODL0IrQA5WVLdutrLGU0eNPeteXyJBI5bir1qwTwMTpWnh1NBYeQq5XB8HtOka5bSbLi759MyeLjBnXsusfdmSxdg0oKjyjKSc/A2rkNR08q79XrqBmZBZuXnjNt/9g/+hDhGMtWCu3IU1kdigmDr+ck4dqrPIG+962JMkSlCbU9tGK07ZCtUrBbktWynU2vT9ZxARjH1ydXFFv+h4Gm9605s4Ruehhx7iTW99vX61MJgFs3WTMpYeRJ+NCLvO3cv1vl+TAQNNSphCVU/eA1sV5EC1fU1USV2vOct8dep5TD+pestyXNp1ZiU+VRSL0ycwCm959YG3vWHjXV97xje94Uxt0iEqeMJ675IIpyd4kbhKEVhLqbo0AeESryXBqOCu9dlzBdKu389CB0jaK9/3D8QhoKk7qTmps1At1FBzMk64AhSLN3IJLCTE0X+vSC0jY9AwuoHoI7n0PEw2TDN+6/cGv/zxSz7/VPDEiwMyrpuAg3E9cah0comIYuUIoOdB93Bbj/tUx2BRlgGuKmzd3KsJWZIWAjsJ2ZhUiVksGCltQU+gJ5H9GjkfJgHz6XOYmvHlZo7TijAZa++NVfYp15n0BW4wqtpsJi7EL+3aS5GceHS9RoeD87Z3/8mVZajpyUIeA0dszXRjFPThLJPik3VaLTWyUpLPy8a//F3fRavOWd0o5tTa8LZRfWOzRkGhF5aTOY9czZ2c8osEk3v373FWk5u3bkM1xqhq6YmEuWrmWLdzrEaqwpJsS4IVqYkOK8x1S9L0+5ZYwbKp11fxAE6bCw85AzyYNoi1xilcVaGtnaSG0qQDF56C1rWcWlVwU2IWxhsfbXzNY+e85+03efsbDpgnxdf7lUGZkn73XO/4aqu2LKvwVJ8XKy59RNdDVAMkE5UXBlK+gXDhEa/w9UCsA2B0C1qpq5AimVRyrkqvrEwPUU7mEnb4CR0XoGIgF11pVJ9Ax70KuzP4p09e8eQzwf24oiPTSHGBKWOiSC7adTX2XjQK5ihEdrL5NViTpSsGK4wwoeJYuQ7qsFBeT4YAvXBRPgNb5iNZdJ1VBVZSnXLjdLvoQ87CH9KU4OMpWXFdiTk99VHDJDLKISzDbBlk4rCSaQaUgq1WtpMvwE7eBsRrewn9DKVonPXTGK3kXKxcj8LF1ZmYNhmxbn7X+FsojBSa3bYDb3zt61i56DSramJytTfnnIRXBZFM5SJGBNb0+r72kUe5d/8upd3goTs3mUM5BJSCpfx/k8BLXQCkZNB+WiWLlHPqktg41QnPDKU0Dd2oZpUylry2atcegKXkzVtxjitNmtQkZUM2cisqZbFAlWybvt8koRY6sa4MTShmgVV47Z3Gwzfgda+6wWee2vncyg80H1gpOqiW/l/16Fq1SgG8MIPljlx25yo5+0SUuNY7TXjWXpkdeCAmAdFuMglllcoNU7zWjWIYQyMVQaaxuzh6syUOCYfRCCan0HDDKCRPfHnnZ3/ty3z66SN3+y5wbkU4jyzMsqHHc9eBgAofG7r1zCe0Al1BWtggR11c+r6qrlYGwgIxbVR8jWBWTHvmoh4n4totg2pohO5NFWgLr0t0w6h1Vnp0M42aNZN9vW4HU7wYC9gzW/QQC0PgPgnUJu98Cd3Qp9fGY8IczNz1AcLZ0SIiy7ExfU0OvqGJpjDcmKOTy2LcFjNjIX9ETAGkO4bNdSt5o7ZCOywAt3TKAUo7E0uwQ4wr6I5RlQKX8OILF2Qv3Lp9i/d8+3dCa2wkeAcvMtG4KxG5JXGWqjgfnbZV3CqbG62oVJayX2cA7sM0HW065K7350hG
amWYFsSAqynq1VaKNUBUp3sAAnnsoM/L6J05dlW3R1BjEUlm1KxEPSezchXysN7YjLe/6Zzv/ZY7PHJbqZe5AkT8oM/EvqLUZUUv1BiUDNyusMPCNtMlPCp6XwQsJbXEKn/5yl8PyCQAFjs5N6YFdd3GDnRX72BLUYVn1Yl5AmAda9JFT5/UrbH3yQ1LXrxIfvG37zIluVtCi8Sumiq2WZz1rlhQcxhVN1PmrpvAXfvgVTCb0aNQPfEqB6JnpRfd1pKHK01neFJxDinHFz2Y1bCmpt3MjlljLrdAlFNjklB30simVbDNJJvoJ1uThxPsNinVMb/ErjZaOaqYNFa1ehi5bWwdjrsgsD0K1cb6s5yoE/xMWMasQGhiCSULKUpFI74CUwrhg41Klo2cTo5N5SJ+gsmcYh07FLbjxGyjeUNUhUZVa00HhhUlB3WBoenneHTpQMx4zcOP0vcj+0VwfnaT7XCG3escz8GsKucglKswTbSqhdGGc39zhk1a6LULa9hxY56NRVcOztwZcwFqrci6m0WUn0tCXLphTcafPSEZ1GykTYiJWVMJbBZqKh5zjibGhmVSoizWpjC806ZWNp+rsj2T8zSiTb7zm86w2Hj/b+yMOdj3hINxmAPNLsHBkusEcWuUqESbzCOqZS+FDNHAW3eiQLdXThZ6ICaB6xmnBtUr04JejO5rv5pOOXS2zRg7An0CPCcxcrX3Gr4H9OTeRfL+j9xjhKi47E7rU9bRFvSRRD3KoLQoxEHifSH0FC6Z+DEw26UWy6TqnRQlZLnkmbGKNDrTJi3EVEwTOj0BSlNgCDAtgIPwDqvqlY+lIwjtl7mQZUvhBfNSO014YSwQ1a3C1RUxnCydGapDE88tUNIJhmvkj1I4bIBDOazfOys+lWrM7AJgq+YEKwuHcKNXKXri+vc1LBxa4odUElOaAl/bosq6U6JiKd6/HjbKtlHLTQ52TmsHPKrqzmvjUEUN1rrhXqmjcPHiffpunN1u3Hn1HeFCB9gGbH2Zq7KxJ1NZAAAgAElEQVQgt6nLRDXhskzanNSuFVJFnUHdAu9JmRJnjVIpB6dNJ8bEwuTjT+3h6TtRNuY0ekyU2QeYAGNBtULrDzmhJztV7/UU0FhrUahNXdgKyy8Sk7nF9Wdi+FSsfW6MUnnvn7hN1sLmB2ImmU3gcyYDidbmcRJmjExsF9vTi0mDYUetiFXTTTtJnb/C1wMxCRiyU5aeREgvUFbIRuSGNxjzuHYjp1pnsHHiWesY3N9lkf3Ab1/odDd0C6/AjokAKAysGcYGqYThzsIXUhQTVmhpUJA/OzQ2ZoZ2taUfyCFhzkZRPDaFWgCSkzxfOuDBIeX5j5Ern9fwBaxpvNQu4BO9cSXJI5QzhYQYSpEvFPUd2iSbM0Jmoki18LLMRRaioNKmykZxlOQtLED5CTBzGZGaUUblWAeHEIfvnowhqjQxRmgUrj7kWeiBt0rDmAQz6oltpC4lW8aEassbD1lCvQdpktxuSd1fDmhXCcxG3Rzzgm3G7dt3eM+3vme5+4xMRbuPIrR+rrq31gxyKnDluJEHJ/NSeQFDiLobzFHFMMWlnJ2OOibWFNbSyauG2aCXLkl2qPciptGLVgABepMdqRvLNok56UVAcw3Zt+fCD2oxbBmEIgp1eUfSC9PFMtlINiozne979w1yTH7hE0EfRu4KhIlsQFDOJRLKWOBhRXRtqsm5YPKK+LKWv8LXA3EIgAwWOZ1oTnN9YE77loVKMOZUkq0vkIwMXrrq9A4f+ew9LnaoU7eXLW5XZF0CnTBnxuJd3fAtV/Co6KS4TiZazrQSVBp7kYZgTlZzji+1mZj5nkrmKWkoJ1HpQKcveVQWDeiNsE5mI12yPAtFmslLoHaT2A2vpgDNFfiZS3tfw+hlpQ+lPPwe29IfBUsDLYrUnWFO9oFvyrcbpFqQTT33I8StZ0zaGKLFYsWVLhnqSgxf0VVKQS5lEYHh1LIUdGmQvjoNkjwUzpoKZFRookah4gLPqsGp9MWzkdU4Pxyo1bl/eQFZeM1jj19z59GlADXzhTXkcliuSLNVwNIPYx2eEiyt0x1P2Ntpv9dYbyGy1VzvfJj2O+UxptKpinwGxVjN1UjROhdmU7oCcaiqFh+VWLew2EU1NVnIoTCXY9UMaiY+dBBIka0m7W3XSvfef+mMF+4d+cQXjKur4PKoFTbHPGXMckVy6NIYzDJ0cZFaZWa5Dl75Sl8PxiGQ6MZcar6RnWKuAWaNXiUL1jXexEzu75PnX5p88fmdZ+4JDXdDVmSHHDCtUnysti0XflNlsCmzQemSDI/1wZ76MGVJ3HWanmqd9lQFtHMCbKXKq6Go675uwzRfcVEKyrBwIdPpqDXriEXRAbBoSEAmExPNZqFUXyUP6yWKlYyTNdetWzDbCNdBQBlr8pBnPU6UJkGZRhbt/MVWzZtPmBJiFe/02fAGW6qfwKKrn6AshRyiaXuAF4M6wDbhktOg+gLAFGzhJphBGYCVEk5laowuRloTzZdS8GWVaIpwRhqjdx5/7eO0ww3e8Q3voLZNVKlL3hM1KVNhKHNoepquODpiLuEP6OFWqIyUj06d0lREbIQNFuvKCo9ctWO5xDZGXNd+zzVFsQJN9J7UHGRbSr40gUusvIVU+QpWVG1eKp0jpRojxgpD0efuFDeWrt/NrTLmoNbBw7ca3/51jadf2Hn27s4Xvzy52uWwVZ7h1NUl7zDFJoTSm6IoruyVvh6MQwBbadW5ghxOTcHAUMIvaURN+hh89ouTi6vki8/tFFeYqIVcgjMFGlqBMhUIkYtKE0XjUlPZEP1DUejlfHktOSUaY2v/TWNpdcFE3bijCC+0ixNiK9aTrukjVMs9JUJXv14WhZkmkgijNwmSNaYsimDqD5G5gcxQQIS4A92gLuS9zE7nJBgbhK3vYdpBcyQcNpg7mQJEbcWFm4FnoSWyDJuzzarotiKRirtAvI6RZbBn5YDosOxaGwS+gRXjLW/9Gt77r3wH23aAWnj3N72TpfDBCGTGzVUdJq39Bz/8Yb7tPd9C9Im3imXy+OOv59FHHlkJu9Jk1BQeMnM1Q+mFx6cRRXHcaUnJAtEJa7hNUaOxDnlD8eMum3Y/ScKX+QpW4lKRF8BjNWKba7ocS1Ow8gsyJ3OuNYXQ6OfiUuykViOlZ0n5+mfRVMVJ1ryEZWbr709jr0jolTqM3JLHH914/LHKnS8N+gg+/oUrYTqnzEUQsJxF7A76OZaW9Ct+PRiHQCLds1cFYnLKaJMQKGpiM/jNz92nX8LT95QE7GUFMK6v8FAEV1+jkKP9qM4lKV4ii5BwpkzX5FHWw8a6JZfs09YDE65oaOXzv6zdrzlEaU4AZ+aklkGOhtm8VrNZLAvwvlSDJcFCum9bYOQpiWZRahlKjskpmywnw4x2EsnII5RWa6z8xFWnbdIYFBPbQW34nArDmKlbl5XEvMZ38yHePtbrTa5uAB06tla0XOgE0xirfDXQlIALi+h959/9i3+WR28/TJpR6UTWVe+V2FBGYIAcixG083Pe/vVfr1S
nchqjxbZkWfLmBeLmGnOsBAJu8tpFZxhzwlZiGYHQBIVrInRJqc2caIF1Ge8yhUd4FqKcZNC+TEKinDMkIoo6OQk1welFegVKrKCXpJ5qxE8/c4fZ1GatHz6xUPKwQvIXTpNazcxyTcBOMMTmAKe0qje+WrqE85r0dD782aC5Q3Cdk+iWWF+RdfmgHwJwfVsXX3lvWq8YY/Jrv3NF9skLd5M5jVr1QMdJoEJQhvL7bSv4HAymABRbIKFNudtScs4SVSKTFepIKgQ0LCiukTeC65JU1Y4Zp+zmTFY8ucomwHFrzD4oKx24cEq31ZscvvZAhEGoKSAhZTKKtZ9GSXYvUnmZ0zG2wvogVqXNArjQ8DD9nBlFxaIlNXqmUVth94TjVALu0gdZKgIr10FhLVe9uW7/hx5+mB/+4b/Ak8/e45d+4e8Ia+lgZ05j4Ev+ohtPoGak8WM/+qPkTLZyEJDaNsZecJ/rgdKqkF0gK00f5rd93dcBQawHPrOIQD3t/SSsROpeBy0KWQV86aUwbCTTE+cMi0tIXwfk+vnqktQWgaQ+ILNrmoyGW4i9SfClsAuh0+w1OTjYEoDhUgNiCDQEHeCOIuFPug99yvC2LrbohDcBx71w8PUz+8sH+RyaGNzLat9S2pIjIV24fCCexutf40QUzreHuOjw0c/dXbkNLx/0iiPfXvHZezAOAQPMyeWTP3mii8Ov/e59nn5+1yg4zsWHlsBGFeg31rjuU8DeTDoVn38AaZ9O5Nq5hNIg911Zo7d2+fSTK20yjnrgi4NVccsjFCbia8ILHGoX8kuwGxyyMNtcUm/dZiBHnfWjUo8t1HacDgfJ+E66dncBObX09cPDhoRDqh4XhmFetMOihzbdlkQKbDnpLAcxN6oFYUUcsoPPhGorHTnxpgNr5gquiCMP3XmMr/8T38Njn/t13vWO/5RDa/z4j/8kX3r6eXJzuikso4zkG975Dfzwv/3DxISHb9/km7/p3dy6ca7b6ygMh2ZL/CRxj5OM2UnfVky2TE2ECjxLSWqug1MvocRYEXhUsStjaOynUGxnd+dQYOyXC5Rj6ehz4TWyMusjNxWwmmWBx5oUctW1G7qVA4WbuIwV0uHjqvsuEDmkbi1KVGbGqq2bCwheU00OnHoNGKfOfnVOLiu2JrsFDFvDZtB8qB9TOnT5QmbQSpCl0scGZrzmEbVNx7zBx79wqfXgaJqC04k/5FF/MA4BWHVYGsf6ND7xxBWfeeq+IpVdAo59S9jL6htYH2hhV+LIo+DVOOtBN1NZ49qtlQMomq154QSWZg/CRCBaGmNl/JWqj0LgAr7mZHagJqXa9X7JperPjmdJ6RCuN8NtrqBh3QAxYNyGdmlkl7+geCyNQEEF5jpcuut3nNHZvOlgjKmSU0wOyuPAzhQVvp+Qu4CIxf0DlgfdBHuALeqwm7CEoSlo9yVAymT4oJnoy3H/JZ79zD/hxtlNbtx8mHYo/OiP/Ee0ek56oZ2d4VUPXWXDtjNGXMHVzq/82gf5ru/5U9y8fQNrlVanbva6wj9msheIeqAcAzbRgyMELvoaoW1JnVm6B0OMjvXFfCxALC0ZyFcyGWJvQrd9OUmkXboJnwdGnWvUvqKE41vF7IjFmm5W7gO7cTiDY7gi6EuDGHg00ZsGfoVKUGMiw1fqMlLLub6/TSKDyEkton/pC9ArkpfLRj6UnclQCK05+zTKikNb2bBUl6eGGWzUk3EQw3jLo42vfmzjY09c8rknpS2tCaMeX/HZeyAOgcTIatiofP7eJR/+vStmDHyskSqAJZTYM5ek1Ch91U/hlNawXY5Dc8NqUqaso7HIa19dMzkSTElx2YwWdTUDS847V5rr5KAS1FyId8lFO8rc1HfIWqSj39c/G4VeJq5WipVpMAhPblw5lz05r0YWo3SUD+inCjQD1Oxj1jkrlekF9mAspgOXui/Olqc/fTnbpP0/AUzFYY5gt4a7ALVSlPQz1lhdMmipFOGrvWr9GMrv62UwZ6fPQUby3PN3efTOI9w6P1C80Uqlnh/Ag4v7V/TnXuTWnUc4RnLcj5RW2XentQFRyTbwsXT8J+en6QO9h37mzUI4z5TT0/rEipRvhwW65xFyM3ysAI1ZyCKio3ojZ+UUzpht0cMpk5Dq5JISRXmRNE1JsyguzNQy1EPTZXUYXYlIjcEYE6vGTuAxhIU0RYDjhu+dflbYJoxasAhOzVVmB0XnTaTDtCPmitAvkrMw2ER9MrXL19V7kQVsELNwTGgLNTdLrO4cZ1HMTR14kcz7nV99g3d+1Rn/5JP3ee7eoI0HfBLIDO7d3fnFj95nuvL76/Jp2hqTKS4JaPPTEYs1HQaUIK86eQPyqnJEoBDHsdxxa7fMirWJd+kFrOT1qGULXbZikupSOAunW2d4UH3jVGQxp0Q2s4rOORuD9AM7wVkRhiBAUCKlanKA7QalGr1OJROXbSXTKB1ZyRhdh1JzRndKXDBblY6iIL9BgdyFHUgB42CdPmO1CgdHCl6dlkM+hwR2o9adkgfdTKbQ1DmCzcbKuz/iszLvBzFWTNpIHn74YdqhMTcnbLK1TayAGbdunjHPCmN0qdk2Y797j1uPnuOhIhGGdAfmHXJqQpo3GHVQXNRWzrZ8+QutC4GQBWPMiselVHWxDoBIWqKEXoYQ/JDAKCM4C6NvBqMws1OnWJkKzFKJ/YJ+U1bfSmLiGWklrxt8MpIag74O/5lTJrZ0vSdVUuFjVWR9pDoniNAkmWUlM2nymiWvtRxl2brVqyhmIvskqjGLylMtOxFVeEZTxoRhRDQoR2wUdSlGsFulRi71aUBO3vu2m2QGv/jRu6/4/D0QsuGXLoN//PH70ApWpdIbnCLDANsxXzLiqXc9ZypwARMfWoJ5JWuuObSRlC1guURVNmLEKIp/rgKe5AFwik+adWzCWA/NlFCBg8lJOE57JVDqYCsqgbhfjB21GXdb3C36c7x0PUgxsTkYe9fPMPNaoXiKfrKuD86wlMDFJ3OcLcxDqrM8CuGX6lFYSHqHlhQpqUgKdYWfYMv5NgfFnN3P6M6Ksho0W1kJVR9UMgnu8dl/+vfZNufs7IC3ZIwrjn0w7u3YRXL/8sjV/Usuj52rfee4C81upsnkp3/mHzGjr0ASUaHpnRhotPWGmUJNsy8FYZ/68E5bfYG+aFiDMhjZELYpsGubytrLTD2x4cJMQjXrV9GIrlXDUhqG8GRnMsYF1A2ujDaSar7AVSOmko6W81ivTVTlDM6qySELvroMe4E2VE+2Uddtv/CduopkTrZ4nBYylQ0zRjnCPJXhxVoLpg4IKtD0sw2TRP4KRg9sdjJN3g76yhJYJ6JpKh1eibNKLRvf865HX/H5eyAOARYXq4Z7lsVWP1wjKZyTRZXXmp1QCEiW6yqytEaZyponCyE+RwlFRdx/iSCYzBmU2HXrDGOa083pyB2IGdWCEuqmk9rNKf8nde8a8+2e3XV91lq/6/rf9/M8e+85dHqgU5i2NBHSKOABJJqgVRN4gxokBRML0TQkvjG+kfBCX4ixRANKEAUN4V
SIvlBpEKkKFlDOB2kHSmlLZ9qZzuw57Jl9ep77f12/31q++K7/vXdw9hSrJts7afd+7nn2ffj/r+t3rfU9rhClZkbOTRedDUaHajJFm4WbaKtTTUbTlP5qIYoQW1RuzBI9pp9KqcmjgkKIcWXTgw1dZRemYmJHFP2l+naO/kQn46TNNmK1n90GWcY2Vycl6+aaSEhSa2n1yIClwo5nr7zMN370Y4wYRAWjJpOTmcW2JtMg56SmbkQ3uHtyj2/3bHdDwp3i0ShlKzAPPGBrcDVOoSGWRe2QYzL9lmaUzL7Jq4Kls5C5Aq7CNiyaoQhwHK8bWxNcNiUqY6bQ1htlWE0dhxqLbiW21oeL1zuipE0FlGyj2pylgY3HGz0YkqO0Z0QtlaOp30zrKUMjv8XNTboxEmINqVWXSknMi5mBldKiqtScXFFspmLaMN0lgXO4w9oUTb/080wXy+EmCnRij+Kxr/Tx/jgEaBPbQj5oupzSaEXUwpdceGat4quOFGsxULF0hZQcgZYL2n7r3RdQLsNHAjOQCCRU010t4Kmc2JTibPrSjZiSCUvHozc0On2W82RrN1sYjBz4FKYg1LeoUjaCUdgWGieHEP+taPpHNd3ZYZHeTyKh/HQSbr+ZFfhssMxOYRYGiQRO0Gj7Euqtm6WUt48uOOvgiuqDw5ZUcGlNve7OB15+CdBTBe/8AYKMk5kptLMpv+ibw6y43y9s24WB4tyHziVukepBC3NiSpNwm1iQb7FRPPHtFtI7sBjdY2iugM1VTi4Bvul9uHLoZ0JxZSNavWglRSGAD1XXN2hXJTFQlqaGW/VXhb7+Y8KVWVvAW6df0qlIspFdJxbUxck2fIlNmsqckhCDzEXawcyeNsreyRhMAb/ZojcdWIa2/qHrGIWJ1gpqBRkhnGxI3OYtnLOUDT+tWtL8lT/eN4dANAWCqd/NCinoSnthLiGj2aot0IV+E7WuVCGXFZyrWK7KZ2Y/IKl34qUaKFJMVAsrWKKN7DZSScipeCYdOkWbjG7ahDQIecPSkOd/2WOHfIlCkDacpooa6Y/M7jvQLjg69ddJYizSVF5idkvEeYcCBDrZphozsaY4bweiXrdVYEPpueHWabsSMEkjcMNLAg/93FXwhU/8De7Gxtd88COEmZ522y4FYWvqbdN/H9VpRST0n58+vSci+MG/+TcVRjKzD+guRE3lHrKCPAVSZjW9QbGZxC3mfXO2A85p85YviKm69lBSsjePWO5qOI7EWy8nZtgb5K1+QPTTuka/qo3l3JgWTBkAO1IKNh2bJVFSdXrTWGrLHrFxY6AzlXsRS7v/ZMMTJREvJUZHx7LfvmW4JilNNDJ+JQFbm6oWHV2mYpKgBW59+NbNm2L9CzTOtTEe1bTv9fE+OQQMqmO0+kbVyZxUnmq9sR6xE1je6qJmeK0rrpfCSnfXCGjduVc9WlpfhNamW7wo8/alg4UueMtdwSYpFJ+Wz7atSAq0TjV24vGJXSaV3TKNvkLgNTAkRtZSPFSJyKKBtWXaBxV7LV0DnS6cJjluWrZU+V2cOv5IQZKzg1ZazISeJGcuzHWBGgNzY0xdKFbyyd/+Pg7mxeuf+7t85EMfVtNxdUCqD/bLwKamKRmg5I2gUO+hQ5nMXfevvMSP/viPc+1evwzwVgLebqajE5mlqViNZSgfgjJhA0tPebd6/KW2FX3jvpPjaB3FRdmje/NWSaEORx0w5Up1om6KwlZfUrrx1q2gVUYkmzCrHoNHp2llXLYUI1dSUzIUCZfuWC3kiFzyg7jk0mp89ZYGe69t1ntCCZMw4QJRWifeOceSjiGgOtgkS76IOWa7IKW10fUpIHFVdbT7+34SKGyTtLcMPLYej8G2m7VCT+pbmo1Ovqkncyn+0rbqxNYiYkp3YP1CouYcw5VdsEwo7Tq7BUcBGVYCl4ZJmjylPZY3O1zRTtBThOK2hisiPEoZABXRs7Ge1KuiL57Rk0lonzYFfwaqxK4FtXoHpb0DmS351GETRGvf0bSyRb82LhTa++mOFI6jilrKG1g9TZx9sZYXa0RPBaJXBUEsvvT6l/j8515l2wcw2ZCN+clLmpooUZfKR7CetJIvfPEL/NAPfZzzxZXywc6uR+6S21B3beDlDA8u0YavVomeU2k8lMJa9Lr0qN83tE5HaSnKFr5SWM3sm7KQocpazlt6xGodLOImDjO9b7V0WFs6Fj1R1NJ02JX213fpDpJUAS0u2tEg9YaSmXJg9vSpaIi+VkxTJLdMiOpW6p5pE6PmO9Pr6ih5BUiZQmfKYK2OKVuwlKvgUz93evSkLPeoyk4Kr+097773BUX40pPBP/ftL8m9tozOWe5rWRiBBDO6Ic1bUdJx3lbyni8mgcFZrF3aeTByaXd9FKyPgHP1Xio6iakXudjIOMkqxlS6z24u0LJQzXRC2qRskLnUXW9XthTjkN6PIPR00qXXtuYyMOf0YiD1YTamYUhvfkkapDu1C89dOEEkJ7rg0wYjlyinegKNQSRJ5ZBu4dZqbMJBMiRiwtAhWLTuYSo4ZTnnsfjbf865rsnr4wVrfpo333iDpx98hQ/cb1TdYX5ly6sMO617f/Ew8WNyd3/Hz/nGrydSSsc/9ud/guF9iFu8k9Hn1nmGmn4yFxF3XMbOw4sHgWpeEkadwEjW0mp0//QZD289J8MIYL+/cNkGa8Jrz99gt+DZs5ewWZoiFuA3irKoNR9zDWsJIPQpbKbCyVLUuU9UE+8n+CBmqStwARPOSNxV+yVgTo/tCOswU2lFKjdWFFstOVdjdBjpgtrBlq4DemULb3kxgMtAd70Bwjcb9NGsi8J4Ivt7LoDirOj1QkG6yZff8/57XxwC4XDZnUuDQ1kD76jnhbHnTq0r5y6O2Pf20i2N7Fsl01XAuKVjT5eAoWwv99qJKKaBzwPciMvOnEsKLwPfSgUZNqUcrGAbhe2FX6UhcBNAaDFFXZXD6VQkxcaqyeb6GRZCtEcfYGXK35fjcGJb6OlBYmyMGqIZq4i647odDBxqsFALUqYzsthGtg3tJG1IaBQGtjFXMXxiF2dmsKFy1lt7mK9NlOmuwypqsuYg07E9eelu8Bu+6zfwvf/N9zLccQZf8+GPMPZB5T0ZsL/8hIfXT1Rkrmaml57dyQJrMMI4ZnId/yh2POc87jh2A1T2mcuwYQxP5umcxyTu1O7zpl+1VhTUWXANPX3Pasn0UuRWnpwVZBXbiytlMGrnAI558Pz5F1UQ44GPC3fbBWzp93Rjc2edk/QlXX/oEOHUg6SYj8awMu3wNZrHpxudziFg1ieWFz2QrMgX9qhVqICoA47FdTh1qFJksailA0ZDYytgSew0fNvIq0BJ5pV1bhSlxi0DzqA4sRhw6gFVjTmZJbkWbhtlxs7R/ZNf+eP9sQ60M+qmmtvCCNul667gmgUhM9COHHVWF6Iml3iQKy4nd0xsB3vdURPzaiCtGmiDGLvGO1/EuZHzljykXX+bBVNNxxswHoxjbyDNFU1lC2ZqHx+d6b0fxmYXRgTYJkBvwBqB2R1RSs8JNzbbhD0iv
vhi6iX0VFjFydtstxM/1aRzsxK7GayduZYUjSPxM+AwqBP3VMMumryXSSI8VsEpNDviIO2K21TwaMA2llB0HvjI17zMr//OfwW/QAzHR+Dbzv0T5wNPd16JZ7z85J6X71/ipSfPuH9yYb8LLtsuU5BtvO4/jxyw8o5572wJI4Oywdp0k+cc8LCICLYjcO/ddZ4wTw2Ddmv560AYG0xPjm5VthCr4wnXOthnkRfEQZJQh8I6NiCCuhOYe9pk3nX1mAf5MJtdgNPlbyA62GUZx66HxURY0+byXGgl3NRm7aYMi0uQU2GlXlqTfO/hMIzsOFf2JLaCUFoyhajv4QpyHYvMxZUh/GgIYLKZ2FR/IuuqYpIsevTFytloQd1IKRO3964mf18cAlaQnEJu14QztRIYGGcXPmpUmgylC6+TqsFpbSTy4LTBeriST7Sr1hIKvXyRZ9+8Vcwo/DDGdlXl2CbKbzGUkDsUApmbdrGxWg2WSpiZBXF2iEYd2ILjTs6vPJJVz8FSNF8a6ZIP+UrmQ4rObDQ6Ax5c6UDmajyK7Q475mNbjjztgS0wn6w4sQ2Mg/XCOC8Fl/67lkTtzTZoySA2phm2Te2TBWMNcjk5NW3knBQBcyeAr//g1/Ebf/1v5Jf847+YcX/HflHa8PO3XvDw9nOtMCE/w2KwfON0+PzD1/CZ/FZe7E/IEtB2zsnVzk7GK0l+a3F1Y94FEaI3s8CPpEaQW6hcoZopIAkXoDoWhA+EgiuA9WR/dNjFKngOeTFOK7heef7FL3O8/YDNxRHKpI4MLo4yK/bUfj3Bz5SiMgeZBzUMX0ncHcRAE0UJN6hSMnRtLboCMifu6mw0S6kHD6VEZYOQQQn4xIkMVnZ5bRYcrZhdsOJgX6YT3YrooJIaUhmmleI5L4ogs6kQWUJBLjYhrxM/3ucZgwVg1sWfjeg3LGp0lj4olSX1wrgBp2yUp3gZUXNjaDRyRXn5KuXoOY2kKwU+ELgyQnbUdNE025LCKwVLy412JGwNFE4FRxIwS6m49+39LmubMBc9AXKJfmvuPby18gyCpNbsOiud2hI+0aKmi/zy1cEmhsbLacrkLwFteVGEek6QzBAqT9il0vNCe/UxJT5xg5lq3tGLwjIjtp2sSWziuD2Lism3fss3883f+jEo5+M//HG+9NqX2H3nravWk0Uy654vvXi56SkdDlsmLJVvjn6Kp3ccfBhWwV79HptTezY/IODL0SjrC84AH8YqYQCripVT2gUghzPqKui4k4/JUokog1yJhXPOF1zffMFL93fYtuMR6qsAACAASURBVHOmUoq3LThPTVl2EaDqE3IcrFTf4zA4zkGsomIok3IlNiaWAQ+TOTaig2OWIVAviwEKAO18iDWCXBdlTsx6jCPfSr+bzFwSzNm8YHeKhYspleU73ZSSSVOB7YvdoELBozmX2AY3GGLe3uvjfXIIGMbOCrmecsrX7qWT9LTBhakd3BPmUE5eI+DW9JwumqJ2GUDqkN4gBkryDSfqjnuSg8mMTbJL7+dyyQxSePPzAqO2bdeNk1KNmXdijGtcvw4h+3s0Ap76ncI38ft5YiVR3yDa2ebtjbBG2FFs10olDo1qc4k0/TeFH0OYQq6NiIMqNfCcrvIT66dtrlYFeguL2hLlj1wgosksNDJmR7jVBEICGvGKLQpKfv43fxvrmyYVzp/4S69xTF2wYQrlXKdEScoCLB1KGZSrUcoIyE4iCtGtkvNqfdtssGrhvkmim50c5MmqaJWeWp5vfGn2j6gHa7KbyxMRydBSRN4COaai1988HhgvrlzDGPmcevLkMVchj8T8ogasaexRHDU0PaVWjOq0a4DKHcVMCFiFQ+7AFeQQBZhWjDJsbYQl5znY7KROqLuCExnSkBZicqWmnvg2gqlnAZi6Fqib/6C1HXlynO94KrxSB385ftPc+Pt8EhDdf2pMNEVZqRkWKGfYojzk4gIwGtATtcLt/zuKl1rGOYzwYNkiKlQI+q64JfX6OWfNtq7e/N8Gm8wzqh8L9jk5XRz3Gujn1AInHn3R1KDaX6Ijyj1Xq84AN8ZamDuZ8jQEUrK5adxn3vQEEtdIiCNZcPpS/+L0Rz5fPrZg+hS1l/K4V9C6GxVZ6CpzQDhKbgq9MDQhSZ2o8dZGSDHpQ56FsjZDFZcnd9Knj+Du6YG/OLjsAbFxzonHFRuaIl6sh7Zxj/4S9hh3tbiZY0TTKRi2dACZcnJpbYdHcpaCV7dQSmE0CwLag80UAb81e2BT74VNjem+ejKoavxAct4QPMfDW2+BSzdQWYw7MT5xiMXhEsqS6Qgvh3coQOQA1fWgaU7X6DtpRCNK4qFQzoSnotlzc2qqPFU3eWGldGkIarUW4DzpVFUp/5pAszL8JhFmx1w+lQJJhk0BtOaT+VU2//fFIQBIYOEDy7zJ36WtKOsnyOQdvYOoN7tlqXdFlnTi0t5HQaR1iwwYCiq1oWPgpLjE5DyrYaf+qi6hz17JCtWBHVFKkWmIKjcjslNzQzdIywTJdTIiJIH1ro7qW1EiGT29DakMcwEdlYZB2UbVqcw/D1hSFZophl1BmtbqsYFV6jVy+S5u+oEqSVx9FdNC+glP1moAthOXrDPubgNCyUPEsr5svBtxQFFva2LTGTXwy6BigieDYBv3vbvefo4Eu3C+eEuhJ72yKWX3FqJarXYUwj3oyvKmZOeNGfHVfvoBXfTpCHMo042j/AbTGker89xELQI2luYkyfLI04ntNjuJNaCSuU7inCykDhwsbA1Ok8FrhBHjwlynavOYbNtTtkzOEmMANJV7QwCSNKkqJf1Vl6KVmCfe5ZsphP+cXrip6tzeNc2b0a3Qui7Tgy0V2KIUJr0B2WKmYAkneI+P980hUK5Y5KGYGV0oU1r2ChmMqlxg1BnEkCw0XZZOx5ju2p+ZHfDxjnAGCh+lWGqcEXIUhp164evWtptQMAkyE2d2TJlGYvX8baKPlsM4OrFG9N9uU4pDX3IhtiLPShdpdgy656RqKNI7VUhhCTmM3TomLFsJZ1PUVodrpKf04bnBCbZJbzfKez1qQQqoUTn7QMjERzD7YFHQZYNOa+LboGwyp+nptXQjplunIb8TxAkLhmzVUe1vMBm3HNgvTwhbTDY2qz54FmctjuNQj6mVlB0rYSSTIXygkrCd/V64gEF7LBQgkrXjJNernpC2BtTN6iUWZ2/RVrbeP1uk0z+5QLsoMGfdxvWq1hNI9UnJ3Xe8vUh7Dpsk2JnBmCdznbpWvSCfc+3UIaqwcO4vuw61LB7OB+I88fuX2DpmDl+6LnQao3zt7it04DE1yx5VqinNtx4YLjxi35I6FhnqGlDUm3Qz8kYok+G9Pt4Xh0AhjtPWrRpcT4oqo1xOr5nJcPmoFR/dfXv9VDwd1TH5tbXhu0bDJXCvShQPfsqhWEaEfOK5tP/6TRZseiSaixVYSw1HdUuDLSNHUkdXWoUMIDiyQh9S05Mmp6I1XcXgiYvNs2UQ/RQ0acwB9jlxC2oUORv8cnukT2WrlWZhleFDirBboo1q0iUx9aEL
sn8rwhxl6ug191bXZYdfrPYSbD1NWSomvXqjIISBTEv1L8xgY/Zu3w46FxoOupnCi9ruW0p74rYz9h0v4+233sI8Gcjrcbc95cXxBvvY2S4XTS+ma2FOYQbTFr42bJ/sw7EaeBrPH150kGhhrmBTmHg6tzRHxWwZm8NKAZS3hdI9masVja1dL1vy7VtTrjmpvGNlKsHItfdzwGmHgm5Lr67V4K22MBsC6tZMdn+LN6sYYXRTKPvlqVbilveG9aS27cLAVlvOzRhtBqqlaxo37Ag9oNqIRCtXpSKVAKori7/ix/viENAFuqgQEms3NH8gGsWtW3iNzReXpV5AWDKTWBDljEzOgErVPVemml3UhCGrYltxT9+p42RtolhsWi/T1nLNgZ8LwikWp9FDvTLdrWjedqekMqHYWbm18UWiDUNTjJl1U4zWh3JNDrUWgaTDZUE6zJrEKRt05lQwhSvLnkQ035C45nTFr0W2xr1LQ7wlpqu4LU3k6oHTEkLYhOHgQZ0KBZ9DWgIl7Tgbt0o1JRFVBnkpdoyDAruQNXvUTbyDObyZgJWwzaQiwTbKYDe55e7vX5H9djpbaGl68uQDUhYiajBqY9opsefaWvK9iDXIEsvilTx79kTGpOmc88tMS5699AHGKXPSua4c5wvlTtbg2CaXQ9NQjGItGasV9dbYwoBjGiP1u1Vtki2rPQWbizgGNlAqtPVal8U6DVtTnZmI5WAMjrmoKOaDYzUpT9Y6O/uwuFzuGb7pQOEgufU49jK6+r0z3egri7G3cK6kNZFbcWmy8CVg96tQhD+jTsDMfp+Zfc7MPv6uz33IzP5nM/vR/ucH+/NmZr/TzH7MzH7QzH7JP/hB0Hn+hrTSvS9v+rrUqRKIyiQX1Jg9zg6qU4QfLglx0b7MIne6zkxPxBknmCKpIx/U7FPWdlHtq974gVtRvlGh8TyWC3WthqQsWH7H8gO3Yrgq0VgOy+XobXmpuOXiHHCdAuUyjS2VbThqpwjOkawL+hkbxGKNVpQdmii2xGxTFPdhali2xgEXrBB0ZWj96KOEquSsDXrVYJ0C+RbAZMVSFZwFuW0EAkVV99bgcga+gc+Tdeqpddqp6LVUMaaMWA0ELlGRk65cK60Ts4y5SYgEO2Ehl6ObDFl6uVjAQ2rqs0o8poJXTBaw2DWRrDtXrFYMLuXcP/kQLz/5EGOiNcCdfb/jydOXefbsFZ68dM+zRUd5gZ2OrTtW7I95joaxziFWw4C6E86yFp5daz4gx2It4T9bG94Exus9Xm4YC+/syrlDTRO+G8JqKpN1JjMXLx7e4o3rl3n+1pu88eJ1nn/5y9AW9jVSMvGU/Xkz584QXpUnSfL8fAEWrHAybkKr4L2PgH+wSeD3A78L+IPv+txvBv5UVX2Pmf3m/vO/A/xK4Nv6/34p8J/3P7/qR09QGMa1nI3FKCmnLCec4kVV8zU5xsR7dCsrGAJAagZzwXbRSM5DKFqr68L0NcWfzrwogGQptmuZUOO6XYh5ispbxcyN8gW54UOUX1XTV6Ugk+Ps6q3L6h1RlKMVzJlYblzabGJhyvGfNzo0FZ19XZymN2+bcKQirTKNsiDyEHodxorJsTb2mOSL7AnDyBeLaVA+GBM9Bebk+T7wfIsnhQb4VAJxcWJHMGdxbsn+1pXpgzMWWSedy0zZYivjgQEEZy0Vvh5L4p6uIIsO3JSt+kLW2zj3vP2iBTRNvXJKT6DePOM4tQqdccfusz3+ct6dcwMWviZruADGgDjEuwcqDK3cObYHjutgc5iR1ETlI5uKUbFSHsDTj3DYc8yC5y++hJ+TQcvL2zPSIgRyHVioZTgugzNN11SWVqx9QhYz/RFP8YJtJDWlVk1SduRTF/uowGxwutKpzgFWakbOAqZk8xUnL954g8ipHEyMu8sT9u2ON958TuZslWbxZHtKXa+8wSH86BQO8+TpS4+FuD+rQ6Cq/qyZfezv+/SvBn5F//sfAH6gD4FfDfzBqirgL5rZB8zsG6rqM1/te+is1Bi1gwwTQB3JaXozaiiyOW3rBNqJRVK5cctvC1uSzM7CN2edSueJUvPvkrcW35oPD7UDhSXzhtJm4n6wfOAJA1U7mxUshYzcIs4wCWbOLeAOPaV8qGjishhkrxlDwo0qHjbjDhTswcZuCjb993/bf8if+tN/Gk4BikZQl2LkieJsJx5F9pJepeKPaRthD3htilYjJNSxYi6DbbAfybmXqLRVKsK01F5p1poIE9I/nQhnNihott5lvjFWygdfPjC/4kePz+hpVbRpx5Tyc2ax1yC5CkTL6PkzBSSazDkSzivJiaymAB3P0R4bJe8sHLYGUVuSC4vNByuTgfOc4i6l9iQllPnl/8Kv4Zd/x3fiDBg7p508mTsv9uLluw+zLsmw4DxOcn8BHqw5GfXA2oMplxO1oldEhNV07FtkyaXZOENuOhRt0FbIoOJgzMFa2cyR6MJpi60mcEcOjfr7gqO0vjmLZaubpRZvPVyx85C9PozKxbDi7Yc3KUvGIfvl3GCU8fzt518FFvzZYwJf964b+7PA1/W/fyPwU+/6e5/qz/1fDgEz+27guwE++k0fxUxVzotFLFmDBcg02t2/hk/RPATkaawGUgrYqngwcbzaS5u7BcIT98magedsSrFgnpSJiw9vh16CXRI/7FEAY6nyiqwEG+/s/G7EXDzgjEiiFmvAvuqxbEMkUrKF8fRBB4SXUm3OrfjSF7/E689fBybmyWWTaWWtYsuQ796NdTRmCUL/hzOui7UrYTeG+PNzaoKoTd02py8sh2THe6ovsJxSHiY2Dd9S/Ys19DlzPCZr3gRNyFVnwUpFlOeURXbVhKSzBxbbFtIXrUVcRMNG23gZEyupOrOEIVidrBzEnOQmoDXmYHnn8K9FpYxSbDRIKVBTZbIXVi2MwaK4a/pVdKQqv9/68uucD88ZTy5kOSOdVcVYsuruSJuy3w1GPWshQPH2i7fxdVBuAiZzCkilcC+9pji5FTmRBdxKh1oXw2KrQ2CUy1ChVSltUkusQLlT3pJunLm1+OjWOMWlaV0ZiNZazWgJrM1tkHMxbmC6OzyW/ArPea+P/8fAYFWV3ZIs/u/9d78X+L0Av+gX/+IiFX09TP0BqntT3JaUX086Ubb0IlLYneOtjCPhjMAY7Aln8UjZ3ZQAXqvlvo7XkHQW1x7dnXgAwaDORu/PBhVdSa8LZQh4a7nJZFYSYzBoHwcdZNJJOJYK4IyEZcq78SoBeVb8d//99/HX//JfwTPkVaBDJbtJuWLhQ3JeTogazHDWSraBDjuCmhIp6YkshWO0qCYrYaOfPpChXESvhAFnnfyyb3nWiHy0H9+J0/jkq8WrX2wZ6sjWvd8irFTyisnfkShObK2S5j4HVSHL8U1W3KaJWgt3J3E2M2brHlamEng78anSiLipFbLTmW7Ki1Y2tnXb/dY8dYqGTGePxd/6i3+Sj37sH+Lbf+k/21OdsBp5+G/5jaVpxXTWLIynd69QN7HVnNTDAzZn92OmfjdPcimivJDKL+fsNGGjIh7Vpqu
NBvrwLfYyu21I9bY30nUjwGs63M2an28gHSmtiGR5Q2KJc7UVhzmLwWKy9qq3VJ1LM7KqO3JelCXXSGJuaw9/H8Xmm3U8yb1bRxp+Lbto3OY6kS6E6stoBsB9t37sqap5dxLKUbZ8dz+7LoZ13KHs3JzqmKrAVm2+Lam7gK/MKjxkYuB+Bb3Ai4hEtpjzbu2aIgM1MqNunFKrjWS28HZo5au4gdHA5HazzWLB0wrvTavDe1xUKrHfS1eNXGcW/2GdjtkuFmFBGJJ2x0s5QFZjqNdxjci/N2kas52VylU39h2PIPPA02WFyUbY1eXJvUflXEMs5Sg5iHQJmz1Z+1J9z0AUcvnty4enO+r/6/09guhp+7xmphN6nsLHhQ56kLvzTqMzfuNLaa0y+1DjqAyR1wBzoJU05CLpUzyRiglpxuHqKWKHVQTfF5N8qSeFhg7WK7AFgzGWCI2vxK3gJenKUe1M3gDtuKCpd5ajk5aUeODqNyTVL2BuuLMONmzrKbEnWWTb6gE4eCQbX3Xc+9esdNOv+UDbd2YcV0hYdGrj3nY61pARaRCpSNx59lsd6XQ9GqfB4TGkW4Kpker4rOhNKBo0+vNJdfNyqEGZ03Bassg1cUfgpMdopMqQRPgtMuOvS5Na3xoUOfzq6Htf7YyeV7er2WXEAGpE3m5eHCB8fa3FP5iMckGDuacm0Tz+NNyx6o5dtcZvbzwOeBv/+2r+XXuL6cd9f8xVofx+t+adf8j3f3P/rLv/giNgEAM/v+7v5tb/s6fi3r3TV/8dbH8bo/Ltf8MtqBd+vderfe2nq3Cbxb79aX+HpJm8BHOx687PXumr946+N43R+La34xmMC79W69W29nvaRK4N16t96tt7De+iZgZv+Smf24mf2kmX3z276eNy0z+9tm9sNm9kNm9v3ztd9oZn/GzH5i/vsPv4Dr/MNm9mkz+5EPfe0jr9O0/qt57z9lZl/zgq7595rZz8z7/UNm9vUf+rP/aK75x83sX3xL1/xbzOx/N7P/28x+1Mz+vfn6i36vP3LJv/3t/EKCvb8B/FYUR/FXga9+m9f0K1zr3wa+/Jd97T8Dvnl+/83Af/oCrvN3AF8D/Mivdp3A1wP/C2JHfy3wfS/omn8v8B9+xPd+9dwnT8BXzv0Tb+GavwL4mvn9rwf++lzbi36vP+rX264E/nngJ7v7b3b3CXwn8A1v+Zp+LesbgG+f33878K++xWsBoLv/PPALv+zLb7rObwD++9b6i8A/ZGZf8cW50g/WG675TesbgO/s7nt3/y3gJ9F99EVd3f2z3f2D8/vPAj8G/GZe+Hv9UettbwK/Gfg7H/r/n56vvcTVwP9qZj9gZv/2fO03dffPzu//HvCb3s6l/arrTdf50t//f3dK5z/8oVbrxV2zmf0TwD8HfB8fw/f6bW8CH6f127v7a4CvA/4dM/sdH/7DVs334kctH5frBP4b4J8E/lngZ4FvfbuX89HLzH4d8N3Av9/d/+DDf/Zxea/f9ibwM8Bv+dD//2PztRe3uvtn5r+fBv4YKkF/7lHSzX8//fau8Fdcb7rOF/v+d/fPdXd2dwF/kA9K/hdzzSbt+XcDf6S7/+h8+WP3Xr/tTeAvA19lZl9pZjfgG4E/8Zav6f+3zOzLzOzXP34P/AvAj6Br/d3zbb8b+ONv5wp/1fWm6/wTwO8a5Pprgc98qJR9q+uX9cv/Gnq/Qdf8jWb2ZGZfCXwV8JfewvUZ8IeAH+vuD2d8feze67eOTCLU9K8jlPf3vO3recM1/laESP9V4Ecf1wn8I8CfBX4C+B7gN76Aa/0kKp8v1Hf+W2+6ToRU/755738Y+G0v6Jr/h7mmT6EH6Cs+9P2/Z675x4Gve0vX/NtRqf8p4Ifm19e/9Pf6o369Ywy+W+/Wl/h62+3Au/VuvVtveb3bBN6td+tLfL3bBN6td+tLfL3bBN6td+tLfL3bBN6td+tLfL3bBN6td+tLfL3bBN6td+tLfL3bBN6td+tLfP2/TjgGwMGPNd4AAAAASUVORK5CYII=\n" + }, + "metadata": { + "needs_background": "light" + } + } + ], + "source": [ + "#@markdown Define and reset the environment.\n", + "config = {'pick': ['yellow block', 'green block', 'blue block'],\n", + " 'place': ['yellow bowl', 'green bowl', 'blue bowl']}\n", + "\n", + "np.random.seed(42)\n", + "obs = env.reset(config)\n", + "img = env.get_camera_image()\n", + "plt.imshow(img)\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "-rU8TLu3Q-vC", + "metadata": { + "id": "-rU8TLu3Q-vC" + }, + "source": [ + "Next, we can provide a language command and obtain the ambiguity measure related to that language command." + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "9814b40c-71e1-400b-b46c-f3830f242a30", + "metadata": { + "cellView": "form", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "id": "9814b40c-71e1-400b-b46c-f3830f242a30", + "outputId": "b4a53ccc-2485-4e7f-9b1c-ded5c296aa5b" + }, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/plain": [ + "
" + ], + "image/png": "iVBORw0KGgoAAAANSUhEUgAAATsAAAEICAYAAAAgMlPEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9ebBv21bf9Rlrrd/69f1vd7/9232/9znn3kcrmlgoRBqTkCJBJQErAkZTlQqoRAOFAYEEYiUYyqCkQiICCsRETEwqipiiExDlvXvPPrvv++7X96ud/rF+57z9zrvn3HMfF9+9sL9Vv9prrTnXHGOOOdaYc4455tyilOIBD3jAA36/Q/t8M/CABzzgAf9/4MHYPeABD/gDgQdj94AHPOAPBB6M3QMe8IA/EHgwdg94wAP+QODB2D3gAQ/4A4HflbETkbaIzL5PnmkRUSJifI40vk9EfuZz4/CzyvpJEfnB16QrEZn/MGi9VO4vi8i3vSLtdyWfe+X8nvD+BnRfWbc3ePdYRL7yw+bpNfT+sIjs/F7lf4PyXqt/vx/wYenz+9D4syLy6x/0vfc1dgOF7A0M282gwRIASqmEUurwc2H4FbS+XETOP6zyHvCA+1BK/ZpSaun5/fsZ25fzfxB8rh/kA37v8KYjuz+mlEoAXwB8EfA9v3csPeABD/gw8Xs5yvo44QNNY5VSF8A/Bx7BZ06dRCQqIn9TRE5EpCEivy4i0ZfLEJE/OehRH730PD4ouzgYRbZFpDhINkXkp0SkJSIbIvJF994risg/EpE7ETkSkb/4PtUoiMj/MSjrV0Rk6r0yvTw9e7mnFpHlQTlVEdkRkX/rfejOichvi0hTRP6xiOReQbcoIv9kUO6+iPz799J0EfluETkY8P87IjLxHmX8IRE5E5EvfwWN/0lErgft9KsisnYv7SdF5MdE5J8NaPzfIjJ3L/2PiMj24N2/DcirKjxwQfxDEfn5QVmfFJG3XpH3S0TkN0WkLiJXIvK3RcS8l752T943IvLdg+eaiPzlgUwqIvIPXiPbFzMHEflpYBL4Xwe69p++Lv/g/lhEvlNEng7q//MiEnmP91aAHwe+bFB2/V5y9jWyfWOdEpGZQdu1ROSXBm32M4O051PJbxWRU+BfDJ5/i4hsiUhNRP73+7r/OtrvpxOvwLeIyOWgLb/zXllhEflbg7TLwXV4kPYrIvInB9f/yqAO/+bg/itE5J33ofl6KKVe+wOOga8cXE8AG8APDO4VMD+4/jHgl4FxQAf+ZSAMTA/yGcC/B+w/f+c9aH05cP7Ss+8D+sDXDsr9IeC3Bmka8DvAXwFMYBY4BL7qFeX/JNAC/tUBbz8K/Pq99Pv1+WXg2+6l/dnneYE4cDaojwF8AigDq6+g+8vABUEnEQf+EfAzg7QX8hnc/yrw3wAR4G3gDvjXB2l/CVgHlgiMzFtA/j7vwFcPePuS17TptwDJgQz+FvDOSzKqAF8yqNv/APzcIK0wkN+fAkLAfwS49+X0Hm3n3Mv/ncAREHoP3fpC4F8a0JwGtoDvGKQlgSvgPxnIJQl86SDt24HfAkqD+vwd4GffRL/u0/8A+X8bKAK5AY//4SvefaEvbyjbD6pTvwn8DQK9/0NAk8/WqZ8alBsFvo7g21sZlP89wG+8Ce3X8f0efD2n/bODch8T6PDzdv7+QXsNA0PAb/Bpe/L9wH89uP5u4AD46/fSfvRVsn2T35sauzZQB04IPsToSx+YBvSAt15T+e8ENoHSmyrXvQ/ml+7drwK9wfWXAqcv5f8u4L97jbH7uXv3CcADJj6gsfu3gV97qey/A3zva4zdD79UB5vAeD+Xj0HQmXhA8l7eHwJ+cnC9A3zdK2ioQd1PgEdvrACQGbybviejn7iX/rXA9uD632XQ0QzuBTjn9cbufn6NwGj94Xu69Z7GBvgO4BcG198IfOoV+baAr7h3P0ZgYI3306/X0X9N/m+6d/9fAj/+indf6MtL+vcq2b6xThGMSF0gdu/Zz/DZxm72Xvo/B771pbboAlPvR/t1fL8Hb89pL78kp783uD4AvvZe2lcBx4PrrwCeDq7/N+Db+PTA5leAr3+VbN/k96Zz+T+hlPql16QXCHrcg9fk+UvA9yulPpcFiOt7110gIoEfYopg2nt/mqADv/aass6eXyil2iJSJeipz179ymdhCvjSl+gawE+/CV0CgxQikNt9FIGqUqr1Ut7n0/YJXi/j7wB+Sin17FUZREQH/irwDQQ9qz9IKgCNwfXL8k7c4+++/JSIvJ/c7uf3B9PC4suZRGQR+BGCusYI5Pk7g+TX1XsK+AUR8e8984ARgtH0h42XZfNZdfmA7z+X7QfRqed60r337IxATrz07DmmgB8Vkb9575kQzMTehPar+H4VXtb3x/d4P3kp7bkMfxNYFJERglnNHwf+CxEpEIwqf/V9aL4WH1acXZlgqvm6efy/AXzP8zn5K/BBj2A5A46UUpl7v6RS6mtf884LhZBgVTkHXL5Hvg7BR/ccoy/R/ZWX6CaUUn/+TegS9MwOgdzu4xLIiUjypbzPP9ozXi/jbwD+hIh8+2vy/GmCKc1XAmmCnhhe43u7hys+U37CZ39gL+N+fo1guvle8v5vgW1gQSmVIpjGPOfpjMBF8V44A77mpbaIqMC//H74vTzy53PR5TfVqSsCPbmvn+/VDvd5OAP+g5fKjyqlfuMD0n5TvKzvz9v8ksC4flbawHj/DoFr4plSyiaY5v7HwIFS6uXv5QPhQzF2Sikf+PvAj0jgYNdF5MueOx4H2CDwJ/2YiPzxVxR1A+RFJP2GpH8baInIfybBAokuIo9E5Itf887XSuDAN4EfIBgmv9fo5B3g60UkJsEizLfeS/unBD3QN4tIaPD74oFj+lX4JhFZHSjo9wP/UCnl3c8w4OM3gB8SkYiIPBnQfR5n+BPAD4jIggR4IiL5e0VcEkwFvl1EXqWoScAi8MHEgL/2Gp5fxj8D1kTk6wcj67/IZ3YC74UvvJf/Owa0f+sVfDWBtogsA/f5/6fAmIh8x8DBnRSRLx2k/TjwV58720VkSES+7g3rc8OrjejvFjdASe4tsrwP3linlFInwP8LfJ+ImCLyZcAfe5/yfxz4LhksRolIWkS+4YPS/gD4zwffzhqBL/DnB89/lmDQMzQYsf0VPq3fEExX/8LgLwQuoPv3nzM+zB0U30ngPP9/gCrw118uXyn1LvBHgb8rIl/zcgFKqW0CYRxKsCr32inCwFj8UYIh7xHBSOknCEYsr8L/CHzvgMcvBL7pFfn+KwK/2g3w3xM4ZZ/TbRGMVP8dAgNzPahv+LOLeYGfJvB9XBNM+V+1avyNBKOtS+AXCPwmz10IPwL8A+AXCQzD3yNwPr+AUuqUwOD9ZXnvYN+fIpg6XBD4UN/L8LwnBj3rNwA/TGAsF4D/631e+8cEPqEa8M0EfhfnPfJ9J8GoswX8XT79cTyX9x8h+KCvgT3gXxsk/yjwT4BfFJHWoD5fypvh
Rkd7EMHilj5rZAygraK1AEZhySglCEkBjKkWJ/YScDbN+8jIrxcWcUFGq0spCAmg9gUm1q29rZYfLGATjIobcGE1O7KXN1e0dPdw8PjAxNjY14zphXO+YxNGeH44IT2jjY6iu3s7x0yONrL2fElE+PThLmQzbcbpOnXR9zNZCLmFxdJAIOiUalyd3vDxMQEgmBEcXV1gzOKweEBHP66HB0e0tnRQ2d7jlR7PwcJUnQrAB0S2BYuDDEKUhFev37NyuoqRnkazOu1VyyvrlIwilrq2N7cZGVxFbKgEoXF0azUuLi5pK+/l4P9Q3o6u5kYH+Pq9ob0yZ0tigzdfT3ohN+6iaHIkPB6e5t6s8Xy8jyhKeAChwakqVE6QWU1W293qDc/3dvYb5TZ2D8B7InIVxk9+Nqr0WzgtCLNBKiMYvnFM1/3CbUHUGrBBcLj/SWXX/5HmDCL0Q242YdEuL64wGYHef5t30dvb8/HeiHP/vR/yNKf/A9Y+cEfw9kYHSnCoJ3x7/pJJDWoapmDX/rvuF//RaZmZrBoKo9VHu7KTEyMIg7KN7dcX12A1lxdXFK+uwPg4PTsjyTQgYcpvHn9ht29HTSauN7k5OyQ21KZVx+s4VoJg4NDxAQ0rMdnWUlxqfYoJ5Wys75DXG8RYXAEvFp74yGdLkYrH2RWVp/zwdrbJ7KxxsaGQBxoBZmA1ZUVlA0wpoVRgokbFPIZZifH0NpnUDd3t5ycXiCpb44osahUg1OEcYghYnZ2lnzYztzyEgdHB6jW703a/dyzlY94fWK2t3bQYjBKyLcV6Ors4vjkBKW9TebAUD+Dff2eeixgsYxNTdDR0YYzmv29XeJWC6MjlAnQgcUGIWLAOgCfJbowQAIN2YAXL56RzShilZLRAcsrizgjNOsxm1sbBAZs3oJAe6Gdly+eMzI9RhriSxDaMTgyQG9fP4qAw7NTn21qiwuFV7t7jE+M8eLFO5ggj1KgUwdNecroveyn0Wr8Ib3TPrn1jZLZ/Qzwnoj8HaXUTwN/GXgEfhP4z0Sk9LV+P58vyPzSDHGsCLMOY7WHGab+7uUlCgmNUp3zWpOVP/WjFNvbOXz1y+z92v/BwNQKwxMTWGshTVBhkeOdNUoXu1/raX/XWv6BHyOwKTrM4OpCmBFaLmB85fM4pTnb+FWMCmnvG+Ph/Z9HNOAMlco9d7dlJsfHKd3f0oybBGHAxcXt13M5v+6Vy+ZYmpvDigPxNUtlNDpQ6NSRaMAosOASxcnpET1dndzc3TIwOExHPsSiECJSSXFKEUlMoAJS0T5PjTRvvuy3js+ev8PbNx/wfPEZEkEcCtnYN4RtHLO2uUUhn2dkaIjtvT3aOzuZnJzCpJZEwfXNDVop+vr7UcDx0REdnR10tLext7fP8Ogg+aiLza01piem2TnY/rozO/A1u6X5WXAOQSNGe3ezapXRsWGUKC4vb1BG0TfYj7YgGASPPdfOeQ1dGrK9vcHM7DTFMIcFrKRYFaCUBWdAPXmRiILUopTxYmOX8vrtGlEUsTS3gtb+Z+BIELQoyuU7qvUGQ6MjgOLq7Jyb2xvGx8Zo7+nh8OCAgd4e2jqKaCeIjUCnrK2vMbcwB9l2Amnw5v3XrK6u8Ob1W/K5PLV67VOd2X3iwU4pFQHnwIqIXCmlBoBbfMHsbwJDIvJjv8fv/Tjw4wBhGL67vLSAJAaV0aRpwtbmJgsriwQS4FKhFTfY3t2mrdDG9NQ02b5Jupa/n+rZGg/Xu+i2CVTlFNU4Q3eN47J92FSjdMj+b/wi1dJvjYHNf/HfI9NWIFCp/3ArnuYtE28jI4pU5zDAyMpnOV/7V4yuvMvR2r9CpRZ18R5KB0CKPNEqPPJdcV+65fj4o01k/FGsfCHP/NwMSvy0qzaK28sSrbiJ1gajoa9vgJOzUzqK7ZRK9/T19VJoa/PFbYkQHSOiIdCsvX6LTS2rL5+hY42yChtZ3011gNJoI+hYcKElTWB3d5eVpTlaOngS6/obmJCAhARKsJLSUgqtwXwFAqoNVlICBbu7+wwPjFLI5/wW22g21l9/3V4KmSjDytwKRBabGiQSxKaENvDetrT8fcBqUuu8C5zx3iHKpGhCjHgvEtHylP1pnNFAik4iXK6Jio3HSBFgJOH1mzXeWV7lg411VldfQuowxtGMmxwcHTG/sATagfVdYm30h0QchWBEIzgswsnJKYW2drq62lEYUiXsb+8wMjFMW5BjfXOTmaV5ttc3WZ5bRoxDK4Mhz32zyuGnvEHxjRDsfgj4j0Xk+36Pn00C/7uIrH6tY+TzeVlYnCdJFFordOKw2RTtNI1WwvHhIXML84hyIAGhs+QHpgj7pqhcH5Ab/xxpo06zdOW3vFo8xdZ53wCDBhziwBmNciFGEpxyuMSgjGCtIXBNyBgiBRZNYkK0E79Huf01tI7o/+y/z8WXfg6XCMZYj+dWeCmDeLLG3f0tVxcXfyTX+6OsQqHA5PQ0RgUEyuGwJCrAaSErKecnlwTZPF3dnRwfHdEz0Et7pgMdguCQOGV9a4uF+XlM8GTaGCjS2KGfXmNgLInN4ZIEkwGlEkRC7BPlxIimVatzenHO/NwMYjM4FSOp/wBel2+4OL9goL+fvr4+78KFwory2bkKwIBRjs2tbZrNj4coijIRS0sLfkje+rqkCQXrNEpS7u5uidOU4f5Bzi8uCbNZ6vUapVKJybEpbu9vGBoaIsyGaGMJrSE1EcpBggGTsvnqNc9WXqCkRRIFqDQgIPHMwQiM1RBpxLYwkiOxCUYszvi548AI4sR3xgONoP0WH9BGkSYOnMFlHc5ZAtGEqTcgFywqB4kTAhvw9u0bFpdfEOGNmDb3tmjUv6Wz+3gnoNT/AvyfIvKzT98PicjF07//U+ALIvLv/j7HkBfPn/N2bY3F1WfopmDyimYr5WBvh/nFWa+K1wolEYoErX1Npdi/QpTrJXUtXHGY+u0VygkEXr9mJQCXAIoghNR7uaBjIAupc4BBa59dgGNwapXL/Tf0ji2RzxY423mNSxugLebiNWhHsxKzdbBNIV+gr6+Pu7s72otFTs4+uazud66uzi7aim3UG3WGRwa5ublBSURvt7cYlPApoxKNsjGIQoeKzc1t4lbM/NIyRhQ6MIgkrK9t8HLlOVY7jE1xKsObtdcsL64QZBSSOKwTdvZ2WFlaIHE+mKCFrAqwyrcHcA4rHi8lori4uCKTiejr6gGl2D86oL+vj4vLC4ZGx8iHWTa21kiSj++QVWjLMTM3gdgMBg/QSVI/VWHEfdi91uJVAlZZtIBDIVaxv7fH2NgIQSbDzuY2kwtTZEwelYCKmigixAqBgkbqyHwlaGlImk3ECDvbB8zPzfnrHWSpJzWuT0/p6+3hvvxAsa1Io9FkeHQYURZl/a5BaTg+Pqazo4P2YgcpglIQKIVNfG1badDKYBGkFZMxEbGz7O3vUW98uhsUn2iwezLHPgamReTh6bG/B7zEb2MPgZ/4SvD7GseR5ysvEG1BKzbXN1l8uUzUFFoCKhBwBhUpnxmIH7PBCqiIaGSZtsnPY5Qm
qd1SfbjEugRJDEYbdCLEWp4inBfvRmGGprOEgFYRiXXojCCxZmLlj3O8+c8YW/4Ojt77ZUbGV7j88t8nkZgQg8US6MjXc1QCVlN+KHF8fPxHer3/IKutWGRibNybaQd+a2Wf4KTn52cUcxnae/r8dkkEpwSjNbGyaKcJBDSOt+tbzC0ssrXpBcyKkNjW2dncYeX5M4iF9a01nq+ukJqUMImokWDEEBihWqtyfXXD1PQkoSgSJZTuyzTqDQYGBrm99e5orVaLQq6d9p4uEDDW/71SlQFJ2d7Y+NjBLl/IMzs/5011Unywc4pS6Z5m2mBoaAhtn8gixmd/NnXekEj7er/RES6NcdrgDOysbbC8MI/LhE8IfiHA02bW1zdYfTaPJcXaDFvrG6w8e45txuwf7TGzMIczGpM+gWmdeD8RBFGGq5trUuvtMx0eCWi0Q0ShrWL7YI/BkSFybVm2N3aYGJ4hKhrvzqYNgXjc0+u110SZkHrtW5ndJ74K+bxMTi2zs/OapaVVRCVoZcClSKKwxoELUQGeWmEV9WqNh1qFfKFAK2ky+e73URx5h9rDFZlCO/liH5XLYzLtnZiwQK1SonK/jxVD1GyQHxindn+JCgO6J97h/uADeobmCNoHwZY5Xv9NRpc/w//zd34KYzTjCwscrG8wPTvD8cEhK4tLxNJCK83DkzfEN+Lq7O5iaGwMjXB/fYPSAa1mk0w+orOnm0DAiMKiUKG3XhQlGBd4k2elcRhIE6ykiBi2dzZZWl5Amwy4ALExThQa8RmjEaxoms0qN2c3zExM40JwGrS1OGe4vbtB0pSRsUGsAmLHU3oN1iPw0YIJFUkibG/tfKxgF2Ui5hbnCcSgxNKwQhR4fxAVhWjrCFGkVpOkHrRpAuH09IyHx0emJsfIFvK+hiyexqxcinPaT/wEGZJmExOEKJeACVBOaCYx+7v7zM8vsLOzzdLsPGloIFQkDzXOLy+YnJ5C0tTXga2fstBBwO3tLQ5hsL8XhXtyCdMkAmms0CIejKr9pM/m1iZzczNsbe0wuzjvBe5hQNJMODzap177Vmb3ia98PivT05OYIEMzTrzLuXW41LKxtY0I5LJZZmYmqdaa3N7dMT4+gRaFNYJYy931Dcpoxj/7b1LoX0GSJmGhQJAJaKaKSAVc771H9/gid1/+X5HeRQpd43T0jnKx9SUGlr6d07UvM7CwyM0//znipEk2E9BsxuSiHDVb52j3gKmRaY4uDhkaHuL68pr+3gH2Dv9gXd+PunKdGRrlj4/T7untYXRkBMSRoLm6vOLx/p7RsWHaO7ogtSSuRTbI05Amgcmi0hQbKCBAxQk6EDY2tkGEpeUFAhESCckBDYHt3U1mZxcItcIGPutWgg+aAWiEVIO0fGfzvnxPklgGBwdIJcYoIAhQ1qAcHB4f0tvdS0d7gbWtLVofEyseRRGzMzMobWglTW5v7uju7ubxoczI6DDO+dG8QCsuL64wJqKrp4MUP5EQKEglQxgkoB0u8abnKSkBAS6xrG2s8WJpBRvmUNIiFmFvc4vFZ4somyIuZGdni/n5eRqNBmfnZ0xNT9K0Cbdn13R393B6ckxHVwfDI0MoMYh1pKn1nrqZAGedH61LhMgEOOWwViHaopV4JLwJiZQiVcqng6LZ3tii8Smfjf1G0dl9zKXZ3T2g1WxxdLCPJCmtRgsdBqwuL7G0uMjk5DStxHs6jI5N4Zyj6RLEwkOpTGotnZ1FGkf/kqtf+5+IK6com5K2GpQOX1N7vMCVdkibZaKojb6ZdyjkijSrD0icElduGJqep/wrP4PrnWdvZ5s4hf39PZxKiLRhYm6aXC5kcnyc66trJiYmUQjBx/DkzHZmKPTlUFr9rse/569/nnxfDoB8b458b+63/oOCfHf2Iz2HS30tTUyISyyDvb2sLi9TbO/2chKjOT+7olx74PjwlNp9FScJgVWoeoudvV1asbCwvMjK0hKiDC0iTOSoxL6LOb+wjNiUVIknrhiFkgCHIo5jxEGjXGNrd4ur22uUMahQ4wzc3ZZ5fGiirMEpiF3M1MQ4t/c3VOst/jBu6HEcs39wQGAMhVwn4+PjFNsKDA8OkTQFbOC3pkrRPzREZ28XoLk6v6FWaeCUwpDiEoVrhcQtixPhYHsfp5rs7u0QhCESWkhboIScViwtr/jCoISIcSzOz+LSlPPzC0bGxjg5PKXDFJieGKO9WGB5cZHhoVEvXwEkUNyX73msPiIiXJyfU69WOLs8o5o2iNMY5RxH+0ckqeLg8Jhms4EI3jA7gVqSfDOYi31zZHbZXFaWFxf54NVrVp77xu3+7j5zs1MEWuFQ1CoxF9fnDA+PEGUiKpUK9WqFgb4BrHKUHssoB7193VxfXJPPF2kvenu54+N9+vq6yA69gMcTup/9GTJhG9evfgHbOY6+3GRze4u5xQWikVWS41e06glhMYPCemxSIhweHDI9P0vcismEGazVxGmNVqXG8dlH4+L9zvW5n3hJx2iR9/7nNyTVFMShtOIL/9Fn+NX/9jf43r/1Hfzzn/4S3/2ff5HaQ41f/7vvY2NHtj3Dyz+/yG/83Fuq178/qqqru4ehoVFurq+IwpCOjg5UBIYAEYtSlqPDU/pGx7k8PmZ8eJgoG7K9vcfU7Dx7u1vMT8/5OVxtSENBtYTNgx2WFpdI4oT9gz1m52YxFhLtECfETcft/QWT4xO4VEi18yRkDPoJ4ZVY/1hgDHHa5Pz8ip7ufvL5DGI025tbH9swRilFsVhkamoKjHsavdNUHx6pPFbp6e3CRBrlPPRARwHaD5z6wAEkzRaZjMa4gJ2DPcbHxn3tNnBoC4lWtOI6hVyIS0OacQ2NIcpmEeelShoBozDWk1JEW0ILsbVEYYSQoiTEaUvwhGZPBV/PM94Uuxm3CEKDxnB0dMzgwAC5sODJNRnN7toWI6MjFDJ5nNFsb2wSZiIeH8qf6pj3TRHstNby7rNnrO3sMDE5SRRGKB2gxJLia0oOS73ZpHR7y8joGKIEsTE31/dYa718QQeEQYBWjriVQGgwxjz5wT7Vk56Kv1lraGUckopHEj00n/ROIRjh7PSIxbk5WkZoNlLaMpFHelvLyekJY9OTtFotjvYPv+4PYnGojdptnXxvjs//1Es6O9uxLkY7QXe0c/0bZxRn29FKc7/7wPo/3uQ7//rneDxv0D5S5Ev//W/y+b/yWX7pb/xTCn0FKhdffTrDBIEfchch0IZW3KRQbKOYL2C1QhnfpY7wI6sGQWERF2AVvoGBRasMmgAnTWr1BGUchUwGJYZEpaCFII5IoxhJQwKT+OxRGyAE5+uufEh6gZvrK8JMSFdnD+fnZ2SzBbLZEKchnwnZ3tqh1Yq/rmv8lRWGITNTk2TCAmJSwAcO0DyUHrm6uaSjpxubpARG0zvQB1Z7+omCOGlxeXZJ//AgbUHGg5i0xzQhYFtNXCbHxutXPHu2jAkiNje3UChm5+Yx1pJEYFJLs9Uim8sDBqeFuF7n/vaO8dFRT2RR4ptfaYKyApkMsY295s7B1fU57fkcxc4eQmO
wKsURoJzDaQicY2dvj9mpKZJAEyQZ7msVTg42vxXsPumVz+VkYXEW6zSioFmtkc+3o5QHlIsNSJSQNhto7chm81ig9HDPxdklQwP9BMaQount6kYrx/7REcViF13FdjCB92RwQrOZIJJSaMtiA4PEilbzkYvTS4SvwB2Fual5dGBxYtnbO2J8epZAFPW4RkeYJVaOx0ZMEjc4/wOKiNsG8oT5kM/++Ete/4M1Fn5whrgW0z3aQ6Icd9u3DL0zCLFwtXZDvjdL91iR+DHBRT5Y6UKGtNrEJSnv/9wG7/zIM37lb/86zXKTXHeOyvlvD3w9/b0MDQ2jPPbR8+TUk3eReO2ZMxDhZzqtdkTi/HSA1tSrVZRTFNvbvH+vg629PZSCqalZ7wWD14wZPM1E6xCUz/A03o83tpZcNg9AksQo5TDGePcvDHGjzvX1Na1WE4CJyVH29w4/drADyGayLM3N40Sw4khsQhhGtBotTBRisgGlm3tCbejs7sAJkPiM6uz8nL6eXhKEbC4DyouhBUe9EbO3s8PzlVV2Dw6YmZ0hMJpm6gjQPLV4iK2j0ahzfXvD1OQURjlSEcQpVOBQqSaVFGsdmShDqVQmbrXo7e3m/qFMZHKUyiW6utupVuvk8210drahQkXjsUUml0VrhxKD0o7UGpRKQAXsbG1T/5RPUHxTmGRnCwVKj1W62oo0U9g/OODZ8jMv1rUaAiFptji/PCebyzE0UkCJEOmIvt5+unr60Uo89ZUEXEguyhEYr+53KgEN9Xqdm+sbxFrGJ8YwDgINxxc3zExPUa3VaWvrRMKUJHFkBCBiemYS7fyb9fbqhvapCZJ6yvXZKY3GR585LA4XiNojFv6NaXqmumi2Eha/f5L6TZOoO8fNXolEPAW5tH1HWksJU0fjpkmzWODusIzG4VJNcbqdynGZQk+eF//OHDo0LP9bc2z84i4T3z7K21/Y/G3PnTQTkmZCNhMBikYjxmQiosB/GGutOplcBqsCgqcO30OlRiGfRzRcX10h4sgX5p6G0GFmbh4tliRVuEAhNqVWq9JWLD6JY59uHjalHqc4gXKpRHevJspmqFYrCJrujnaa9To6DCmVS3T39KG1JRNmCIyiZ6aTy51bXOI+3htN4Wd4UTTrLW/sXcxzc3VNR0cH3VEvPT09aAeIpdVKiGNHPhvR1dUFQcDd5RmDA4OoMMA4jYoU15cX5AsFrGim52fBCWmS0KzVAUOx0E5Lp2BTbu48/eVD7Z72Nx8rPrtuVGMajTr9fX10dXZwXy5ze39Hd083xmRotOoYFTIyPM7Z+TG5tgwZHXJze83Q8DihKJwxoC3V6iMdUYZSo8THJQB9I6xvigZFZ3s7pds7YuUIBYrFogcualCBQ4BMNsPU+AQdHZ0oUWinKOTb6OvuBSfe8sH5Tp4TR7GtQC6TwSmF4Kg91Dk+PKat2M7s1JRX6FtF6fGRyckZdAC3dzegHMp5Z7DUOpRA5eERpxQ6o5gYnwCnibJZRkZHP/JrbB9pY/zbh/nMX1mlY6qTuJlQOX4g09mGCg223AISAmUxEpDrjXBiPU5OwdX+PYgjFYNDeDysoUTRqibooqF0VCLbliXKh2z/0j5dk52/7fkfHx8ol29BvP1gpVYmbrZQCqwSHsslnE1QkpAokFS4uS8j2qBTmJ6eZHpu+kmKotDK00oeKg+EGUECLxJ+uCsRWQ0CjWoVJ4pW4nh8fKSYz9Pe3kbp/oZa5YFcrkBXRwcaqFfqPNyXPQ5d4Pa2RBrHZIYyTH/3GGHW39f7l3s+bOYMPO9j8Hkfgy/6GHze/+FrHVjtR2lF/0rfh49prSm2FZ6o7JrUQvtbAAAgAElEQVRiPstQ3yDt+Sxj4yO0FYsfHtcpAQJqDzXubq5pthLOLy4ol+4ZGRkgE2VAGUSBEsXU1CRTc9NU6hUeK2VIBFFwfXvD3e01GItyjkqrxuTUBBovsbE4RCz1RhXnElqNKm2FLH29vX77rBWdHR2EJqJRb6GVon9wgHx7HqUShocHyUVZnIOxiXGiQOGUYAPBJYr7q1sqj4+UbkueG/gpX98Uwe74+IipyQkeSlVUoBifmPAu8CgPIhRHpVLDJimV6iOpi6k1al5sbLx7PArP3tcBGkWtXqdlU2r1Gql1PFQeyOdy1CsVXxAPvCq9fP+IJuXx/pGpqUlEJTg0OlQ4AymW0mMFpR1YhViPbXIuoVb56ASTmT8xQVJJediuIKmjelolbUHpuIxLLKltIVYRpAEoqB42fFDXGu1SFBbtFEosogRNAk6w9Zj7gwrKCO3DRV78hWVG3h2kb6n3d52DEvFD7UqTL7TRajW5vy8TO2FkeJC0kXhApQcE0d7VjQ5SREOlXEEk9AX21GGcQqymUnpAWUFSIdQB4+OjpFiSRFGuPBKSEkSGQluBFCGbyWBMwN39PY1GnWajQT1u0tvXh1KKJG1RqZXp7eshm8vR+cfaudi8pXehCx1qvvATLxj/4hAjnx3k2/7au3zbX/scX/zJzzHxxTHGPjvE6BeG+OJf/QzKKEa/MPLha9da01Hs8EiqpwEreZofbtW9I5uIEDeaxI0EJdDXO8BA/yDZKKJYKFCvV9Gpl9EYBKstlfIDkgIWHkp3lG9LHj6qFZNTU0xNTHk/CRQP5SqPpSo2TSiVyzyWK5RvH3h8qNBotDyEQBtQHpLaarZotRK6enspFrtw+FKnEV+M8JaSghhF5eHBE4JE45xFCJmcnOTuscrk1BRp+ukPdt8U29hMlMEpXxfq7ehElEMwOJdyf/8IOCRO6OwfYCA3SNxsUG80yOXzaOWR1Q5FrVFDK0WUzdLV302tUqfy+Eh3Vze5fJ7+/n6CIESJw1rLY+WRkZFRDFCqVmnr6kKZlLubEv2dvRgNiDA6Nka5XKKtvQsnQmoF4pRq7fcPdoXePJ1jRbL5kMJ8D816Sv2yTtKyaNFgHVY5T8VQQqocGEWaivdrxXscYJUHTOqnjNdpXOQgeRIFNx13x7cUijlGPtvP8ZcuGPnsIA9nFaoXvlvbaLSIWzEpjurjI0nsPwD5XMRjK+Hx8ZH+bBYTxmgltMolJD+EBFB+rFBo7yBxAaXHO/q6exHnmJoYf6KsGKxSIN6LI4wU/QNDNNOE/5+9N4+RLMvO+37n3vveizUjMnLPqqzKquqqrqWX2TmSuMmUZYuUKNoiRNKETUu2ZAoCBJOwDUGAYRgGvMAQbQs2ZMmwYFqAZdOUZdKkCJicETnijEh6ODNde1dV1p6V+75ExHv33uM/7uvmUKS4Sprphi7Q6MpAZkZm5Iv7zj3n+37f/sE+otBqtymrCpdblubPY9RwuL+Hw3I8PqY72SU3Bc126/27uPcVH/neq6iFd/73+7jc8sk/9zahTH0prQwYWLu9waf/wkeJTpFhZOkT83zpb33l/b9DjJGjkxNa7S4+eIZHI3qdFlYdttuhYwSJyaBvjCE3ORbPaHyCNR3m5maojYVpOlvTnZ8+f8mVy1coxHHm3DlsbeInpvtwlIq93UMG0wPOLi2xvvqSdifn4OgwtW
JcwfTUNHtHe3Q6nfQcmqaw46oiaqBhGu9HC0QVjo4O8WVFb7KNGIu1yvHxKZ12FwmWo6N9uhOTOIHz58+h0ZDn+T+dN+vXcH0oKjsFEDi7tEQ0kd3dbSxpnD8+PSH6ivm5ebz3HB4cYuo9fmt7h6OjIYGU7eB9ia8qJKYm7bgcMTnVo2jmVMMxUQN7+7v4GMiiZXx6ihKJKpw5u4RFsZWk56wLRhBMNJyejiAG9g72cRayhmV2bu63/L0mlyd4/Y9f5K1/4yrdM1185QHPyd6QGEMCEYjBkSQPoolvUUw4ipkGnemcrOkwrQxxKTksbllsDTlQI5DZFAKuCipU4wpTCB/9N29w5V+5QNHN3v95qspT+Ug19vS6PZaWllhaOkuRF5wOx8zNTeMyh0gkWsOZM4sYDBI9S2fOAoLalMMqMSImUomwtbNNZkgBMSSS8sHhQaokVREV+r0+w6NDOp0mg8kBNipEpTcxSbfVoRxWaOlrR4Dh4PCIURVpTDZqZ4Xh7T99DZNbNCSx8+PPvkhJZigf/7Nv4etjpWaOT/zAmyx/y/L7v7sRodEoUJfQ52U1St9W6gwQ0qR+ojdBd6Kb4gqjUmQ5NjcpC0QNEUMUAbHkwPRgir3DA4JIigIIsLW9yc7OVn3iUE5GoxRuRGRx4Sy5SXj4c+eWmJ+fJVBhVZmbnk79wvo90Wm3mehNpA9qK9nJ8SEnp0esb64zHFWpChRhfmERYy0xg9UXL1CtEgjDppD1967mD/L60Gx22xtbgEcVvE9/8WiFRqPB3Nw80UpC51QVQQLleIyvxgT11DZpJvs9DEo5GmOMMDkY4NoNVA0zM3PkxlGVJRFJb+bZeTLrUqThe2NJNSwunkHEo6LJBiXC4uICiOCjJ0bP5tY2+3u/JaaP2WvTnPnkHGoTjUJsGn8KJMacJirB1r0dQlWiorS7OVPLAyaX+3RmezT6XWxmafQseTNlzsZXwqs7m2ze3EnT6qyerAZHNYqEUeR4+5Rq5Jl6bZLlbzpLd75NpztBq9Ol3+vTarZSDKAC1jI9M8fhyQll6YkxIwZhY2eXkA5PbO9sYVCMRBbPnGFrM32salh9tQExIZ+CpGrRlx7Uc3pyzNTsND4K4yok7rHYpHitze0A/ck+zUYTreMJqypNJd+LLhRMCs+xph5ElQz3jkATKcTFhIt6+PcfgyZeXWvwayJsJUFiD3Z3KfIGg6lZgggnoxFHx4cMh6ccn56iWiPjVdk7POTo9ISd3R3K0mNtCrNWFFulCm9hfp7gk3shB1QtZeWpPGBhY2snXU82/RRb21tsb28DBhULRmgUOVPTs+naCIJGYTgcc3Ka+rJSX5uqkVCVTA0mmZubxeU5GGVvd4/t7Q22NreIGpienmF3Z4etnU0kpiN1u/vre7gfxPWh2OxQCDVzxAZXG58NUaGKFdtb21RlycHREVODKfJWg4W5ReYXFpjoTyCkQJbDgyHHJycEVfb29yjHFVKTaY01oJaZuTmMTaE50dRdAEOqnEhhzSKSeG4iGCIhBjY3t3DWMTM3B6qMxp6dmkD8T1wRxnslxy9POFo7phoJqOW9TVVjYuG5TkZ3oU1nrkv/bJ+83cQ5Q6NdYCcKcIaiack7ls6bBeGF8upXN2kPWkzMNOn08gRGkJRxSlRGe0OKbs4bf+oqb/ypqyy8OZOkbTUpI5ok7RUVNBGxiD6+rz1DU6o9mjab0qQbijFgvVCaWBPzlZm5uSTbMR6NQm4s0/0pdnZ33q+QG0XBZH+K0emYo8MjhqMhJyfHHB8dcTIcpk1L3xsyCdNTM7TyIv0sDh7+3BOq0qOatj5RuPHdVxGF6GLNf4NqXIEqXj13/9699Oc1hvmFeWZmZojBJO0aNVhTE7VZY0TrYHMRQZwQTKqeY4Tt/R3WNtfwZcXO9hYhVGxtbRM0MDs/jwiUChhhen6ehblFYlSMBxcDxluQSKSikpg0cRFMrDNg1RDq5rMYAxqSNjSmoKT3BH0i6aY/PTVNwxYYL8lXHBO+Paplfm6OzfUNfJVkP8G8l1r3wV4fis1OgDPzCxgxBBuTYtwImVpmZqaBdMTDGYyADdQasfqNKuliqKt99vb3GY3GiBUOdncZ+7K+20PaaNKF82p7A0869qKRGIW1tfVUXUi6vowFRDHW4qIidXrW7MI801O/cQjw3pq+MqB3vsPRxgmjgwp8wBBSnwlJ1KkMil6DpW88S+/sBK++tIltOSQomYlIbigmMhr9giy3FG3Ds3/4gt5Hmlz7zte4/K3LrP7qJs3JDtlEC9sANYl2axH2Xx7x4pfXkcxgCoMxiWSiMVUndbwHGlLLfjA9wOUuVRiqzM3Mghis0ZRytbFBjOm1m5ubRSV93vzsTJ3jAN4rW1ubCJEsK5geTKXX3Pg0Q1Iw4hEbcHVotYqyf7DP6WgEouztbTOuJ8WnR4EyRqxxabMOkYDBiXD7/3xIOsEboksV4dU/8Rp3/t4D5KuUFtZa+v0ee3t7TE1NJailDagVmo0WvV6PTquNFTg9PEHUggoTvR6tvMGgN0kzz2uFYkRswlalak3qCldZX19LuRsor1ZfIFGZn58nmhT47tQwNzfP9MwURhKJBhLZWeoNKdpEoGk023S7XUTg8OiAk+MToqSqOBWews7ODqNQMTk7YH5hhtmzMxirrG+soRqZn51mbW0dJPwGO+IHcX0oNjtIlNaXL1dZW3uJVJG1tdUEgRTHRK/D7t4uk5MTRKHejNLd/fR4yOH+ASKREKvUSyly+pN9jo+P0XFA1LCxtcnq2ktWX75kffUVVUhi1vVX6+lOHwQngmsUiXdnUwChBkWNZXp2Lk1i6x6JMUL1T6DmTl8ZcP1PXKY13SEMPQZwXlL+ZxREDSKeYsKiTsgKx/2fekw2YTCZRdRjc8uX/vZtMmfIu46snWObGRPnekx8rMPytyyBWIrZLgGHkfpNF0hTaUnVWSgrytIz88Ycs2/NpWMaEYtnf3+bYTlke2eLWI4gKhKEzY01Xr16SfD1oMgr689XMTal0z9/9YIYBBknB4SIwSAEPOtb64jAq831pGvc2kpTdQGMUrSatNtdimZBs9Oi2enQbrTIbIbJHOLAZYadnU3MuUjWFu7/xEPOfGoO28iRUPd4s0ir30olqaTUs4iiktGYbHHnxx9w41+/8v7fxARD5rK6YpUata6/NslHEJNhnOXgcI+ToyNMEHoTPYpGk0F/kJw4EpmansI5x2BqqiYLgxFDkRdIMGy+WsM1c6wBwaM2S8WyGjA2tUzq9DsRoSwrtna2kytI0xDi+PiEw6NDoiEBEozFKPR6ExwdnzIeD7FZhjW1DMhAJLK5vopzjvnFJRSDLVwqIn4fOPuvl/Xh2OwEnr14Rt5s0Gq0MQpF3qSMgbW1NWKM7O0fMB5VbG6lBDFVQVXInCVvOESVwjl6/R6DwQyNLOf44JBGs0mWZTQaBe12g0arRaPdIhrL1FSf3d19nr9cRerAlOn+DM6CjTZRijE4USRWaJ0Za0RQX3FwcPCb/jq9sx0mL0wgSqoGBGIil
BOMIkZpTrZoTbVoTzVo9go657tc/MPLGFd/fhQGF3tE8bjcYfMGjU6T5W9aJO/mFJ0M2zUsfXoRCSPKvZKnn3tFqKgdlUp7ssnEmQl0WNFfbDG42E1Vat1sz/MCaxzNRs7u9iaV96yvr+MyS7PVxpuk9VIVdra2mZyeBCKNZhtBUQk8f/GCly9fghrEWjoTTQbTUzSbTYpGBxVha2MreU5JNyqV1BfbPTjg4GiXSKTVbnN8dMTwdEiv16U70adxviCqZ2K2jTXp60ONKRcLy9+2jBiP+lSZBpc2ngvfusTEUoeDl0fpD6Ig4uhNptcTlRRcHiNaTwRUU5+x8hVZ5jg6OuLk+LT+csVjyJpFOmLWR3Mt61daFKMwOzWNAVqNBtMzs6QGnyBea+lQOkGoSacRteBFsWLIXI5RGA5P2d/dJcscWZ4E4O12uiEYSZGYRSvHuIzexASFNbUkSbDe0Co6TA1mmJ2ZRpxlam4OqoyTo9/8Wv0grQ/HZgfs7e4wmJpgMBggWWAw3ceajIYr2NraZmFhAWsNjaKVMOtG6wlbRruZ7EfNVptGo8He7g7Hp4fMTM/QaDZQEbq9CSYG0wz6U0xNDjCSKpIzy2fp9tqpnxLqvhTw5PlTnj1/liaemoTKahMCW7zHWcfcwm8+jR3uj9l7ukd5dEqo8wOCJHlMY9CiMdmmNT1Be6pFd76LbWTsPtxHxGAFxAaiwuIfPIMRMA5sbrCFxTiLLQzEyFf+9i10XFKWHjWG5qCFKoQgYC3Opia6HwVimXJ5A4Cx7OzuJ8Bjo6Dd6dNuTZCJo9Pp0utPMjk1IAOEEhHh3PkljBWeP3vK9PQkxkWer75gYmKCdqebCBvqmOxMJDHs1BRTU1P0B1PkzTYnp0M2t7fqg5tBsDSLgvFwyNHRMSLKyfER49MKDYZup4MzFtt0nP3kLLf/7ruE04BxIFhiGRDnE5Eah2qS6EhiDHD+04u8/JU1jDEsnj2TBiF1o384GvP82XO2N7c5OTplZ3sHVU2tgiKj1Wwy0ZsgyxwuKlYVY5RebwJrs/QaRzAuIepDFMYBcPDsxXMmB7NYb3j6/DneWDAj1DiihZcvXuJFCAHWVtcSWCHL6PY7nIxOONg/IFaezY00cDjc3cMo7O7tcnQ6BAy97gS5c4n0TGrdeBJppjcYoNFC8ClPJUaePXtKWf7+7XZf6/XPfLMTkb8lIpsicvurHhuIyM+KyMP6/5P14yIif01EHonITRH52O/kOcqyZHn5AhIS5+HZsxdUgFjDYLJPvzdJt9sjywq63TYqHjHK8fEx29t7HBwcs7e3lzIqgFanRZ43aLfb2Dylz4umY4uT6v2vx1gO9w7o9AcEFCQQssjK46fsHuwy0ZsGEwgCz569pIqkmaAKRgwT3dZv+vuY3OIRylEAMWDTMcNqOqJ15psUA0dWWJxLNqHlTy3gcgvRELOEOjIqSLB84a99KfXWTKwvcoO1cPZT86nJrULWtPQXO4TTQPUsEEUIJomRow9UZaS93GTiSpOA0mg2ycViQmBzfZ282cRkjvZEG6OJBm28QRC8dUz0J5Ho6E0MErxelMl+n5neIPlIBYwPSf4y8qy+XEUlYq2h0+2SFVmynilpQxJD0WzWn18iKswNpjg+PmQ0OkWCcvLOCMFirWHh4zOoC1ijRCnSaxoTL8+aBC4wIbw3Xkatqa9VQ7vTSbecoGiMZLmh25+gaDYxztFstUGERrNJq2iyt7dH8J68sGzsbPDk2RNOT0/SRu2SOPvl6gsIAWO09hkbYoRebwoTlWAj/UE/vS5oyqEQ6HY7vHr2EiPQ63WxJCvk5tomuSmY7PdwzhJCSb/fZjgecnhwQKvRxBmHxJCCjkiOlUjGy9VXPF55QlmNCRowNnnKU16Lsnewh7P297gDfP2sfx6V3f8C/Kv/2GN/GfiMql4GPlN/DPDHgMv1f38e+Ou/kydw1jExMVE7A0jRemXk2bPHqLW0Ot2k2SpT6a+YWm3vGQ7HNIoW43HF3t4+AO1mkzxrsL6xyenJaWorB4s1DlVLphk6znj2+CmDzlTdTxJwGSaWDGYmAdjZ2cRqSps6PjxC/YgXz5+DEapqzOrL30ibn3tjmivffoG845JoWA2iyXg/eWmC1V95xenmmDwztVo+yTlm35zFGMMv/ndfJI6SKjUTIaBc/peW03ErpM0HH5HCMr084Ms/egeTZfgSYqn4xx594Rk/KkmjBMWKEEae06NTxqUnBE+rWeDyDBXo9btsbW5wPE7cOYwSRIhWePr0OURP0DQk6vU6WK0gWnoTE3gimiB0eA0gSpZZpvo9nAoGSzlK1VO70026NiI2RmyAQX/AxGQXzYTmRIfB9CwuL4hOKc45rE3dtZnXJ9HMIkQMJcGaNFUO6agdUdCKGBM0lJi4estnztQT1vR9xuOSzfUNJjp9Op02ReFot5vp5kdSBTS7HRrNJqqG1kSf/vQ0WZanybVPlqze1GTitJMwWUbTNHei30Vtuo47vV4SIWOJsUQx9Cf6TA6mMLnQavTTBMw62r0+eeFotlp0uhMszi8y0e0xOTlFs1nQaGfkDZcgpzGpFIJRoGRyMMnsYJD6tqLEkNw/QZKN7+Kl8/jwL3p2v+1S1c8Bu//Yw38S+NH63z8KfNdXPf6/alq/BPRFZOG3ew5T33VCNDx89IhXq2s8f/6Uufl5cO95tyXx1xSUkOLsJE1YsyJjcmpAp91hZ3uL/cND1EUmJ7s08hwTYroDE9FoWHnylMqUTM/M0Ou3cZoEm6KRlUdP6Xf7vH7pEnMz00QrPH/ylEvnzpM5y9TMXHILBJLQ+KvWzNUp3v6e6xSZY7g5SoMMSZVAa6pN1m9wtHlCGI+RGIAALpCJRaKiPvL6ty8TveKCSbY0IvNvzNWBCYHP/hdfQCWZ6jNjuPrHLhF9ZOUzj2nMtyhbJS/31skWHRhLFEvEIM2C5pkuebdANaIEcClGcnd3j06rQ8PZNInE8ezJEyQE5mbmsbEi87Dy+BFehYDFaEHAUiUvFGjkyfPnoBnGQLtbtwZswOVZuplp0t4Zhb39PXZ2tmnmOUf7Rzx68IjjoxNarRabm9uJ2TawmJpynNlEC64QPv9Xf4k4tEiwEJQypD7Y5/6rXyEzKXQ71H21VquNiXXFTMRKxqA3mYZEpH7Z4cEhW9s7aVYRIHdt9vePODw+ppk3mGy3yJxDPJgqRW122m2ePnuG+kQONk4JUbGieBViCIi3iERQIWtmmOhT+tzmJhIUI4ESj2TQyXKUiBohyy3NVgO1hryRc3R8xOHBPpbUawSDlQyJgqql0WrTmpxIPD41PHm2khLUMDx4sELenmBucfl3+c7/+ltfq57d3FeF6KwD7zWvzgBfHcbwsn7sNywR+fMi8kUR+aL3FRmKULF4dpazZ84yO7fI2qtVRJRgUybpcHjMixcv64kVtCZ6zM5NYwgURUZWZHR7XRqtdLxstZq4zKHWEawlaIBM6S9OUwATrRYYT7SSBJ4iLM6fJ/pI0W6ljaFSluYWybo9IoZOqwCgyC2v
XXmtdhaktf/8kOe/sorkhqyV1TouZWK+jWDBKx/5vqtMvdZDi6ymhxSUPk2RNcLgwiQmi4hkSSYi/n3EebCGt7//BpAjwRBzmLw4hes3ufYdF2j2DYNrPeb/wCzPfvk5YupciDyifkgmnjN/sE8YjNjfP0YrICpT0wMm+n2MybFOeV4j0R8/fUreLnACxgYWl87SEgOZ4BXEKFY80ZSsPH3C0vwCVAJYymAZDUse3X+XJ49WePHiBWurr0Bh53CP0/GQzqDP1t4OMXqWFhZoNJqo8UzNDMitTdQUScE2UQ0ZSXR77buvJj0hAZUKpwac4+3vu4E3gLdYTWJoDam/mSbCFltYskZONGWKvwxKp9Gk3+0l8W79ffuTPdrtFusbaxwNRxhxvFh9wTCUREmtjOm5BcQZHq+sMPYeI1CRhmciQkrZ9umm5UG8EDVhniQzGDUpT1cTXitiOD46ZGNjo/aFAyi9zgSdZjdthnherb7g4cMHDE9HiAREPM9WnjIaARqZm1kgSELFn5ubJwN2tr520Z7/tNbXfEChWovWfvdf9zdV9ROq+gnnHCFCbjLazS6dwtFuNlhYXESSVhZTWvKiyez8AsNxxdrzVzTEkGUZIer7YtAsLyjIUG/SHRaLN8rzp08YnwaoDJ1mi+CE8B67OhiQgKmgmCiwEiEolgICNDtNrEZ8VfJ4ZQVrPJgs0ZAHU5w5ex6AwYUeS59eSHd6oBa0MdwpcV1H1szpLQ7odJoYC7/43/4qw/VjUENZeYJEPvuf/yPimHTsqBQrBeJSnJ/zlv7yJGJSvN7P/5e/BBIomobeco/2dIfecpfFT8wxf22aJz/3lMxZxi+UkweB6MF2hZmzs0x0JsEZgghF0aqr5zFVDIxGI/Is59ziEkYEbxJnsNVoMhZPFSqcKcEIRg0mOM4tnSNrNIlZqmaLPKAEYlTOnFvi4rklmkWTjfUtuu0eM1MzNKxh0OvSm54hb7ewWRLwNhoNMptz/KURDD0akhA7hWVbps51wdWXXdZAbQV+TH+5BwLqQrrRACZzRFVKU6YM1lpeIxisCJVE9k+O2dvfRU1ABHa2NnjyeIXT0yOmpidptdoogZm5aYpG6hGKCL1Ok0cPH3PmzCwNG3j08AEP79+nQSAQkRioTI6ogwilsxhTsLx8jkfvrlCFdNIwkqeAKYRWq83MYMDe3j6bG9vpWO5yTJ7hSYF6s9PTLCydJyuaBGMQyVhcXKAwGdELnV4LJ4FclLyTIWow8vvEY30drK/VZrfx3vG0/v9m/fgqsPRVn3e2fuy3XKPRiHsP3qUC7t9/gK+rh0azSSVC3fTCGkNeZBRZwczCHNSop73dAzbXtsEDMckPLJHV5y85OTrC+siZhUXyPMM5sFEwxiXkNpZ3372PDcrYKg/u3sNnlpgrxiq0TVLGI2RZxtmzZ6miIDaCVyyR3Blmrw742PdfR8Sx93TI6X6Z+ksu0lpo0p7K0xFLPMEK4i0f+/5rFP0cFSgkYsXwyX/vI5jM0bRKQBGboKNRFNcC5JRxJfzsf/oP+dQPfpTqSPn8j/wyjU4GeU7WadDqN1n69Bmuf8clgkI2k9M6nzE+GbPxYIvh/hDJYG31JcfHhygVRmr9voksX36NRjOn6BWIuiTTwKAmYkQoXE6WKTak47cjkhUWT0BM+lkJ0Gi1uXDpEu0sJy8adPs9ZmamyJ2QFTZ9P5ORGYsG6opJkRK0ipQn4EuQGCgBTERCOibmCNF61r/8Mo2r1ST7XSqtMIXjyndeIWQ+IeQ19c7qUyWEWqJjYKLdZaY/qHusyvRgkovLy7TbHRp5AQSePHuGDxGDJaqBoMSoLC0sUrQ7jEzOhaVzXFy8hGQ5TiQJsqMiMaRr0gjWKI12l7Nnl4hxxJPnK2hMwzMDHB8N2dzeodvtMz05RRaFna0tdg/2yGtlwNrGJkHS93MhYspAlhWE4DFZJFaeoI57jx4wIuP+u/cR+ZrXRb/v9bX6DX4S+IH63z8A/MRXPf5v1VPZTwMHv11mLCTqycVLF7AYXjt3EaJgo4BX/LjkycNHtXXMYGxMRnxriaJE6+j2p+hOTVGl64tXa6/Y3z+kqpK/VQ0UmSWoMlbPw+5HR6AAACAASURBVIcPKatxPfmMXFhern8S4bXXruCigM+pgsAo9VGCQlDPs+fPUZTT4Zinzx5jjKHTb9P2fe7+9AoiSt4UVEqiVapgOVw74Xj1hMrDyHt8iKiD1qAFVvkH/9kXGB1WGIXJuR4FkWFeohLJgkWNBWcJCuOx4zN/5R/w6R/+JJ25Dvm05Rv/wschtxQuklBrBikssdPEZ4I2QPOc43sBc1DQHjSQOGZuapp20UWiI4pBvEN8pFEIHkFOFashNealTGWFNYTKUpURrSqiNagIVoVMksNFVTgaex6tPEoUYpXksTWGzOaJ4OJNEgS71Hu1ErAxJHeJSxtU/w9kNHqW3Qd7ME5Br6v31jEYQogQLYvXZ5GgrP/qFgGLKrz48gabX9mkOZGhZSQLSR8pNuBNJDNQ1MJ0aw02s5giI9TOBMkyXKNAxBJVsGKYX1wkb3RB0zCiwhFEyDsdJFoyb3F5m6KpiA8pZ1YCaIWYCg2C2OS00GpM1rRkWcHywlkssW7NGDqdBoOZWYxzmFyoLAym+wwmeqyuvuLw6ID5xXm6ecazp084PR0RrGdl5RG+ZtlhHBnC0mtXyL1w6bVLzCws/xbvwA/G+meOZReRvwN8KzANbAD/CfB/Az8GnAOeAX9aVXclGfD+e9L09hT4M6r6xd/uOVqtll6+/BohdZjITIYxFb4EU4AfVZSV5/HjFQTotDssnzuf+nkmqbY0knytCD6O66mVxWIQFU6toSBgBErvyZxQiUXUkYeytvRERHIYRyiSsftIIsYYihgY+cDKg0dcvniRpy9fsrz8GsaZxMM7PKCcHPL2917nZKdktHtaOzBAxZC3hckLXVxukEZG3iiwWDR6yuMhtpkTjcM5T/SO0o0polJFh1X4qb/0Gb79R/4IhoA/jRTtgqpQJIvYymG0Yu/5Cbd+7D7f9MOfZPPWFl/4619h/o0prv7xiwSbKtNmv8n25w7Zv3mIEPEVOAwhB+OTcsNGi6kN7zEruX/nITFGrr7xJq7yROsQAnfu3uPajesYC/du3+fqtWupzxQdIXoMPhn3Q8pEPTg+4uTwMCXdq8WE1HpQI4hJMYDRpsdcNDx5+Zhv+Pff5nD7iFiCiQb1EXFgs0BwOVIKaiM6DkQXKUxBiBVhpBx/OeLLSKYp08GPxrzaWOf8uWVsJpA5tPIYbN2H8alijw4r8n7PVUwkRkMMgrURiZ5gmknCFJOm6P6du1y9frW2tGl9jDbEfAzeYYMhNBXrhejrjFxNEYjRRDQWSXRsa88saWhnvCAmoCZhyaxV1GTYWBE13X8ya/BeIS8wIYIkOo0bQ2kDVgpWHj3g+PjwA+0Z++cxjf0+VV1Q1UxVz6rq/6yqO6r6bap6WVX/iKru1p+rqvoXVfWSqr75O9no3luZTReYlZT74CtNQ4sSsgyaRYM
rl6/y+tXXWVo+w+HhPs9fvcQYBY1sbm2xvb6RjPUux6rDGSiBaIScMSolIQSMsYgKWQlOEpvdR4uI4c6dd9AiEESoCkNTHEUES0bDWq5dfR0pGly4uIxxhkiSW3Q6k5w/d5mi3aHRSC5KRTGiuEIxhVKeVIQKMgOigcqPk32pkSUicQzgFdEx7SgELbBVerN9x3/9TVgNWBXaEw7rHNWh5+d+6HPkAahyJudbXPq28/zif/P/MXN1wMe+5yonD4cc3xxi1OAyx/4XD3j3595l93AfVciNYAPYEEAsjx+v4MMQ8oiznhAE7wNXrlzEVEIkT35UBO9TcI2JltcvXCCaSIxKcCXWgliXGu15wkZNthucnZtL9jlAnCOIEjUZ2EWEV89eMt5P/L2lM8vsvntIOI3c/j/epRyVlKYi4Bn9snL62cDB50dwHLBBQS2Hv1TixxWn7wTC2JMBmkFe6/qWz59jOBrx7OkzqEqsRPb3d1jfeJU2IO8gJMtWCtbwSC35efpshZPRuA63GWNjAKtUASrvkzRIDbfu3uX2vTuYrIJxjuaGO+/eRUaa7HXRkWk6hYharM/IgCxPmChjLJZ0/dvMU3Pi00SaLNGljUsDpSwjxBRL+fDePcQnMXExHHPr3Ts4F5GyIsbfe8D418v6cATutFp6+bVz3Ln7iOvXryUCSel5+DB9nIyFhiAVR8fH7G1ss3zuHGoiahxG6otTDSYIat4TDaeLNARlZeURo+Eply9ept1sETPlzq27XLx2g4II1iZyRFlisjbCEUKGEUuwiRSiVag1fjGp+NGkYyOJPdsXc/p/eJKTnVNO1oYpotBo+vmM0Jpp0eo1abQtWTOHwmFCwI8VcYZKDC0iQT0+aoIFWMNP/sX/l3/tb/7LVD5DJNAk4MlxeeD0NNa+y5ovd2Kwatl5d5MnX3jBpY8vs//FEa0bDWb/UJ8iz1j/uV127hxjjcNIQOtKwxihUoOrYqJAS/IHlz7QwKIZqHhMzLh56xbXX7tCqygYW1ArxDoI2sSY+p0eyFNj3GOIISlonDFsbe/gxxVz8wvYrCKq8PzZKs1um363i7UOYqTI6q/1FUIr6Q0lUFUWU43xYnj46C4A169dx1vP1X9nkZW/sVv32Coerqxw+eIVbEaCTCQMQro+9L0NJrKzvcP6qzXmF+eZnp5NN9KYdISPnzxhZnaWdruLUE9k66ECIUUzirXcvnWT69eucfvOHd5++waZFAxNII4rHj14wLUbNwiiOJ8Yc0ELytNjXm2vsXzpAibWWkpNR3qj6RozCjhDJEmk1AKVUJL6gDZT/BhcYdCgWIlUw4grcm7evEmeFwyHpx/oyu5Ds9ktX76CiwYxZSKL2DwhOYyj9GP8aMirtTUuXbqIBI9xGfsHBxwdH7GwtIiKSZGLIVm7chK2GpW0JemQQAvrTJ1Xody9e5ur119PfaWQBhsxC5Sl0hwKN1fu8vb1Nxgbz71b93DOce3N68SQjNwmSqKz1P2YxuWCiW9ucLI+5mhriDhF1WKD0plrUEy2sTkY53AuxzXf63GNURVitBixOErKaHjjzFs0TYfPP/oCtohpA6DEqEVzRcoSQ5ODgyFmrGiIjIcVzX6DzdsbPPzxF7z+za/jJi1ZYZn5xgm2fmGf2//PA/K8YHJmAChGLY9XVlhcXKCVp4CdoBGbG+6+c4erb12FYDEaESvc/Mot3vzoVXxpEjwznXkR22DsAxknCI7hMLD68gWXLl7CmAyNIb1uJg0GQkhIcZ9c7ECqGFUhq28wyVMbiQLBZBjvyVCiGGIY4aIwEiFGj7UtvAs8vH0bUeHq1WtJoxTBNB1SRdQqGksqLKfHpxweHrB07hx5SAySYDQRdjTlFYv1WAyxElQ9MZda45ZyYNUpd965w+vXroJJRZhmFlMm/Z216fsQDWoEhyJBGVEyCsLG02e8fuk1DkfHbG5ts7x8jr3dA05Ph5xZnGNzcxMjOTMLM0BAvBBE8AasRlyEdx89YmlhiaLbQCWQlZZoDF4Vm6LeOBxFnjy8/YHe7D74IxZgOByS55boSkRN8h+KJYqh1AonQrfZ5PLyBTItEFvgMXQ7AxYXziSxp4/gI0YimUnhdRFbEy5ATYeVRw8Zn5zy4N5dfBhy48Y1skRjBKt44Pav3sGp4Z1Hd3jzxltUjUAj5Fx7402uXXwDrZv4AYOLgiNS2shY4ejBkIPPj+gMmrQGRdpUJRDFMj6KaOnJGkIji+SuQkMgRo8xNumwbKpafvwHf5ZcLf/hN/8V/uQb34W1CuLIBFCX/LrB4aQgaKLphqoC6ykPx2zc3uDBjz/j4vlzbNxa5/7PPERFqMYBFpVznzrD7MwUFouQ46lYXjrPy+cvGcUSEZLQ+dSnnlw0uKh4yQiV4+obb6LaJMsS306jxfvI7Xe+TEs8Mi5gnJMap8poeMqTpw+wznNwssPz1VV8tKhpABmZWjIVMhSnnqymmBAtEU9UQ64VLpY0RQhGuHnrHdTmlEVM4dGmIBDIfOStK5e5cfVq8usKWGsIY5+yXqMDmrjg6Db6nD9zDqOGtd0dXm68SkpkI+kmqaCaEdQmAXbuwFnevX+f8fiUkQaiV669fh0jgXu37uABVYtYRYqMWCb5kUoFGvAKQQwNMrrGsnz5NYIVWq0eZ5YucnhwyMnRKWcXl9jZPWB9Y4uEyFKCsam/KYYXK084PTmBzHD5yiUa/QZFdLjo8M4gWUXmAhoVb4SNV0++Vm/vf2rrQ7HZNZtNtPI4TTaw0gjRBsjLJKcSgzc5mjlKyqREiYH9g3VevVrFkJT/Kha8RTw8f/6Ug719qHwN5PRcunKBZttx7cbrtPIGUhlu3r2L2gyRZO96+40bWGv56MduYFW586V7VBJ59+4dpADjK5xN/ZTSKSoOMYLYkCAXMTA8GjPaqcg0YkvBxkhrNqe90ESc8PM/8kXWHuxhbNqYIenV8irJ/r7rf/yj/Jlv/EGKrEG/2ceJ5Sf+7E+hLuBKSzSWGAKHOyU//UOfIZyO2bizy8/8R7/IV/7OPQ7XhsQoeLVMvjbB+T80RzwYYaKycXOL9Xe2iLWb3aqSk/H0+VPOLi7Syi3CGLUR37TcvX2HOB6jzmPHSZxbYDA+pCo6NlPvKbO8/vp1HIJtGrThyfKCy5ev0Oq2OXfxEjFaBt0+y0vztRXGE0LAqydq5Nmz59y8dZuTwwMiHqRKNyuNjKxFjDAmUaU/eu0GrorkZYapfJK/+ATyHBkHEnHjIXcf3CeaihGelUePk7ZSRoircO+DEZWZqSkWFhbYWF/nna/cZHtvG5dJAjOgrKysMBqfIgqXL19DGo6mgsMhDSHLHB9/6y1skYGWaMgwVYQiJ5G0TepVShJjV2owuSG3Nh33bSQ3Sm9iwNmlswQbGcwOeOsjH2F6ZpZYJVGyIsRouHDpEt1uJ8URGINgqOp2xt07txhVkipvA3dv3uT3IIX9ulsfimNss9XSK5cvJy1QliQYMTrKUcXKo4dcu3IFxDAaDXmxts70zDyHe1ucX76YsOkxqeWjrYm1lj
RF09RzEiA4yL3io6BiQAz3br/D1WtvYAqBIEQN2MrjjSULAoVHgiKaEbKAEUesJD2BlCkiL+Qps0GTHal7JWP6m/sc7Z9w8OoUKS25Knnf0Vjs4TqO3AnWGTKXps+xSj0ZMYGYOZ79D5v8wi/8AoUrOB4f82//b98LDmI0NA2EWKHBEaqKo/1TPv9Xv8in/9JH+cx//AUAOt0OFy9dwnTBXVbCFrQnGkx/S4d7P7HCuz/9DIBz55YZ9DtU73UeVVDxqPwa3gqjidZsDbe/fIvrN97AStqgb976Cm9/9OMQKm7fvs0b165z9937vHn9daKaOqnNpRAZcezvH3F0sMPSubNsbe8QfWBuYRGvgrEpsYsaFPDo0SPml+Zpt5pQplGxiqkhrRG0oByPcDnceec2b9+4QQgphzU6gy09sQlmCOIieCE4wZiAx+HUcXx8wN7+LueWzyXEV0z9WAL4OgYALCIesYmJFyQCDjQiuWJGDqxSaqShJOFzFjEhcfYCUuPDIlEDt2/fxWUZV65dr5l6ESeKkiV8lIkQPCG6BIIQy6v1V2SZYXJ6OuVmVBFjaoeFMVAKvinYWIFKsqxFQU2GiUN8zHj86AGnpx/snt2HorITVW7evJlQSsOcqsqwKK2G4+Ll17j/6BERpdHIWbpykfZkg7MXzxEVJHiIyeP5HgLc+LR5PHv6nOPDI6w1mKDce/ch5ahEiTgq3nj9GiZLF0/QiEG5efcuuYmQCTEW6TgdM7w2GI0q7t2/gxDQ4Dg6Uh49elgDLJMbw9HAWYepLGasIJ5H/+gFn/uRL7J7b41GXmGtUFgheOH0sGR4PGYcLWFkCGPl//qZHyezKSinmTWR9zRow5BCc8jwLpDlGc1Wi2/4c29z9Oj0/ddTVZAJQ/76e4MUi2vmFO0m8wvneevNN/nom9eZmpggiMWqxYXAw0fvUo4qrMC9+/ehOuHO7dsJC6Xw0TffJEtNOqIX3nj7DW5+5UuQwVtvvcGd+/e4/vp1xlg0T0dtEy2uTOLjfr/F+XPnMGKYnZlhbiHlehgUCRH1HhMjMaTKpVUkB0u0J0RVrAmISCIdV8Nkp6s8166/RVBBi4gVUB/QQpPAOPOpD+igHJWsPHiKqxQTK/rdNv2JCVZfvEQkhR5FLKtba+zt7PHyxSrHB7vpTZbOp6imo7ZRRbwhGM/NW+/gfaz5hWmy7U1MJGcMYhOevTCOj934CNeuXsUBuRqqccmjh0/TzVnS59mQprXb2xusb66ysDDL9MwgvU6q5C7BHWJQgkbGNvDg1m2GwzEGywi4fes2WYiU0iQ38rsKc/96XR+Kyk5E9M1rnyBrVqi3eK3SkS4Dn7t6nG4JYlKPy6aGtlYhWZYkES5W19cTcWN2gAsQNUOoqIzgvKHKQYOlCiUtk6GhAuMQSRQPdWCNSTmumlKrohrUpuOMqSKm4ShLfR8s4G0yeNe/ByLQebNJvpxxtHacKhFVnv78c7Yf7PHmd19j+q1pevOtZJ0sGpTVaWJjGMt3vvVdfN9Hf4BGlsJiqlDy7/7YD3C4v5uMAg2DqdLPc7w/5u//8Gd/3WvZ6rS59PHLFNcqzK4lvGvofHyC+T/aY/vzu2x+7hBi4uYZgZWnT5mZHtDKW7jcElytZdOQAAKmqFPPKhSPlYxA5PZXbvH2G28QvXLnwT1uvH4Nbx3OkRD0xiWiC8mxEFUxMbJ7eMjx0ZAz589gTEBLSySw+uol3U6H3qCP+kAQi8PQUGFkIlYNd+7e4caVy2iW432JyQzBW1QDWYAogWAsVjI8HonpvWHroSmaXA9GEv0mxir9HY3WQxaXSNgasZq8qqlH5usYRYdqlW7KUjvWbIMYK27fusX1azcwLkvZvkHJnFKWIE65fecub15/C7GpWowuIAGGJ0esvtpgcXGejY0tzl+4gKjiosULWCkJGlARiDnPXzxnsjeg1+8QNWKzgvv37nJ2+QKNzNIwhuEYJEuVcm2M497Du5yeDj/Qld2HYrNrtVp69fJF1OekDJySkRisMYxGFY/u36NoNFg4u8Dm2iYXLl0kgbXBIRCUoDYBGmsskqimixFh5ckjFs/M0W228OqSfEqUECPUotxoFMZCaCuuTAZxyQRTeOKpIJlCcIw0J6ekhk9grCGQ6BsSA1aEwUfaDL6hxyhU7L06ZLR5ilHh8Wefc+ZT83TnOvhQ0ZprEgNktUPizLkl/sb3/E/vb3QAp+Upf/knf4jV9SeEQhgdKUVbyI5H/N3/4HO/7nVsz7S4/l2XUTRlDpiMomNoNCzD7THlq8j4KSCxTrGKqE1GJVvz/AyWoML/z967x3qWZfV9n7X2Puf3uu9b91bVvfXuqur3zHgGMwhjrCRjm0iRsY0Zkgh5APOyE0FQIkOeSiwlSkj4A1mJEiOIjSAQImQS8bABGeKx8Rgww0x3T1d3VVd1vetW1X0/fr/fOXvvlT/WqZohMwMzMsPQLfaoW9NXVXXrnt8+a6/9Xd9HS0sl6lyzThZmarzyiVd45n0vMyxG2xbeuPI6L7/0InjCBrkUohbPl3A/LhLmuQ1mXoBE/dll/6xUE1q0I5UX74bagoWKqRaqQUIaIWdBJFM1yiffeI2XXnwRU6WdtgT1Ag4ZNJArYGLknhImSuwVrC0kjIrC/uGER9vbzM/Pc+f2bY4dWyZWFaUYJ06s+NXTxM04F5Y8nSs0FKnQ5FQV1UI2d2XRNlPqqoNDnBenpnzitU/w8nteRFUoybBKoVGI7rYcc6ABQg5uHKDuCagiBDNyhlaMe3fvMjszYGFxiRs3brJ8bImZ2TlihJLBmoYrb17j4qVnoIrELJ3JraN1V66+wfhPit2Xfw2GQ7t8/hISA0aL4RhaGyFUhk4BxFnv2QX8RQyN5qN4y4i6i4QVv+qYFEL066upUNrg5bFADkatSqZF2tBRXJRXXv0kL738stMJPv4q73nfy5QcEaaIBnIqRCooCQvRme8kLHeByDE4jmLG4p9aZOmrhuze3WPntw7I25HcZm69dYO9vd9rkf3V/8EH+O1/8Aof/Z3/l/df+AAxfDrr9WB6wH/3S3+H37n62/zyD/w6X/dffw2/9F999LOeYX+xx3u+8RKlDW5sCfTnahafmSf0ldiv2fyNXTY/ukMp3VBEExjcePump3nNjCAYV9+4ypkL56mrHiErWntnQ4qYtLz6yqu8/N7nefUTV3j5Ay/CVEh4d5TKmGtv3OTy889SSaFJeH6Ct0xYHZA247bujooVw5UBTxUSXvBKLtBzpxPMicmmhmTo1TCZGqrCJ195hZefe4+3kLUH3rgxvSGSsdSn1gmNGdnEp/eVdqqbjFZCMHdZUYX79+9TaZ+FpSUIkb4VMoWi7mJdUkZiILWGhgjaQHIViAUlNEJRD4+qihHrzEGKVB09Kucx5B6EhBEpUlweWRm7W7vs7e1z6uxpt3834/6jB9QhsnRsGY9ScqVLNu2MOoHWceRS1W4DL9k50gha4Oq1axweHryji927ArNrplNKJZhkUmm58vrriELPjGoq7vyghWKRDH5FECO2w
tbmHq+/+jqf+uRrPL7/GHLETJEQKaa0AVKTUSYuzI5KtACiaKkoVeW8LyIvv/Ayak4teeGl58mNAWOKBA7HwhtvvEGSRA6FqSTEMklrRAN7R4fcvXn7CWuV7U9usf2b24S+IlUkHTbcvnGDlYVjzM3Mo5VP4wBsv/B1f/4v8vLZ9/6eQgdQpoXrP32Lf/y3fw0r9jkLHUCIiokisTx1XSa2SDSO3pjw+Ne2sNb9/0QLGhIqASVw7sJ5+jM9rl+/RpoWnr38LAOpHFfSFskeWB5LQ6blpfe+hEiP9zz7EjoVQFxP3ELfZrn83PNEiexPMm/fuIHk4liqGtpO0ZA7DWcmCNQYpWm5desG+0fblJKpsvH22zcYH7WuM6WhZUKxhARh2hS0X0CFl154iRwTJWTaHEgxIGaE4oYLqq0PYGLNZDLm1r1bWBT2j/a4f28DLbV/bhkosL66xvLyCkEVSY6J5ZI6ZUXBYkS6rFgoxGKAYAkqgTYKRRtCEc8eaZ3onTJIcpJyJDmXVJ06k2KB7GoU1YhaYGvzIY82t1g7vsbS8tLTIHWAEhKpTN0Zx4RWC1euXiMzQTWDDsnqRHuazLE/0cb+8Vjve9/7zHKmDYV+jkyY0quGpNKScubWWze5eOESBOPocMzjzQ3Onj0DncQIhFwMwWPk+r0+C0sLjliI9/HRitsmheDM+eITyCLB2ebFyaqtKrHQqQcCao1jOdEte8h9QjQKDRS/dIngVz2cJGvdTW3p/SOsMfavHFGeKAyyoET0uULpt1z9hRscPRrzxpU3uHTp0mfle04mE37gB36AH/7hH/6czy72AtWg4vm/eo4gfTc+iBDFzT9nTwxYPr/E1r/YYes39tzQILdoCIh4YdVSyJJIGWIttOY61NqENgUkHFGFQFJB20gvRKa0fiXVQKEgGrA8JVjlgnSBaOYUk9Y7sO29Aw72d1k/eQrpbrkbDzdQ6dE0E2ZmhszMz3L75i2Wl1aYmR2gwbXJlo3XX7/KcxeepaqhxSA1SFWRp4VcVwTLlFIRFMp0SuwNsTIFChIjk4OGu/dv88y5CxQVz3AwD/rWquNjml8vyerZrsGjKclCFRreuHaTtVPnuXXzBuefOU9VVUgxWiBaRkYZJgNefeUTvPzCeyhkolUkcdpO205568Y1Ll2+xJNYSwl9WoMoLSI1pgWxQs5CwDCf0jhJHk+pu3nrJgeHB1x49hmGVcXVN69z8cIFLBqqPWozpiWTEa586lPUseLoTxQUX/718ssv2xtvvMELL75AaTNvXn2Tl55/npQFC4pookN+CK1QomEasbZLS9cWxLlGps4qb1GMjKSWUNVIKVx96wanTp8l1EodFUuRWEGbx0wyDKSmkUIs6u4YkoHKweqM43pK5wLiNAkzJViXKNppRi20BDMgsvjBEfPv6bFz/4DDrQY78itLqgtv/uJ1Tv3pdW79s9t88D1fxYc//GE+9KEP8cwzzzCZTIgxEmPkB3/wB/n+7/9+4jCSjhLV0MO9Yz/yvq9/loMrmd4LBvSgZHrHBqxeXERrc0v5Tx6w8et7aFSwwIO79+n3a5YXF0iuG0dy4frNt1k/uU6v7pFDgQquvnqVnDMvvPCC20yZMW0LvVGFtb73chJqEZfVlUDOY67fvMXlCxcpnY2+mUdaFuvyETRSzEOANEanG+EuNhHj7WvXObZ2nJk4JIs5ly8nz3pQwSxSW/IhKc5fa0shmtGUwFvXrvDC85fIWrlZQMxug9+4M4iVQtRATn7tDVXl3EMMLUa0LsNVPV/C2i61kQRUSFGa0lJL5T54ESxBDEYiUFGYaKBqjCxPQqoVDaljEThfMAX8Ypo9x1bUidRC9A6wmD+7AhaNFD2kHHOfVP85KrSYq0OCEcZCHkA6aohVTRK48eY7n3ryrih2w+HAnrv8vMfcFcVKotTRT75OD+qReQHUrxIqhSC186ckI8FzZInmmFTX6d26dZvlY8v0tE+ou/SpbgNPJ1Ni3UMMrrz5Os8+95xTSFA0Gma+g1OCNJkSQ8Xbt25w6dJFGoGqVSQk90pTI5dETgWtawfoW3e2mP8zQ0Yv1Ggd2PzVfQ5vt1SXEwwFexPq0/DKP75GO06oKuPdMd/3fd/He//NF3j/uQ/yQ//TD/HjP/7jvO/bX+RTP3GNl/76cyAeFp7Em6dSDA3CaHHA4uk5dm8fsnR+nv2Nfd78RzeY3Eisrq1iAnmaCaqoqFsAhoRYIGJM2ymjqoYinmGAsy5iUVI2+lF49crrXH7vc4TGP5u2QJ0DuS4EQmeJ3tK0Lf2gmAk5yNMJp1hCDB48fuxBOydXMAFp1QtKyEiqALdjSgiokbKPQdOkoQwjMwSmXQZvJFEkggVMhChGmxM5+bGXUHcqWbAhEQAAIABJREFUroWcCr06UFphZ3uHw/EBZ86eIRW3s1LULdTVaPC/f0otVQ2VCYXo+JFmrPhAIqA+GNOKZjqm7veRLBQSdYKmUoq4NhjrogbUvRVNIJXsjjNPHJqf+BSXTkcnXogRL4ApBLDCjetvs35mndoGPkHvASViJK5eeYPLz76ISMP1a7fYP9j9k2L35V7DkU9jS4iUqdI2h9TD2k/0HJ1ISedvp4XctPR7EQhkQhdSnLHcOTN2wckpt4RYIRq5+fZbrJ88SRh6ULLGwJuvXeHCs5cIuaK0E0rH0tduIjiZThgOavKkcPXmWzz/zGWyiFNOImgjhAhJhJAzO/v7jI8mnDx+AqP4Ca5C/1Jk4cURvWN9prsTjh63pOmYNhWKurGkOouGD33F1/Hffvd/z2ChD8CL3/gsr/1fV0DghW+8TPtJIb7sWF80cfytVkoLca5mYW2GSgLTB1M2fmmPbEbKQogFiZ6ytnH/AcPekJnZRbCClUybE6Ffcevtm5xZO02MPQgNWPCX2xKNGZVLCigIwdnCUAq5RGptSSIUi7TTI+7fu8+5sxecqFsyEro0ewy1zOOtbZDAytIyqWkwMUIVsKhoIyANTSlUsabCNc3JlNc+9Qmefe55elXgaNIyHAYmpUEbsKlQ+hFJDakErl278nSf9Xo9Tpw8wcONh5w7d44Ya/b2t2mOxqyfWGWaDUUJMZCK896alIgauHv3LovLK8yMZim0JFEUDw2iKkgLh+Mxg5khb165yuXLlwglMm6OGNZDmvCk5FpnPiC0ZvRNSFJA3EYqBo+7VBPn/UkGSe6jB08Y84gFp9ogHnmZO66oCW0lhNwgsQfi1Jyd3Sk3r7/+J8Xuy736/b698NzzZDOsCG9dv8qFixdIQakSTNKEQW9ApHBwOGZze4tTp06RU6JppkTteSBKyEySE/SqKnL/3h1GgxFz87MEgdzRLKQ7TWPu3COSYGRqVaYiTI7GDHp9rl67yvmzp+j1Z2lz6xpMCxiZRoxoSuyuXxmouquKSeeSkv3kb60FMsf/7BJxLRCsYpymtPuZdtxgWQihIRflJ//Ln+HZM8/xp7/z/QhObYj4da3KEQuZRCCYaz7r+cD82hKbt7aZWZ9h94098m2jqRLl7SfvRueiIdFdRwxK
N0JO2XNm796/y7Fjq4xmajDPQ40EiiVoxZ+TRGoS0yBYW4CaOhSSNeQQ6RfI3VGBFUroqB7SDQtQSrGOHuRSJucyCg8ePGAw6LM4v4BSSCViFG7eusHaiXXCoHIqhhnXr13nmXNnGeeWOzfvsH5hnbc+de2L2nPD4ZDTZ844dpkLvVixubNJRn0YAIBx79595meHzM7OkzOoBn+WQRApHO5P6c/0SEctN2+/zfmLF6liTWtTqhJ48+pVnn/mEtMK6qK01hKswqTt4hcrRJ3nWayQcaPPWEWiAMVoSouEQPWEMGiZkoPjjhgUd6wpFEwzmgNCQiOMdyaUKnDz7beZTCbv6GL3JZ/Gfp7c2P9RRK502bD/UEQWuq+fE5GxiPxu98//+oV8jxACJQXPGlB45sIlJECVIOmER/c30FJIWejXQ06vrYOULoDnHtu7O09DkPe29mjH+wQKJ9dOcjg+ZDpJtJU7QYTihOSQijtHCC6wDoFpTmgQ7t69g0S4dPEiN+/cI2Gexi44R00CPVEihqkSohCk+Gaz5JiedRheNLa3d5hMGh7/8wM++WNXufvL99ExrD6zwHBu4L+3ce+3Z888xyfe+t0nIjc0CzocuIljNAo9hrN9+os94lzN8ntOEuvI8qUlJg/HrL1vhaXnZ5GbHdcOyG3L+ODIuydLjNtD2qaQ28hk4g7Ep86dZjgcUIq4AkGg5IIRyQi1FkrKTDQQzCjBKT1GYXyU8NereBh423JwcOjTy8nYzUARsiiJQM6BqBAk0VqmUFg5scLs3Kxz1NRQLSCJs2fPUYcnhQ5UAhcvXWbcTrn2xlVOnDz1RRc6gKOjI27fvM3jjcfcvneXo5xYWD7G8vJi5wUQEFNOHl9jNFp0N+ZY4X7uBcktJRUebNzjYO+I23du88wzF1CDUDJihkTh4qVnyQXaoyNyMXoEQCE55QahC+cpaBSm4yk7W5vutFLc0mlnZ4eDnUNKdv9EM+ueUWbaTkESQTJ1NLR1M1EVxbJwe+Mud27dYjqdftHP6I/b+qNwKv5a4ACPSHyp+9pfAP6JmSUR+R8AzOz7ReQc8PNPft0Xup69fNkGw1kgsb97BEEZLfShCWiYYqVHlEwpSrJApPWTFdziXFxkHcy5W0VdI+uqBJc2mfgGKRZoDg/oD/tIVAe0sxIFshR3Pe5soQKdE7kIqUuFV8TdkbV4x6OCJQe1iWASOvKrEdQ6LtcTDzyoLvVYvjigPlFRDhQdGdv394FESco//6Hf5D/+u9/DR1/5pxiFXr9m5vQMG5/aZvbYgGZvysrpk4zOR6aHU+q5Hu1BJm21JBoefXSb9rZzwSY5MRj02N3f5fGDTZaXlllcWuDho0f0Y58QKja3H7G0tMhoNARVSquEmMgirtftMLiomf39A0Zzs55KFsV1yeb0kvPnLhBCIlsgNy33Nh5w7MQJtjYesr6+7hw5CT75NPOOU4wmg5GpgnYYl4OQVZfLkDz+GtzYnaOjA8Tg+ls3+MPc+/ML8ywdW6aue/SqAf7DJyQH5xiKc96ETCmO/RZ1f8PbN97m/JnTFAmu6FFzQLV1t+GGzJ3rdzh34Ty1QeoyOqwbeAUTSsf1tNR4KJBUWPQwzWnT6SBTi8bgIVMmFIEHt++wurJCb6ZPSEbJhWxKTyNJMkUS0gqvvP4pcs7v6M4ufqm/gZn9066IfebXfvkz/vNjwF/71/ket+/c4dJzz1KK8njzIarK7Ow6iGK5gpyhNkrJPoAwIU0zVqZU/R7F6GIJ3ZonFvclmxxOnBpQe8am4ClZj7a3WOufIORIKokYSneVzDT0AFcYBIBg/pLPzPkkUSG22V90nJqFFrcqV1Bz7zVRL5ioELR0Lj3C+LUxd69MmX1fzeRaYv3DSxy/tERbpry4/n4Afvvux+jN9tDKmD05y9atA+bmBszNDdgTY+djm8xeWCbdSzQPMu2hsf0vd1j8wAyTWw4FTJspBwcHDPsrzM8uMD+zzHTSsLu7x/zcPHVV8/DhBktLCwz7QzodAKrd1Lt0RV8yB4f7LMzP8fjxY0bzsxxO9pmbn++4Zsr5C+fdOJSKCsPqmrOnz2BSGKyvIwg5Km3bMj2aUmukNxq4S283BKGjhD1RWSSJBPGBRNHM0eE+pcCt6zcpT0D7P8S1u7PL7s4uy4uLnDxxkrZkNCqxwyitM+lEA1KJE36pSaVldWUFC8rB/j6D2XlEA1ICLYVgbq554fxlhIa2MwqQbo9ZFDeXCOXpldTDhgtSXK54sLtDrCLTZkpv0Gc2BIIEgirnTp8hNYU8Fpc1qiDZyKlQYgFxMnav1/tDf2Z/1OtLXuy+gPVtwP/5Gf99XkQ+DuwB/4WZfU4WrIh8J/CdAFVVQckUAhfOX/L09pIJkjCUHHzgLyoEcTyobaccTg5Y6Q3cNsfMU6PUJTZk2D/aZzQzx8h88LC3v8dwOODMGTcRsGJoFZjmQj+45fjBzhaLC/Pu769uAPp4a5PZ0dBJzeKC9GweNGPF09fBCB3vDrWnV6GCC8Q9UEWQjod3+PEJGiKTmw1pVJjuTPjLf+EbAFh+ZhkNgkZoJomVC+4wfP9XHjJzecj28S22Xhtz9+d3kJQ4nBwyOzvP41/bcRVDKAzjiNFwhBY4mrRkpjTTMXs7OywvrVDXNSsry/h4W0AzEe86gsDB3h6D/hCtAjvb28zNzHHh/Hk2t3bY2dlmZm4WTeLdLN6pydQwRyIoBazyl9hScKXLuGHz8WPmZmYZDIddWLcxaQ8JoaKqe2hyjlmUGhO3vA+i7G7usbW19YfazX2utbm9TcGoqooYKqqqYmY0IsbKTRWCY7GKeriSCBubmwzqAdvbO8zMz2K5IKVyTFIShQCSHDoNHtfYijIdHzKnMxTpOrdihKBuTNrtGxAWl1YIUsjqpHArcHB0QK/qU+oBKMQugc+ImBVK9M8hl44T8y5YX9ZiJyL/Oc5M+MnuS/eBM2a2KSIfAH5ORF40s73//+81s78H/D2AEydOWAiQQiZPFaxlb2ePxaU5ivgAQArOWC+FlFpMjZXVY24IgBcTDLftjiCtsLLq2d1i2TfIwQF1f0BA2dvaZml+1jWwIlhxK5/9vV2W5xbJMaPiZNJzp854VgIZj5px7ETFu7lQHMOajFtybhmNRmDiFAxRsnbVDiAokkGL+7Q9/PV9BGfPf+X//FXcf3SfwzfGFBFCdDDfstJ/rmL+K0fc/vFt7t69y+mzwVUlktjfO2A0O++EOdwZmU67mwzatuHwaJ/5xXmWFy+ACclajG6CWA9RC24fj3uv7R7uMej1ifQ4ffYMIkK2lsOdPebn5rzrroHiE0Cxwt7eHgtLs17YpWAmhGxIEYjCYDTLueGoC0HSzmlGaQ5bqh70Yg8VYX/vgP7A0DpwsLdLycZoNGR7e/tLXuwAtrd3WFpaAnO3kMGgRy01RX1wI6JIyiT1vXP61FkktJw5ex61lmxKkZaYhRK0M0Twg88J7Yq2mb39XYajASEHTKBpG9omMZyZZdJMsFSYGYwYHx6So1H3B5ibM3J0MCbOK4E
+SKYEDw2qzY0Nkpkftk8gg3fB+rIVOxH5FuDfAf6tLigbM5sC0+7//ysReQu4DPy+wTuDwcBxriQkMloKR+Mj5phDMOdzxUTbJsbjfepBn8nkiMFwCB1NBATLHXGzCzsuufP8ChERY/3kCb9imnHUHDEfRlACEuDR5iYrS4ucOXPW7cKfjH5Cp5Cwjn1rDv66IwaeCdq9tMUK06ZlNHRBu4m6ftGss1/3l0MUkhqluNhbLHShKvAj/9uP8qmfucbS/IL7m+GuKzSzbN7cQU0ZDgdsP37M3LFFNChrJ9fAjDzNHIwPmBuOSG3DuGkYDGcZzc4gUmimDb3+wOcs5oz+ZjymVylRhlinbyXA+uk1aJWEHxQmQijGmdNn+firv8vi8jG/1pvHF5LgaHzIvM12XbHzJEsWJOIHEeYTRqGj5jjZdmFpAbWC5YJFYZpbYmqJlXLv3gbTyeQPced+YWtra4ulhSVGoxmCdj56WpDi+mue/ByKd+taYZK7AG7Inadigc4+KkEUNPmtQCs4eeIkVoQsbhGWcmE8HTOYG5LaltQagxFM0xQ1pepD0cDB3g4L8wtUVe3ZFAhbuzvMjWZpo+u/RZw6E4KSyzsaqnu6vizFTkS+DvjbwJ8zs6PP+PoKsGVmWUQuAJeA63/Qn7ex8YDR4gwhVaBGQDm5fgosd2CvXwutFCY5Mzvs06t6eCCWuleGmHdN4vwkw4uKYIQCrYkPH7pi1+v1yOJDByswnR4huojvY5c/eZBKcCmaBpc/qXaOxMlHGFmexuKFLq7w6Vug/r0CThFwiN3F4CKO5SAKVlg7dQqAn/+//x8m4wbmfSgSxLAsPPrVPXJwflpz0JCjkR5tsbi05DAPQs6JZjLGRrO0rZDaRBTPcJiZmfPutBSODg6oNDDo9T19fm8P0YpedLdmE3MnFxW0tGiuPK6wA+D9w3YlgHbkbe2yVbWYQ1vqknVzWhgBo3RQQ+kIsmL41UtKJ+1z3fOxY8tsPd4iHeyT0xd2BfvI15/2vAfL/P1/+Olc9ioKH3hpno/97s4X9Od85tra2WJrZ4vxZInVleMcTMf0Z2epiyIh+IFaik9u8VBzcwWdFxw1th5tMX/iGGbqBGGJiPk+EFyfXFQpoTAY9Rn0nUw9Gs76g5PC3MK8/4UUpBRS21KqAe5a7O4qzXQCMyM2N3dYnF8iqFGouhjIjpP8Dl9f8mL3mbmxInIHz439T4Ee8CudlvNjZvbdwNcCf0dEWjxC5bufxCz+fmtpedk7sCfYg1s6uCqiCEWNghF7PebCPNubOyzOLXbvnfo11o8zv5JirpLAJTkZY2tni/mZWaTnMYpt03Ydlc//19bWu1i/TGW188RUKKkzlxRFcEeVnBNavICJdu62+CTMXYddSpayEboOsWQHuQW/apt5cLOJgCa+5SMfAWDaJk6eXOuY/J0rL9AZ32JBObW2RhszD+8/phsTAkY1qFmuVxAThoM+gxn32nMSixdghe6QAIuuIJhMJwyGAxKRkP16bN2FNppjQNZ9BlJaVldOdHhb9/wi5KL+PHJweV/n3hzwgu2bCT7TZVWfCNVxcN7UC4KZsLm1zeHh4efcL9/2DWf4Bz93m+/6pnNPKSnf+9cv0O8FshX6w5rSJiwEahX+3AcX+JV/tsn+YeKnf+HeF7r1n67NzS1m5xawpiWWhOL4nVkhZvOfKwoPHzzi+PIJNjbus7q0iqjQptYF/JQOhwtELU7xeUI7we2u/EBXMp2Ej093ZNIVUgOWlhfxqLfM5vYWszOzrJw8jhRIObnjj7nuO6WGWjo8+x2+/iimsf/e5/jyj36eX/uzwM9+8d+EbkO4w3CJzkOK2tlci3VW6uIboAgmfrKqFTJPKCL+h1nXNVhxeRKxINYB8R23aeXECopjTWrOar939z6ra6u0benUAYZKwrqXueq6wmCRaWnY3d5h+dixzusNBv0Bg96T4ut0FxPvaErnWot5MyjAo0dbzC3NUUfhwx/+cNclJFInsdIuqFkskUogdkORErwjXT153IthefJ86JzUi79IXaG07t+RgomxMDf7ZHBNUWFxcYmD/X1mZ5VYBUp5IslzBr9FIYh3ulkKqydOunty1ymH7NaeiCCheMB0KZ3hpGLBzTtFzVUlJh2140md1u5T8yVfwLXrP/qWC3znN53lf/n71/nmv3SK2xtjTp8YcOXOEd/3LRdpmwYJQs5QqfHSxRkebRZWFmruPxpjBH72l7/wwrezvUWvrp0cXQuSs5PHcQqIG4U6btZBpxSrOHFynVyMhsLWg0esrq7SVTnXByfc/QVhMmnY3NykNxqwsDDvksfuRpK7z9GjFUGlwbrkNb/NQMjC6upxlEyxSLJOU0xhNL/8Bf+sf1zXH4dp7L/22traYunYkk/fLGCibDy6x/rx1W5IpWgUmqMJuwf7rKwsuwNutqfFS7rpql9nje3tPQa9AcP+ADNhaWkBFffC634JWTx33bJz4qrokpynV6/kL3BTiudDZL+faKidnhHdNcTEy0qwbiIm4oRRui7TzHNUAYoguYBCFbsrbfccfuzHfhSjUAkk86JRilcl6ZQZWnV6SVHnZuEJX8UMzcG7yu57WemK7tProxtDRgfQyCIszs8TizFBuxlK+/RAUXGO4L2N+6ydXEefdmP+Od27e5vVtTUe3nnAybU1iuVOqA8U76xFzSfXmMcimnQDo+7v1E2YvJNxH7rHjzdpps3v2SPf9U3nWVmISBCaceYj33iaBw/HSCVIgDfePmJnv+XagzH5yCiaCQrHlit2dhPPnhsyP6f8rW8+x8PHU169fsBvfPwx66t9fvu1z5qffdba2fFr8PziAn3pkci+h4J2UK6xurJKKIXV1eM+jQ7e1cbig6JQxW4/+6kUSnZTA98qPsioIkEDh/uHlJyYn52nqD1V5SAQSnE8UIylxXnQ6AceuOloCNy7f5djq8c7XXDA8jt/IvuuKHZt03Qscs8SJWcebz5mbfkYJQopNWw8eIgWZWZ2tns5srtUmFs0IZ8Gg62oO0GETkeoheRkAf9fTl0+QHc9s8Ctu7dYXzuFF7NCTtBDSKpYa4SqA+m1kK2lCoHFucVuaOFmlAI+HQ7dxA68EFd+8rolknVdjLK0tEwuxae3wE/85E9jBil11w5zaktQpWTFverVp28WsQLqWYaouYahGTdsH+0xmp1hvH/E0uICJfgARUz96umMa7Y2txj1+8wMeiwtLZJNuP/gPgvHVqjFzYXu3btHb1Aj2dybTXD/NoXewKen1aiPmCKhUOTT2s/ctTlF3ebcIQVXXVjRp1erJwU/YGxsP2bj0UNS+r0J9l//50+wvtqnJEMtce3GIcsLylc+v8KNjQlnTg24fmOf586OuPrWARpc57q1PeVwDP1+ZHpknD3dZ/eo0OvDt/zlM3zg+QV+4hfv8HO/+uAL2quqwsP7D1haWCb2fcBkBSBw584tzqytPTWiyJYJwetYJcrKsWVKMtDUxURaxyV1R+rDvUOOH1ulCDSTA4pGl4SZsbuzQ6/uM5idIWt3YH
bPT3GCcsaNjjHo1T2Cuu5ZrMfRwRePWf5xW++KYifBgW9rvStRC5w6fQaquhvXCzP9GaqozMzNYdlI3fAhS+ni7vwaV8TYebzFsOrRr2qnKpg9vbKC//piBYJ6KEsQRqOBJ7V3chsTI4s7WmgREkJPHF0qufgm7jgv9sTiSbz72t/fJydjYWGBnZ1t6l5gdjD3dEzhDroeRG1ifPjfdaRge3vLhx3FBf45F8fGSkCrFgvZXVWsoPlpe4qhRDXaXCAYg2pIINDr9REcG1Rcx5u7a9fW5ialtISZGUfUzQcEw9GMD3266eloNGRuft6LdnZvj1z8qry8cAxEWFhacl2nKTH77yMEpPhU1n2MM4fjfQ4OJywvL/IEkVLBwfTiL+v+/j6p/XSh+95vO8/qYo+5UeC1q7uURggxs7WT2D2MPHycKTuJE8sVxTKHB8UnkcXQ0LKyMMv2QcNb1w7IpbB7OKUV4YULc1xcm+H06pD5hUhTMr/4Tx79gXv13r37zM7OUqkgOXS0IrC6MJwZeVh7wQ+YolgRrHWM1J2MC0LsAtTdLQcRQon0Bv2nTip1f+DvhPmT6tUVdYhoMZKIH3LlCUOgYCrcu32HEyvHqSywtLzs0+HkAfBPK+M7eL0rnIrPnD6NZlfYqLld+t7urmNTwahj4NjCPMPhrONfhnc9SGeAaA7+YqgUBsMBVa/u7JropGTg0najSPQNZ8qtW7cxMju7ey6XUunAY6GU4s4i0Wktlvwlsq64qj75viDZr9sH+wccHBzS71eIJnqDAVXoOzdPPEfBNY8Z6yRk3/43/gbgdBDLhShCCOaFlpakXXRedkyodOC4kYmSEFqyeqEPMTIzN6RXRwYzQ4K68YBfg32YsrW9Sc6ZpcVlql5NKkLOhdIKczOzVBq6wQnMLy1jpuTiQyAzdcIzjlFRQFpHKVtruXXnNgVI6pGVuTOtxIQQA6Nhz3HIJ9QN8VJomnm4+ZDDo0+npH3vN1/gGz60xoe+epX9w8TGdsvG1iH3Nv053d/wOEtTeLThHoJ3t44IHbaZcmH7sEWlME5gYhwdJqLA/LDGUDa2jji5UvPM6ZkvaK/u7e4xHA6R6IMzzEixkBtYXPCJt1WFm2/fpArF/fpD6UxTC3du3UGs24cJLLvWuJaKudGQo8NDNh9vgilioYtTLIwGM8S6xoo9NY29/2CD0jZkK6RszM3OEtQ7RRHv9rMFqJo/+Ad7B6x3RWc3OzODiJEDvH3jBmfPnmFheRnLrYPVwWh4UtzogHtz4r99OvehiA/0B70BgpNWxazL+zTMEoWO0tELSAsry4sENZaPrXjplNYHER3gr+XT7hyqbgjqRiLeIWkJPoTousC612OxVnoD94QbVX2XlOETWwxPt1K/kqoUhoMBP/VTP9UZUapfP7J6ZKGZc/o6lxVRozSG9AyK0rTFDQJSJmhwMmlUKqwLVQbU2NnbwRKug52ZJRCoYnBStRYPxMkuf5PummsNhL5RJBE1UrIfHkkDA2tcFVA8A7dgRIkM5ue5fec2p86eQSLEJE6vyYVeqOnFnlMWFba2t6EIC0vzECLD+Xmq3T0+8ldO8v7nZ3jfs8cI0bh6Y5/9g9Yn2uqdYyluehlQUocPpixENZaXIg93JwSJbG8lN5FzdIQpiXwEr7y5xygE0oyQcsMHX5jnxPc8y+m1If/HL9zlH3304efdr7V22GxHNzJzUrjUBtZQJLK4vOyfYe1T75tvXef82bMsLKwgVaE5arh7/xEnz6yDKQfNEQe7eyytLDEK2h0QwBM6S+kGVsGn/ogwv7iIxgimVGJUM7OOj4bEneu3OHf6nNM3OzbUO329Kzq723duYzjQe3xlFdHAzKjvGaVBSWq004Z792/j0SeeFSsiblHbcZ4iRsiBR48eszfec6A8Clk7EodBseDXCdxdYzg7B6bMzswBGQnqrPcS0SBkFUpIqEAWv4IGU0+IMsejDiZHPHh4F5FEXUX6gxqQ7lrrGttHGxvs7+85rUPBzF1O7t3bAODHf/J/9yGG+tUHKy6Btz49OixGAlmj6zO74BWNkRB8MBEolDbx4N5dd9g1HwAYxqAeOjaomf4wEPqVG2mW4J2mFXIpSMhoN/QgFM9rNXU9qgqa3BKp6QwP6DTAhsdQLs8PWV1Z7Nw/BE8yyxyNj3iwsfEUxywYMzOzzMyMACGbsLuzSzOd8p7LC/yZP7VCKbC5mdjdKYwnnZ1REJRMkwM5+sApKLz/vfOUAIszFXtN4olyOebU0X4yJUPKQnNU2NxqeLzXsnvoXdf7np/lr3zoOBuPjvjd13c/e5N+xnr71l3G0waJbUcyD9B7Mkhy7uXSaN47aROn6xxfBY0szs90mGnN0uIxt9sSoe7VzC3ME+u+k+wpFClsb2/x1lvX2drboqhj06rO4RyO+sQYnddnRipPhDPKsWMnMBW06thV7wLuybuiszu24uP4hHLvwT0uXzxHS59sjXO9shB7gWMrK35FKYZWhf29A+49eMDC/DyrK8fc5jrBwvyCZ5l09tZPou1CkA6iqyBl3nrrGs+cP4PlyLUbV3nm4nlCDqQqE0QpxZ1mo/gU1rIi6kaL2gHPbRIGvR7x5BKqwUNWijzFY4pkBGF+adFzISQgFLdJEuFbv/VbAXjw4D5a3IgXy9iOAAAgAElEQVQ043y6WIDOpBFrIHkBThpgWrj69ptUoeLchUtIbGhRYlSWF5aQpnNmEaexDEIP00wLPHr4mEE1ZHx4xOxohtHiEIpzHIupR01m3CLcQDsxvHQEaJNCFYUWtwxPnd1VoQKr6NfdHZUOPzNl1OvTqyMlCNZFLMbgnaqJUEkhT8b8h//+aS6u93nwaMqjRy2744YmZ6IEsrkBQ6QiMyWYsTw/5MKFIatLFe/N8xxbqHnj+h67OfEzvxZRIlWV+Ytf7c40EuwptjtpE4OpMp3AzX1j7WSf97wwz4sXZ3jw+PNbIh0ejb3oFjdulUxn/pCRrKgEcihupR6ARhj1B0CXfpcMjTAaDRAyJm6SsL29zamB5/Rm8dCg2dEM/UGfKvS6b+I5J03xrNzW4lOet0qkaCaqMJyZwb2TPaJx+fj6l+r1/SNb74rObuPBfaBgkllfP0vSSE8KJdbejUWlhB79kVuoJw2kIvSGQ06fXmdhcY6ijjFplakGNSGELo3Jw40T3slUGigCZGNtbc19v4Jy6tQ6wdx0IErlYmqHoro4RrCSyR3NoARzS3QVeiHSp0LMOnwNNIo7n4iSTahiRRD1gcYTvCoJ3/Xd3wWAZZcNWXFDo37yy29t3pvlVIFkxBIRQQOcOn2GU6fWiWbE4oeCxkBd9zENlBixrtgldaqJZmF5fomZ2T7LK0v0hwMw89SE2jsGE6fZxA4WyHicYS6uCo4E2mQEcV1sLA6iKxkLGQ0+afV/lCQKdUWv7iGpk8iJD1OcMJ649+A+u3v7nDo5YnGx5uFGw+b+hLZ1knFObpPf04oshZ//jT5WKtZPDzh5rEelFZfPzdIctcwPewwGwte8L/FvfADubQq//C+F3lB47
0uLvHBpRB2NUAnjNvN4M4EY06ZldbHmuz98ng++vPj77lkrBWmiUz1QIBInQibSmmGT7HzF5ITRoM5fbFovboJ/zTSSo1t5rR5bBdz3T8WHRHUVGQ37hNohjVyAZPSLOcTizTPWGbtq8nBtK53vHZkrr19jb+vzX8vfKetd4VT84gsvWhVrCEIpiShCjkJpa8hTJBZMardKKp7TKqpPX6acnZMWxboxhY/mTQpI7aenGZh7jRVRcvJhhmTX9yR/H8lHBanFiVFRgIqQMzkmclsTQgOqjKdTHt67z7kzZwgqZBGnnYggJXeierdNcpJzgk44JlpIUlNT+Fcf/x0+9i9+i7/1N7+dLIE4TeTKp2epC7gJxWi1OG5mYFKIOTBV6QDsjJaMSYVVcHS4z9bGFuunTvnV03C+nkIlhdJ5obnCxDtMy8Lbt69z6vgasV+RRbrjwQcrVloChdfeuM6zly9RkhJ7PvShTAgaySUTJJKDQwZVA7lyIqR2XaoFYXt7i40H91leOsbq6grQcuv2Xb7jG47zb3/tMrfuH/Fw0yeyqv6SVyhTMieWKn7s5wvf8VcHLC8ElhaVE6MesVKCZsZT4XA8ZnMPdvcm9KtAazW7ew2nTg4Y9ZTDScODR2O2d9w0ogpKVcHJlZpzJwcYyuZ+y9FR4b/5u2/w67/5+LP2bFVXKMpzly+C+s/le86djB0khFSZH07Z4zuNRCxC6qayIRttgJIzMcDu7iEHR3ucPLEKFgElqO9zw9BsDs+QvSM2p/j4ldn99XIAUndMBZgcTRi3wq3rV97Rd9l3RWd35+4dSjaiZkL2KwBT0NASNRBKRaQhp4yVAEQn8CYjpExd3BZIinYvp/lL7mZDT9UXJkbJnRMH6q4mdR9UuHbldcqUp7wotFMNSKLFA4ljSC7BwZUGa+vHsVJog6s/wG3bNTjL3lsedz+WEH0IIAFrI1XbUlrv3n7kx36EhELbQu0dXrJMMEGDMQ6uOihmIKFjnESsTLl27XWwxotqzoQmM9Q+J06uIVGI2jnrirL1+CEbj7dJouQUsBY0Jz8ognF67UQnQ1GqVDqytLehtQitBi6evkjRmtiHVpweU0pFK0qW6F0FQmw9CYviWRWFjtRcjNHMAmfOnmNxcckxKIn8Z3/zBT7y186wtV3Y3s0eNi0FNQ+BDrWxMqeMhoHv+aYep44LSzORWnuUKJQcCUS0UopWhJCYna3YO0j0rbC2EhjORHRgxFiYHVaEysO5CRlD2HjY8uBBQ5DMyrJy/lSf0eBzv2Inz53l9HPniaHrfAu8fuUNkrUudcwVEgNVW3fwhZBDg7RC1kyR4DQgfaKmCFipGI1GrC6vIK0nmD1+/JCNx4/I+NS5FeXWrTvsTybeNVtAq07pY84dVTFi8oksOTPsDdh+9MXL5P64rXcFZte2LaXvJ6FJdiG/OMaSLEJuPBnKFKkc8ygCxEJB2d7cpW0Tq6vHobjDSJszlcDtW7dYOrbKaHbo+Z+uzyFoC2IILahw8dKljvCrFDW0e0HVCqqRCEyDU0yqoqhGSqz8qtJpH92hJNMS0GRsbmxQacXK8jJZnWJBbCkBqmnkK77mKwD4rd/6DZ/QRs9bDck1k4JBrl2EJIVsmcpaV1cUo9erOX/uNBJqNCsHaczGvXucP3eOGN1RWdpIwp2TFxZXEO2Y+N40kNx/HdFC7A38Gp0LDmwVpPiQpok9rG1h0DUHBapcOclaQC3Tou7OLEZWQ/AiW6RA6sjMAXq10Y/RRyfmfMNhzzgaN+yNp0gxorpEKvYUaTLDXkVQoV8F1pYqtI7M9CKlbtCmJowmTFNFKsb4wCglsLTcZ7bfYzSqaXPLwlBpTGgOM5pbhoNAbp3jJ/AUP0wWick6LOxzN0N3r9/g4vPPkmP0rjVnnrl80SGQqKhl3xPBsKNC6YO2wXXR9JHSIpVRWp/kqgr7B24gemrtNFZcG764vPTkpPBnHjLr6yc61VDxg87ArA+WaEUJBlYJVencgCLkd4GC4l3R2ZkIFUoKzpNMSbACyQxCQWON0p1UHd9US0BzhEafDigCxoMH99h8vEcIPi08sb7OxsZ9JoeHbsekShagDtRPIhdLoYrumGKm7p0XBUkZCJi0nXzMsZpI4wXRCmKp0yv6FcVyIiQnkaweX2VxeR4Tn2qKQZgWeo3zB7/j25xfFyulZ5lI4PVPve62SihWKj/0xf+8iFAkEyX6hDgb1WDgHUIsxKri7Mk115q2FSH5oRFCQkOD1IZGIaZMyJ0bRqeoaHMgd9I2e5LwZRWFTGhAx63jQxKpy7QD+TOf/NRrbk+UBGnAJpGQn3S6FQmh5OAJceq4XzJoQ+wOMOGb/9Isf/b9Fdeu77O5PWaaDcsJCVBaYW19jssXF3jmwixnz8wxv1QzGgaCVgyqIVIpNAMQN3FfOVZxbGFArcpwPjIIhcXFHlUQNCv7+5kHew3buw0744YSilOHUO5tTnm43ZDUO+kf+k+e42u/Yumz9mxOGbVO8eM/HKMAlEKt8MprrwIe7s3A7xiIYBohtogkyMU1xtEIZoyGA9ZOrvvtogasEDPE4M7GuRRS8syW27duM54eglSk1HPbKR/Uu1JHJqTKFUC08nlK9jtrvSuK3aWLF7GsPu2MQqiMqO5QG7RQh4xpolGjhMLUCtPOzlvU8bHK3I765PI6i/OLzlEKblt0+vxZ6v+PvXcP9iy76vs+a+29z+/e27ffPd09PTOa6Xk/NBpJCI1BEB4CDIQiBdiOSZxyXMQphyTGJHZVSP4IhVOkymWXXZVS4jiA7XI52KRCKAIBI4gRIATiIWmkmdG8eh7d8+iZ7unXffx+Z++9Vv5Y+7YkBEjBNRIz5fPXzO3fvb/XOeustb6vxQbWJlzgiSefoC9nViWeI1BFDXnTQBB9zrhK5ApIptZKy6GeeOzpp8OhtwiqMVKrQZZE0UxOSgnNPy+9cp4LF1/nxbNn2bp4JRDOrHhxHn74YZ577jke/9RT7JhTVbj/7vsJz84IPF7CIJel2NX46CRa1H3rsIZA6xGNs5ZxTdG5Wh22Q4pYJjtDOxumoxNxsUoqlCSkVQAuIX2AmlqM1ZsJKxlYkIe7zGwxUt/zwL1DfSaQImu2NsAy0MkKuRR8WBllTWiPCd/d6MS+8fyrlUtXKtlgMg9nhyacOFo4ug+ee/oqpw4vOLkxsZgSD//5X8daRcyQlGjMYfpgja/47l9jsYCNKbG5lmESpu7hdj3kanXZ2NkSzjwrfOi3Q12TEHDn5Ve2ufxaRSXzyDNLLlyun3fO3n3P3RTNYWpgsY6orqDCqnfe/sB9NGv0EiFMxcFVWeR4b10V8wA0zJS5C0IhlUx3jy/XgZwRmVCBnKIwgvO222/hYDnI1CSI7pngmDTjscceR3MKM4akg7/55t/tvyWK3TNPPx2OHB7eaNI7fYj81MCSkn1iDSVJZm0I+IFAHVPCFmBF0IWRSrTsmjp0YY0UvC/vJE/cfec9YTrZYNVnsjcEGX5oYbjZdYW1BK0hntFcyM0omrjjnjuwZaelwZkT
QEPR4EKACIPCcePJkxw6fJRTN93K5pH9uA6zgxb32h//iR/nnntvj1+y+HuI0hycxpq1YM1LABAItNxgfXRh3ampoqWgrJGsxHgjM+CkGgYFKkZvzrlzr/DahVdx9yhYozut3rES+6dinarjdyvobkNwqs/ADlWUrAtEVsNIQZACkg0rKzAj588Qwi9fvcxzzz5NogeK6TZ4faHMwIUjBzPr+5SK0nJwHU3h2JHCoUOF97zzEJqge0Il81s/837KWgnnjz6QYRzXxG/81FeF2WWPwpJJESqdhK4VFeHgkQWpGLeddL7mIWeaIhhdUziQnHtlyaWdFX/mof38N993J3fcsvE55+xTTz5F60vIDTVIyYJTlxqPfuqTSMtkFpgm3DKdsGSqvYMp2kcYtoVzdcrju98bf4kUO1K6fqOwNkENmFuXidmMeeohl5wHTUide++5h9oDhKItkWTccNNdb/h1/EYfb4li5z680sRJGhbp6gUYe526Q1Wn00JSpeAaKGTyFnufOfYfKw9NK3tES208eeZpdldbzAVWaUZSx3MfXeHEIhXacCtJOLKHUA7QodIDYcyKlxyqjUXCqwY6Fm0eFy9d5uzZc0RFCkRSJWgiIntxxmHbe/ttpwH4hQ/+fHRrMpFdMZ1QkcAJXKjSoxvrFTzGZFzJrV83z+y1YLXjqTFnwcRoLKBMkJzenDl+jVM3nuDo0cP44GPrQPkEQZJTurPqhYU7UqAq1FxRDV802KRUgR59kqlQM8xG7FRtgeSJZhMNZ9Vgc30/t95+OsjLGgX1tdde57VXXuU//NYDfOc37uO580tev1Sjm+8VBR5++yaH96W4MQmILGBjB/fGeupYMjohuNeWSMP5Y21tA1Wj5mgwHYHZkNbpbhzeLKyWjiXld85s8ZO/+DIQDjTBGIxCa9YRhX/2c+d45uzO552zzZ3eY0RtVmJXvCy8/YEH6BM0DZcZV8GHe3bNYdmUbKakjpYOU4sVC6F9liYjgzjcTQRHKog0ulp4EU4ShgJeImQbQ2v4P4ouKC6krsQtN/HC05/izX58Kcw7f4KwX3/1s6IUfxj4q8Cecvq/dff/Z/zbDwHfR3Bj/7q7/6sv5nmsRlhLdcgl7pLNYzxwmdBuaExriIRjrwcmHzrDJKQECUOGBRGE1va22+8kAZYcaYbIAhla0pyFuRqehKxGz5F3kURIyajDtFNN8SrsZtgQWF5d8eknn/xD38vFCxev//dNN55iNa+4cPHi5zzmB/+rvwHAcreSfA2RGZsqUocJaW488rHHeeid7wZ2QBZgYH1B6kZbT5ElK4KmFfgE3dE+g6YYN82xHHmzaehTfYAf5o5rsPGXyVlUhd5pk5B6ZUaZuiJU1iWzi7GQhquykqD6qBecBjn2jX0XmKJIaDJSi+yFlIfDhxqioc44dsNRXIXWO9tXjHk7RmfBMSlod67twpSdqWTSJGwsgmQuubOqwrR0dhIUWbDKlakv8ByqCTen6Ay9MGujuGMryKmwf9G55aZ1UupcOX6YhRxhfYLZhdxih6yTMEkGF37sR97N9/7N3+E3P/a5PrRZE2pO1ZHRa4ouCLullfCpxz/Ou975EIiNOM1MqhXzRtaC1YYWaF5JWoI2pU7zEd2Zw1oMF7IPQjeZDpw5c4ata9e4/a572NxYx/fFc5YaE0HPRBh8Ap+NjbW1L+Yy/FN9fLlyY38Y2HL3v/sHHns/8JPAe4FTwC8Dd7uPqvVHP4c/+Pb3onmJ9Bh9knt07EPALx4wumpCmiMl7HG0J2BFS+UzxgAAxAI+RgxhJZnEjKqS9tjCSLi3edgnlTkIyz4bbezdGKiodBu5E0b3RF3u8uQfUey+mMPd2d7e5qGveA/7x4nYCTqNZIE1x3bDwBTZuxkEsVaGpZVsG7lktq3y/Jkz3HvnA5BXmER4jhPEZVVH8ugQPNNwUh1OzMmuR0QqIK1EkAzgNSNYOHXsvS4KkfA80dJq3G3jycLA0ug5OlNvafjoBborKMmjI0WDXPsffNtB7r+lceaFLVo3pCjFjXfdf5hcCocPKDkL6/sbGyywnmm+zc6cmfKKVZ1AYV2d7k42ZWWJVJxJ4o5rkilUQFl1ZWd7lxllnp2cYXvV2VzLXL40c/lacDJzhpTh1psX/MDffowPfvji532Hdz14LwudSCQiG8+g5gh9otAWO2QvUYQmpdVOmZRaA6WOb6iHykf6QFiJ/axLZF7gQSVBMMk8+9yzHDq4nwOHD9Bb+DDmQVzHbRTFziyKMGElzGmfevwJdnZ33tQ4xRs+xrr7rwFf0Fp9HP8e8C/cfeXuzwJPE4XvCx6NJY984pMx0rowaxo5rDlOoiJoSmRVfAqekqlTc6flFJ2EGqZ7qGrYJ/mUWKVAO4unYR/e6dJwSagWFCe1Qo92j6Hfjlg/XSEu7GqO3aCskUyv79z+TY4f/dEfDS+yMYZkQBdzKAuWQpKYw6w20BD4x1AS4UQ1O5986jE2Fhvcdd9dQcNIhgjhZOKO6QApdoW+q7TW6a2zyiPAu2aEMtDt2CN5lxCep5mcwhxTJwEviFc6GddQbYgr7qu4SLXjNZLopYHHwhB1DRPLPbO1wX1Jnqitc9OpiZPHlUVxUndaA0/G2mSsTcrGPqP4OhWheqO3BWtZmNuESiW7RTiRFmorUAIA2ilKW4vCa0kxL2RxNvevcWB/QcR5/fIKGVb6ZVKkxGgemtOMW+Gf/g/v5mve9bmKirvvvp3JMw40wh0HF7xUvIDmSqlTfI5rGmHuLrQajjnZDZGwZspilKbk/hlz00wPpQmAOo2GtRW33Xwrhw8cJpnGe5ZEH3mxrplaKnORUOtYQ5tB7bwVQii+nDu7/0JEHhGRnxCRvTPhJuDsZz3m3PjZ5x0i8p+KyO+KyO/ecvPNTAoPPvhAdBStIkvwmq5H562u7fLUU08zd0W04CWWu5HdqtiseIPUDMyCkyVOnsesr6GUTUBnoq8WRBx0ZNNGKIlAUh791KciBEacRz72BDDQw2ZkGpor/keQTb+Y44YbbgDgAx/4QFz3Va5HQVpbw7tTJGRUjSWuSnalLBVPTvUY28tG5t4H76VJDTTUO61nxASVRkIouk6RjK9JOHB48OyS69iRDj9A/6zsg+zU7Hg3+vCbcYe5r8i9MaU2OjOP0b9MVPFw5dhYkJpgRUi+iIyIPVPxLlTL4dTbIxtYbObi5V22r4XEyVLj4XffwKGDaxw5XFhbD7cXcKgzvceYO1ujYfSaUY1QJKtw53d8kCyN5JBXUTTMfISaN3KChSkLg+OHC7fdtMG0gIpxYDNzaCOCuzszmUpSQ7Pz1773Vm49tX79O1xIYZHDFEF9hO94OBf77CM8HcwatrKI3MyCJ8NSENEtxYi6RKgSmaQJA+l7rocIidQS2goJIWsLB2qDZ59+ht2dnXA8pmFS8Vrow85KBmWFqXDipjv/xOfrn5bjy1Xs/hfgDuCdRFbs3/v/+wfc/R+5+3vc/T1Hjh4NJCqVUD7IgjwNvw43oLO+NnHHXXeQSyVLQpedSxcu8/LZF0I
[diff hunk omitted: two base64-encoded image/png `display_data` outputs from a Jupyter notebook cell; no caption or axis information is recoverable from the encoded payload]
Ubtd+eiqkb04gpAj4gDN6m4KaX+tZj7d0ZCUt4heYXfeqhME5fu2ufYUKm2aNy0+htIVg3oCtImBfNCPDu8jh0dEsavvhJmePzSLIqtYE5gA7YAQ3Zy/JeUXHNM4q6PC6c8bjs8bju8FrZ45XlEq8cL9R4uj/L2NNB9gcwrb1bMAScMHZQD+yyCjXMeHfqhBD2Qu3q1lhpFUBKKzmonQP27rdYZ5Hkuea+AewbtiALy+2huBXEozdPzdTWuoRBZtP10ZqkepudXTRo7T3i1sDYvY5sELTrDO7dcYNsUk8ZmF3LnBhUlrkp3Rlwb2AC76yn0kVz33ONOzJ527IqTrs0qqO9bLjjSsBCoIN0iOkKmF9neT2umK4LaJUC8KQxqy8SlocZyyVhvQTKnnx393FaSitQdREVZ9XxSRey6Kpaqjhh6RHg/TtwUhcnro0bdRmDa4Hgni3CZtR/WbXUtoychnDEpFEryXeESrr1YAf0oSFvzahOyL11cuA8+44pMvh4rAG7uT5CGVlK/nqSsfF5JQK6eDBxgw3b81T2pyAH+YUzKgtjP/CEx2WH18sen1Q/9lePe7xmwH41o14rsF+lTWCPC5Y6I6q1Ax/oB68YBfey424THXaChB7QuG1CUwHZ51gJFpiau6v3XX2mEexj3QW3Rwv7sQXsrIDuuzK5kbe/ty7Sa8COc+tRbJWrueq6lBiI1FnJOwK8vdSQCgQnAqYe/AkgtgVYUGOqzpDMJbK2c98Wmrt7uNj0y74DjZmbm5zdTzLDTosM6Yze/Mwzi9dFbJCVJELeosD+mDA9BqbHjPn1inxVkY4VtiKyzikwdgH29YHs98rBsGQPjAqBwlZjFpMarqfLwgoD+AjgW/7j50Dc62ZzOsin7MZ8fM1/OQuolz2wXpJ6EaHJTAz3j84HyO5QB9LNumWgwkrNywGAG5GpTZG7yXLHzLWzJO1IdjM+UJyHZQP0rY075H7JB7AXm7bvoQZjKQAsyCrHEJY64cATruuMqzLjquzw+rrzBUqvH3diPD0EYL9OAuY3AHucbZntpAtxbTr7TgOB7YC6Z5Fi4p7GukNapJ+sfcr3rWVySYaztu2soK/tnAK4ckLnrhtJQ9PhebtNBIaOE1Bve6m2jbPDAGOMfWwmek0OnxvDl/78JIZVDHnIgEFwLd+JWLh0gvjoEzd7kmIjq2so32JMBe4MuJPfoAN7DBdgcofvIo/wQNsOTN1uTGbFN0CPwF4hwP44ObDPrzF2r1VMjwvy9Qpaq25MIMZTk2LWS6AYEJrcExZuMAOUue20FNlpHJnIVnnzycMFhu+hU9p/mwud4vHKFFqkIb2gefpo0KdyASwPCOsDoFwyyoXdk9S17+36WBaxTJMMePkg18jRpzyG4UXw040D8lhUNXR5/JnbaLYNWPHdWLsbczXDHKcHLzYZmMfHKlEce8OpvQ51UmCf8Xjd4fG6U2CfcXVQKeY6g651kdIBIsmcY+xRjjHWPgK7SzDiKlz9vQIzg+YKmipSYlCKaKSueYXAa9LuXGUVt9qQKjOSGutZXVy5QqUzCenBtlF7SGZD6uQOwBm72w3UqaEBOLnMZAuxur1dO++24WFZe1XIkHK2//zwDWJl4YXd7dEGBvW1J3un/r/OkYTbBVhX4LOCu/RrGzzfFuDep2gx73T36L8eAY8CsNtuTArunqFXtoQ9dY1d5Zjd6xXT6wLs6Vhg2l2dE+qenOHWXb+Aw3XHUM+sg0lkEHUCbMEOBVcnH8RGkN8CuXDPm8cFaaIxWztMrmfT1jITyp6wXgqwr48Y6wNGvazAroKyhAAoS0K91i3WdE4qA7DEBOfcjK1xX9UTL53wKPqHra+k9aMzGz8h5slAZOs0Avs5w+oLxfd+h6QI5hXk70vNKEg41AnHOuFQJgf2q3XG68sOV8uEq4My9kMGHbIz9XwgWZex9MCeYiwmKU4zKKpM0bxi9LVnAfh9FTvVriLtCnKuyFNFztXDFkusE0IpCTUlFGJUZO1r+j+r17G62bKyeK6BqAQW3dZ4nD5P07zbBh0G7BRA3jx+wj0Oq2mtHqK9bnSrttgu1iS9rQ7SjEmRJ5IkQn7U7tXzC6y9u3a4BlkZKmA7orEz95tb3p0E9/HGhXE3cE9FHtTodeJpnGaZe59uWJAPhHSAsPbXhLVPr1dMVwXpsEos8DlrQCQJc7peCBhWXdnKcSSvsUFaWZWlUGvIPDWsYl2kkIiQCotHQYjjeWJMpXbejcCPdq6BIRjCYqGMxoD9AWF5KMC+vFTBDwumyxXzbkXOFcyE42HCMk9Yk6xaoSrBp0g3GGkRMcP9AxonRq+5wch7TyjSTsRNnuEhz2A87d/PgDqArd2q3urEDCxVupmBOWC6uujsaxXWvnLGoUw41AnXZfLojo+XGYdlwvVxxvEwo1xnQBcoSVsOXjFH9WUPEVDjwp0+YF70jFE5Rr3K6p7B+wraF0xzwbxbMeWK3VSQU/WV18yEpSSsJWMtCaAMECAmZEUmlnsHAwkK+FIhriPnErxCziVT7U4YO3m7NlC3mDdmQ+ji35xZTcsIhNJAtsAlyo613/jQQz6GX9oNT7DCiKxhWTfbF8xi3Ufat7m0Vd1vD3Bnr+hNF6lQUWQxx126UaOMGh48NoNZZ6wCLVzwogs7roH5NTWiXimwXy+gRVl7JtRJdpwxH19rHECcTlFnCBp1NAc761TO2hHmZs2ThmCMFP3Ds9F/64luPeSB2YtzUDOiis4OrA8Z60MGPyqYHxzx6MEBl7sFc6pYa8LjacZrtMdSCEUDUOUDwAcBiBwaaucBYExteJQnRffHNMgzccwwph53cNKNO25M/AQyz3NOrMzdWboCumyqIQBvoC6bbEy4XmcB93XGoWRcHwXcj0cNBHZMSAeN8niktuhu3ZZiAHSM/SywX7AYTy9k9pYuBNR3uxX7qWA/rZizhE4wM/XKCUvJOJaK45pBBBzlxgXgmYFZ66LqngemH2c4eG66NwOnz5gCuDvAB9AOIROcrdt9xuOiQbg9LPdkoRokxYrm+TsofRT6lx8/vqpeJoUBxM6vG8/K6iL8x+Hd8r2Nvd8RcG8NT6ZX7ODo2peBpQb0MuNp0giSFt4TZpHvmLRuK7ZA42yg19lfK8iPBdiFtYsMUXZJGK7KMrbkGpByeJgEoJ9WhRSlJU4AJmEx7Ce1+Vyu3LxgfPRnb03eeDY0wk0pKxigWK9pWrvLMQ8Y9dGK/aMDXn54jXdfPsaD6YgpVRzLhFfyBZgJr5aMchSJyuJ5izFsoDNhpnETsHaMyQBemb77/dtgZsBeqw7wdZu1b3ndPAHDeZ6JARzqtAnohQlrFX195YRjyTjWyWOxy2YbGcdlwrJklOupAfuhhRXw1adLk2JODahos0hzeVTjedmrFLNn1H0FLiryhYD6xW7Bg92Cy3nBPq/YpVXAnVjuo6YmI6XZGf1RLy1yeVIwAsCyfWVSeaZmgNZWxhvrMqwibVKM6elNYx91dgf2Gagu22o+rYq0wFZv5pmFRpQ2+p2/d2xcp0rhPNtyM
+YR9faOuUfuZwCvYG6yjOnuN6W7A+5BvqiZxBCj06HO/70AlMRg2XZDAgBh8B4jw/7yAaF1hHwFTFfiGTM9VtZ+VNdHY+1zW4Fq0zoH9jidig94C0So/e/TTtX8XE5mcS10jwEfzGxUsE0wAsjHS1iD6ipUv1d2FzFYzJBZOnS5ZNQHFfnBipceHPDuy8d4z8XreHm+RgLjUCdMqaDUhMMy4WqXPbb31o47JiedlY6GarEyAVA3xwbwYB4i+XID9lLbb57hAOznvGje4lRVRxf5Jbu+vnLydwP1pWQca8a1gfoqoL4uE8oxAcck8WJGj5gFsteAhbg2F1XAvWLMrhEB0AF+3zP2fLFiv1/wYL/g4e6IR7sDHkxHXOQF+1QwJ900hMmB/THtEJ1ObYFgi0WeUKv2ayawxvfv3BFvSgZ6caBK8JDEPmBN4X0egN189DsvGemYHaOOrNtmk2F23vX18O64bAOCEVP9zMDQpgfmbqCl5WuM3exccG8b2Z3p5iq7G+AepoyySrM98HEKRFWZe3Qt1N+7zXntkGrTVYhHwYHFiGrgrjo7zKc9k+zwvrNd3SG7zBiQjYPNaOAN99Qa0Om92rUqM2jc4ADowRlwA6VkcVtPsDzQ5SGGpuDutmPQRcHl5RHvvLzCey5ex3v3n8Sj6eDgnqjiusx4/TjjsJ91D80gUcXVp671h04+LPow9tUBO9n9QRg6QaM/6jFhwdcmYMdwA4MPfOee8gISM3CoMxaW7fAM1I81O+tdioD8YZ1wLBnLmgXojxllzajH7MCeLQjYUQ2ptvpUQ/mmMcRxaIdRljDgEx92Bu8Z2AuwX1wseHRxwEt72fbv0XzAo+mIy3TEPq3iAguRGA91wlXeYUoFUyrhvslfRaVTTOybxIvjQxh4NiSSrT7FxtrNcDr1OnsEdtfZFdi9zaZR32e/HoGadwyr3BuLwO19dPiw6oaRrdrImHk2xj7e7HM49Y6j8HuBEELDOR043zbM3fywDfzGmBSmQ8sOK4YLUgtU9SEaG/BztIJWXUR0YEzXwHRVMV0xpit1e1yKMEIicM4SP2ZqoQWiAbV7IHFhyFZDjJ4047MI99nHuIjg1H8m6wxBqhlTs1uE+V34z6exumI37woudwvesbvCu3ev49Pm1/FSvkaiikOVlZKv7/b45O4Cr+0KjubzbC5n5pJm29kNwLK1PR+A0LED2DM3Ed+oFFGIy72RdPGK15cBfGUg99d/EamCXHaJTH3lLEBeBciXmnBUUF+WCWVNWBfZ+xRLD+y5Y+49sHdxY4xEKBt0fdq8RmZ27xjeF+R9wX7fgP1d+8d4x3yNl6crPJoOeJCOHg8HgLpsztiXFRMVZJVqxL5AKFXYerV4KPpMfNn/BqCPthv/y5pF9IxxA2kvybTQxArwFjKhY+7czapdRuEgx4S0tWo2Ej05aGD4hk/nyKrJrzXkSZpR0QHB1oBU29CmlbPFoNlOdwbc41RLltrKUOeVZSNm5QbwOkL6DcfVZtZQdCemvLC4Ph4Y0zUjXxeko8SOMbAU1p6EnerOM+7uiIGxd7vJnLIlYazoXLy2gijZgog23aTt1n1bMkOybgpshtfINLxs5t88Maa54MG84KX5gHfNj/Hu6TU8SBJ//JpmLJzx6nyBB/MRu92K466iTlnYUgy7oM/IIz0muJG000qVsRuwd0wGOjvRYyjWhbqmkn72FLfZS+j8pCVOz4tNzIRDzVj/f+reLtS6ZU0Pet6qMedca33723/nHE5Od5940JwW21yoBA140+CNCYHGm8ZcmDY2Hi86qJCLxNxECIFcaEQRGloSOg2a2KCQRhpEG0NubEVE1CQXdrB/Tnt+9++3v7XWnGNUvV68P/VWjTHnWt8+e+9vdcFgzjnm+KlRo+qpp95fZ+vyGUF9Dmx9WZLnPeVTltynxySK0wjsRwlwl4IitTN71DEQrWQQLGU8k5LGW0qHgr2KYgzY39nf4u3dHd7Kktf1Jh0lgiUxCkukyvu0cw/cygnzpCKmnHHKBUtOKInBucK8SEHcyIw3VP990xLOwmsnBe0N5WndkTP2OgL7ZCsGiK7ImooJlm/A7929xFancdW+YtzqKBgB3rDAxDvtvnAjkd5hU0lNxXpTXGvs/XzfexDciehvAvhTAL7PzH9U970L4L8G8A0Avw3gZ5n5AxKK9Z8C+JMAbgH8m8z8vz90D3+oCIIYZ8E4S7bUWaKoUXmUDXBtH7CwGZNLTkdGPlbfaA4hBlwcIyIZY+0xO02crX1VEF8MwnHODMifzZ4zXq+L02K7idxzs4tX7Q0R2K61na39zJ07gm53bbTBnhnTVHDIksfzJp3wPEuat4SKHS245x2eZZG57qcFaVfA06TMXZXf/n6s10GYR27AbvLRVfjh0BbKc3xJbLbvAgSWyCOF3t3a4mwIg0terl9A32YApzq5COYcW18WAfaqimvMCTQn0BwUpxvAnjtnpbBKCv3FV2w5MtsWSI8OBdO+4PpwwptX9w7sX9l/gnenl3gr3+JZOuImHbEPSbotDHFGReHkdvr3ecIxT9jlijlXpFzFwWnDJ8QbCQMLjq8trvJs5ZmjSKaFzjDLGFuZsEWw9EiW3DDCbmW+J+6GijbOg8JTSF1IdDLKyr1jNQgQnGa37DFIiGN/dR2dlFdiX50M5Fz1AL5QLvk5WvllAP/qsO8vAvgNZv4mgN/Q3wDwJwB8U7dvAfjFR1z/UWVlYliaZUAyscvMklzDQ/ayK508LsqJhbEX2eTiKo7ZTypTbqx99ARdNfbAOLpiS7gV0x9enF+8bS5SS03sMFqYdLJKHzgUVgC0YgqRwUMdUXKqmEJs8R0t2FPBXvcd0oKrvGCXq0TTDJ7AXTRJHwjsrMTu1eyR0S+tLS5IGPS+BLf6J9LAPMknCI8dQwROtrQKzGBo1jPll/E5920GbQL7qWQxb1wmzPOEZc4oc26K0zmBTtEiptmyd8A+c8fcO4cYa0sTyZgS3MBOHZSmvYhjnh9OeL474q3dPb60e4l3p5d4d/oEX5o+wbv5E7ydb/F2attzTeJ9k464yZL39ZAW7JPEq89JYtSTER1iMPE2WRqYadeG1q9dtGTbIGc3th5ET80LNwA7MTCETzANf2csUYLJ9bmtWtTUFj3VfHHGsZ7KcKwdt4kNvIkXFNrtoWXpg8ydmf8+EX1j2P0zAH5av/8tAH8PwF/Q/b/CMqX8JhG9TURfY+bvPHSfToZ15n8HeAUCUo2xs+vAiiOzFvCX+OyefGOpDj6cJMyABQhbgbsx41hXtHr4LL2FIjZD+4zc1KEWHG1zCWodGoOtdmS+BuLxHLDEjqnDyseVkXA7Y2h8isrkcU3qMJtJEgmNOW4mcOYBHGTonaMRWyO1SWmM++FV3nr2sQ3IkmXrZ14f6mGAu52B4W+UL6Jvs1qUGLAfF2XxkbHPGbWQKE7nBFoIdCIB9KA8NcZuMdrzrJFLTTQIOAgCjbGLniWw9v0ojlnw7HDCmwdh7V/Zv8CXd5/gy9PH+NL0iSTtphOuqLhFzJGzJBah
hFPKuKqzpg+UTFNTqvr6eDXfrqzN4ueK8ISV36BAjUHOoigm5kfmzC6KacAeX5B+2qRoPjQLhTzKzevXfq/iX2l7i9eu9vcBrzByj0hWO7EMOpv2hhGyOm3/XaAt+PQy96+GTv1dAF/V7z8O4PfCcd/WfQ+DeyzDeFx5a1pnsOVN5dChqZ3D8JjqvnS15Bs1BCHSWO2mJByBPcrx5eLt01lnZ4Z3/pliMostZawNSPNkGzXsmyWekzTIkAr5OCcxHfR2gbMRqNu4u7qXA17mg2cKOmmcE3O+qWr94AqorbL17B43ewDm2CYcf288HwGUSQRyW6A9xoV/6H2cL59p32YAS00SO6bkTWAvS5KYLAHY0wjsgbF3wB7SznX6D2tvNX9sVjIsliP7CtpXTDtRqL+xP+HN3T3e2t3hnV1j7MLQT3hGC3Z67cKS+BsVuNcVXiJ24E8B8cwUUn6QiD8iUTPgqkAMK9FMOcOzmCJ46mXtY7LubmXpRhpt3PmLsV2VBNTNR2BpoE4R2E1xvURSFjpXzBnMLHojDu8lkE+gn9C2wN3CfKwmAZe1X+7YP7JClZmZVlmgHy5E9C3I8hb53be3Z/HA5FeKQfSM2cEwKhIV+C3htSW5plL9pXjs5zzI8XJgmUFk4C/JXJINgKOG3di6VXKot++3zh2Pt3u0htpmn2Fi8Q6cBbSj/S8VuFinWQ+xhBA4EU6nCS9Pe0n+sLvBTT6iIGFPC0484UWVBBG3yx6nRRhmTCZuz7cp/zexTWizbvKN7WDvioHtQFHavhrLfcsEc9TRNIuCT1c+i759/dU3UExpWpO46ldRmpZFg22ZRcxM4kF9IpBG4+zCCpifhodeFtv2mOqwgZn1AzQPTfdGZbFn3xdcHWa8cTi6tdSXd5/g3fxSxDDpFm+nI56ngisiJ7yVgJeVsaPqybxjqUoG5FWqSaSBVDUwRRBHcCffBqCrNPZ+bk5JZUcdmJucfZXTVcVAKxm/9zedZDQkiQC35ZoNzmGebrMxd0+I34wkAAAgAElEQVQeEvsoQfxsEmR1yRBbdBP3GEHZkC7Evi8kdUOZGseJbjyOkaF8WnD/ni1JiehrAL6v+38fwNfDcT+h+1aFmX8JwC8BwOEbP8EXxTIRIGPMkaD8kN+rm3TWLOP5AGCREhFsZ0cTRmeOgK8Q2gwNd1m2ukjDa+eB3Nv1NRHDh5fWM3jt3I+BliiesdjZLp8U8yP3/qySSCHNQL5PWO4mfLI/4L39M5GVouK4EwuIwgkfLDd4//QMH52ucH/aoZ4ysiVYNrbl9aCuvbok4tEMMj5/TMagwL6a7LqTCGMKmiaSMWTQz8qr/K+PKJ9p337rn/4qLyyAXqrEYCkloRQNtrUkCZu8CLCnmdyT2sMKKGvPLhoIwO4hHlrSlz5gXYuGWHcStpf3DNpXtY454fn+qCaP93gr34lsPQtjf54Kboiwo4SsrTxzxY4YSdu5KuxXiMftwvKsvtpTZmybyJqpl7Vv6J9iprDG1vtMURaUz234A1uPLJ2qKv/ZxoFOMNr2LnqxdjfR15HDSikA+2BI4WbNbPeB3Cvp5JvQjQ/AwN36v+6rUmFP07lBej8zmfuZ8msAfg7AX9PPvxv2/zki+jsA/iUAHz1K3g6sKr0ZX2V1TmDpZ64ZZ8eRRTeHGgOf0PgOUGjMc2CnHBqcERq/tntLJLc1SG/NxGfLpls91rMZAQxNzZUgYibVqHmGpaqWQ3eQaJeHCXfpgB8kRqkJp5rx4e4GUyqonPBiOeC9+2f48PYa93d7CVh1Il+e+nPp6serl81sre/Q8uy9OMGSMXgHN91AEL94DG0TXEZbf2JwgVjTeAW4ba9WPtO+zYCAek1YigL8klFLDqKY5MCetoDdbNlnbuIYA/bIHFPbunC3JmdXm3ZRokpoARPHvL27dQXq2/kl3qQjntOCGyLc0A47EnnazCKCAcMTjBRO6qiVMetEdqrZJ7JaE1hXfMmIQaeUXLN2T6pjXtUR1JWxu4w9x2fnDtRlPJISPf0jxqZyYKcm8joFUD9xE8uUvt1bfbVLJnJyJV1UvXFtla91I31GT0gUwD2KZcZYWhjJ4APlMaaQfxuiYPoyEX0bwF+GdPxfJaKfB/A7AH5WD/91iKnYb0HMxf7sw1XYuOcrPEBkzGevM5Ru+R6W92dl29T/Z160q1lUX5LLvxFeJFrm9C2FUvcsP0pRpgPWjEpVltM2eQlzF6VcviNMe8KSJtymA2olzDXhg90NdqmAmXC37PDi/oBPXl6h3k6a7UcGQhyYrjS1NkqNtbvoCMOkzYA5HPkqgMNxo0hFM/fIAUEsA1rJ3KmKZcYDppBfSN921l6TgF2x2OciEkDHHqnJec0aJjB289uIzLFZRwWQ8zjtjbVXzaSU9gX7fcH1bsGzScD9zekeb+R7sYChE27SgmeJcEUZO8qaXAbIRJj11Yg5pCTvnjnjvu4k8FnZiUXQouBuz1ngAfwwMvYzrN1TQJoCdQR2Cwsy+FsYWxcW3IicK01dvk7e1qNuI89qcbeIGAxq8RJzFDMhOBfCRSuiW7LlfQAckwIwDzL31q/9WoNIRj4tuCBEf3GhPMZa5k+f+etf2TiWAfzCQ9c8V7bAeATA1fFAN4uuj1mzZvlDAWG0+NDrra45yMrY9qG9ANeVjgBvjHpU8tkLdMDj7WcdAMqUrF3+1TgBJQIm6VQVSawF0P5PCzx4GicCccLMO9zNCfNpwv4we8jfec6YjxPq7YR0m5HvJPCaRyGMSswA8C73Nzk7NbYS3wsNbeBtFIFdvW0Z1LNza57K6vDURE/ebmWcIWKzfv59m2HempLQohT12FwS4IwdHXtceZ/OKpIx3ZEBO4f+S8Zym6VXzKxUo+njruCwm/Fsd8IbuyOe5SOe53u86aaNM26IcaCMQ2DthSsKM2aumBm454yX9YD7usOLcoWXywEvl72ELV4mWaksukIx0VMVgG8mgIN4Qxaa8t3jstN5xm4TWhzD1RsfFt3RF4/K2FNRvUZBaHP2cA4iBhNQJ5tY1RADep2WMISbma9atIDbGJSKAA7ysa6MJpJU9kdKwlz5HAE+ToYPkMEn46E6lg7U/ftGCFc+833zooC7q587dmjATuytLyXO0tCZ2lm7vrwGwGgTeLzH1vN1z/8pabyDvHYuombyGdhLmiUZuIhSZEQthcRDcjeBbEKYEzCTMPY7SUmY72UgWIe3GSxGiHSxjK0k0OrQxZvfimE/FFlpKcDT+t3RuMOuX6Hs/VUb8bMrMr8E1l5TA7tCTeY7w8GGlsjY0WS9to1WGtrGTSSDznqkToG17wp2u4Kr3YLracZ1nvFGVlv1dHSTxz0RdmiMHQAqJL/SzIx7zritB9zWA17UK9zWPV4WyRh1N+/EImjOwtp9ZSKrki4kceQ7ssRsxMkclqYe2HkQxbQLwK1LDITZJDxqwx7FQWnpFaYmgsmzODuSMnaqaoyhGOSzhRkrEEnsl/hCwLJa6Zh7PxlHHPBVGNAD+xaQ8/B5pjwpcHfv1I6RcwP2js21415
JnBFmW0Lv5CP3kIZdmSLF+7bq+Tvr/g8vL5YVoCtjXS2/RrJ5zmJmLIG9cwYomVdnn70GLA4vfCJMqfEaKkA5iYeuDZpsjFJj4Odjk0daQggH5dGLNooL/N4IvyVcgsSEGdormmZAeDvUjbstv3Wg2TWijP01x5SJJbJ21s3EMUnFMZ1SL4B6VquYVLjpOMKzNcV1s/bycLcxxsqOgZ1kUtpPC66mBVfTjOt8wlWaZSOxVd+BHdQrGOCKCsbMBbdccMvAi7rHx/UKH5YbfLTc4MVyhRfzwTNGneZJWLvpFNRuPIplOlJDDaiNIG2myQvA3ulxavtNfsFwDxtvpZ9ALVyyi2BMDHOqSEUAHlUs7BzcTVfnwe223vrA7JKQLE597Jq2CtNxpOPEJ7/QTqN4pvmUbJenA+6j+CjMaJ1Gndr/PTA/fAsbCJQEBVnD4QJtEpEOwEiFJBaWyZTHF2ggio3JZeN3NP2jscMFRrAS3XT3JH+Oi8/pGZDQxBg+iJpMLxUGzyQ2yxAbZFoAPrbnpRJkkkddwh5lMJin3Yo42yQTlqAuawRgSl/fR7Jv1QeAYO2igyMAuyfUtiQeaP89FZBnGLCbYjFJcvagXDTrJberdmcZaWNToHocEuv7Zm5qprsWN8bMH5W1s240GbhL2N7rLB6lhzS7b0MOL7OiokLiOM1ccM8FLyvjw7rHh/VGgL3c4OPlCh+ervHJfMDdvMNx3glrP2VAVykpOAWtzR6tv5GLWJq1DzyxSJdFSUsnxuR+Xy/TDozdvHqD53o+sWYXq0iLgLp4sTM8j0BU7icIu07UpkIxTGt9v1oW4R7gQdxbz4WVBoGbpZ2GRbC8rN0zPaJbPy1wjzOxMVh9cA9laqAaPD39/EsPTIAHLdLv3fkK6sbWZQmseUKtIwY74rOlW3UM+zdYe2QXF+VpG271q+vacQFc22XCbBTOTYsdKx2IirAkP6uiMUqzHLBNB8vIJrvqRVt3oDlmJZusSe3zWeX//XU4NnaM8jimGnOWw2fr8loKS9xzj4xorN38BYxFuvNMCCcQ47MHJZ5fmtC549dsNt+B5eqGzEgTI+eKKVXsc8FEVUAdLJ7I2pgC5rJvZllG3nPFLUOB/RrvlTfwfnkDP5zfwPvzM3w8X+HF6YDb0w6nkwL7TK5TMNYek0/YM1BSzHNZewT35oDV2bBr2wID4I324dovUmEH9xxDNnhYkqq/awP1pWIrzDSRADqSiU9C+jtl3dCVJofxSZZbAe35zezXxTKpJ38dZoy48Dl5qH7+ZWPW9SV90hmuX4etytmgXLqcchOleM+KYaYXkCeLE/+KuDFaw2yx9mZaFaxGzjXLYyYWwNspKlx9ZRDrUWWVglkuQJWQcjjelrJLA3YTFfhkeIFJXFz1qGjGJnZn9eccurb+RxggcRBWbuGDX2NhQBh7APaRtXcyYJexW98LqyNurN3DOQS23iXhiErHiQXck8R7mTSWUCIJJ2EepQWEExJOnJBQUVRZI8rThBe8w4flBu+XN/CD5Tm+f3oT75+e4YPjDT4+XuH2uMfxuMNymsCnZuJJ4RmjnN1IFjM3ImL9NqTPs1VJZzSAgQBGk0qbDMM9XRkdPNbTzA3Q56qrpArUnrHLvYLxgpIMXnXOoBhlFRUGWSRbTPZwSh8+WI9Ldi3qMSN2qj9QzN1KN1MFDbWBIAFgZXt4XNYfv7SJAKLNtM6w7vas90vK2mu0xzVQCvFmxkbfArpo9tSOiy91g7UP17kE6mRLPSib7ZZA4VxtO6rNLLMDcIKwkEKIk0EHPK7YCxNuBFRAxV5w1mLLyu4ZLjzPyot18yC5ARUZjGJSWWE280+nCGvvnXnQWHvBQCTQRx40s8EIiLo6s+iIPMraA6hLjBUGZXHoygrwiVjFcdC4QklC+PJOU1sX76/3nPGS93hRr/BheebA/t78DB+crvHR6QqfHPe4P+2wnDL4pArjWcUx9rwrXRKABNQ4hk0sE4N+jYpTBCAP4haRp3P4HtqyBFGX2qxLjCkVvyzadyw/b+ENEhEAfiw6XtlEx0qcfMwxq/5IJzIHEzRCF3GPQ5uEe7wKuXw64N4BXZB/h5nYGTAYKAPAP/Y2CvDIBC4G8K0OJuOsk3USSV7NmVo89ij7j/XfeJ6t/XEZSfGFxpc81NnLcN/Y4dx6Z+u+2q8MHOS8oU4FGm23lwlGN/FNcRPQ2/5W4zR6EbMaSK1Dr+qXaBWLfWw331/aJGxR9qD2x70Vib3s11dkzlkD+wrgSxTBoD3bqDswr9+YiWgImuVJoY3xJgblipQYObFnUgLgeV3v6w4v6SD7KOGlInHlhHve4eN6hRflGu+XZ/hgfoYfnJ7jveMN3r9/hhfHPe6Oe5yOUwt+Nie1x2/PFN95pzyNJcjct1LwxT7p7WWkoyMg5njUxrQcW1ehSDpQV/+Krt2Ds6ObaT7Qr6KcHAr6Jn83yUOTTPTAzhHYz+HII8qTAfdoGtUttwYma89IkJnQRTRDY2+yPlNAKTgLYWig5DOm1iVpB2IbfDo+kfu23pKRbS6ltkq3lGvHdUxNHvh8Ge9ty0HSPy+Zfl5k0HBFETM00h3756gn6eoelE8ElacHRr0SP8X/gvK526cydQe+pSqoVxmYNjiBZqK2Efbgiy4cWDuZd2To457QektebP0oKNOb1yaaXDqGvTVFaoZ4TCaAEpBSRQyVM9fswH5IO7ysexQQ7mmHhIqKhJOaPL4o13hRrvDe/Awfzjd47yghKQzY51ME9mYBJMDe978I7NFvwfVEFh+nW3HqV5Ohm1jL5eej2GVUSDNSUUZuoF5UURpAHRj65Qjs1qf0+9b47PIIK3snNPl7t4IZwTv8/0pWgBvlaYB7eOg4y0cTITeHRLODtWNGgO8sU6whqXUeZ+AJMkFYy0YF7iI5F9MiJllpYVRS00I9d63xHkAew/eN5+5l8hvM+JHFlTjRnEgFmeKtOfxlX4MCNrqv+34oi9T9aZGHTrPIA1P3HMHJwzAWQIWufqLDWFgFjCuY1jZtwPm1Gc62yOSjSxVnpWFQijv4pp3aF1oia7dQy6PCL4ZiiEwO6EHOoiPWqYljuoTQY3TExALwA1u3GDDHOuG27oEFmLOE7jVmP2tU0NtywCflgI+Xa3w4X+Pj+Qof3l/j5WmH2/sD5tOEcswah95k7NQUnI2RdeANhFc2gPmKrSPgQzFz0eALYPLz8J2WKix+bv1li6XLPfpB15HFAOoR5C28RpcX2C8gz+GraV02m2h5jF5Lrdt3z9x9f0VceBrgjshWuNOor5fn7LNgx9j1dzvu/L2i3JJGt3VTZKjcvaq8WezGe6YRX54D+4p1cfdytkQy9vznG2f4jM9IcOVOBHhitNgWtkoJAB9NFTtQ7zxL9Riz+y8CGDWTYKaemxKAuZn7RoCnwirqIXQBv6wNgox8E9jjxG4imBqsGaoCu8nbn5hYBkALe6s2otZPYr9p/V3OWfX7wNibTfuG8tRYu7nkJ0D0S60upUqi7mOZMNFeYvSreOYTqkhgVDRxzV3Z4e
VywIvlgI9PV/hErWLuTzsBdkswsgRgr61PuyjUgF3nrrOr0/jKAottyv0NBb+B+0lk6VH0sqUk3RK7SF30uwE6sAJ1Z+wxIN5GP3Ola23+HB7AD7GvA1Hs1q9YV5d9dHki4B4C+USQD0qGaCkAoF/GFDTQijawW4BpL8k19LQKoE8SsNrt3bmwxAEh6Qh+WXv33Ne9xabeAH8D/ABcI1PzqkS2fQ6jrE/YiDGFaQrfdVCRyWuD3NA8Gi0n5Wh2Ftk+VaCqpYfEBZdgVzkzcmbwqVkfuPwQ0o5eV5+MFbRraAO7zyCCiWwd3MQxEdhXJpAuknkC4A54x+3M9rY2O8aKgSLZu9lg6yFvaB/TXG7UmlzCICxVgP2W9mLHzoS7tMMhLX7bygkzJ9yVHe7LDrfLXh2Udrg77XA8TU0U0wF7IzZdIXRjE3Esnyk+fjmCOtx7N3qUGqjnU7B8mYuKYUovfulusi12WYH8BUBvk9PwQEY41afGDQ2GYwD02Db8F9vwVcrTAHcDP1+a9oxmnMkUMlxkYCANKNAPZUv+7opFsDsf9PVRE8giIXKNyVJiiWOC8C4ZPbCHWBFxadpZRtThmVfLwvgAtm8QOw1t6P8RXFwlDE7YYtWIdWC4sqo5v1CL/T06i8TnZKAukEkvA2liT3mWE4AjACQVmVAP8BQqin6y6+rfMXb0YjlVno7A7mSgju34esGd45duo8DSN84jje6puhNn7Ku0coP83RJAD6IPaT5N+VeyJ9eQML0JUxLGbizewhTflx2OZcL9MuF+nnCcxY69zBl13gL2FqpWHg4tBO6jG61Z1rjYzsaNOnS1YF/qeGS26nNFOhVZ1ZUS2PrG5O/pKxXYU5Lv+QyY63lnwfycBFC7Pkf5u10isPYmwmltp83xqcrTAHegB/UNYDeWF13cOwXrBdtoAF3nEpAmgNvgGeN+G5tMC5x1RPd9E3dE2X+Xeqtj6uH/zkSLe0sgbvXz5wjyuc1yBiCY2grBYnRQtes11m620tGmOILD6LTFDJG/Vzk2hUBOVRNm51yRTknc5ZUMklrQbCbriL9HUYwBelhWd44lEdifYjF33QDshIHBb5xCCujyu1nI9IwdHgbXJuYO2KldkCuhFEJKCUdMwuKZcCwTdlnS55mylRXcYxJvyfUq2aPqnCTm0DIAu1oBdUBufXjjGWPxdqhwqxJvr2gVs0juWAvslU8V6ViR5wo6VaS5tMT3S+n6SRSzmMKdUxLLuSkpuENyIAxJue09PIQtD5VITh9TNkVWvl2+8ZMB995SgB3YO5k14PKrLtbIANyRNVwqZg2yyndo99HQoJTMk7N1EGZ2wI8p/bAxOblppwJ5MieLJbDWcUDoZ+eEBDkuBt8CEIBwuIYBuLYjgxrWdHJ26pneRrvFicxA3wCoyelZmU3CpEuhBGFbVveYQm1VLukdpNEd/FdM7Ez51AHYPsOyqsK5KhEcUJgakbF3tEoErVYyTYzGHikxArv5LlTKWEjAmyExb05q+x6BvXKLYjlbcpFFQygoqHeBzwzYOQy5COgbq9DNNrLhZScGxt557XbydW7Aflp6YDc9jF1O2TmyArklXZ90S6aEl89obNDFTbLiuiJ4337l8irn/IEUy8TC6Fib7GOXzTKhB3gtnUYegCkmRqBiXxbpNYxRYmNWVuabFukYyXoekySgTuHGzs43WDz3vz2aYrQkwDAoDdgf80IH+bWfr3HdXbETZv1oRxzv4co+KICXBuTjIDWvwertTgCZVUxSJxnN06MTWZO/D0vk8Czte9gXrRoeGhADs38ShTV2zxmm3oAdzXOabSIOIXyNqccwuFFPAnR9EoD4hJAYNzJnZfEJc2I1j4SIgHSwVE2cLuGJkwY7S6J3WhI8g1FpopiOoBhAp74am/qjTVAP1kTO2ntgzyZjP5Ye2OcCLAW0YT0l0RtTD+g5fjanMItourVyboSsGS4Qo4UpeZUupw6U1g8Mrzj0B4T9ryKqeTrgvvXiAW+oKJ+MAA+gt5G2LyYjB203BK0B3u9H4SVVuWnTNUkvtAwrdu1O9BItITo9QhPDmEI11ic+RAR2D+JV/fbtUF/OcqdAJnsWhngAIjL2vu5WV0Z7bvLJSzGym3T6pvQ4IDu0E5y6iUIjhYknsim5AD+qsz4KqLvJ4YmA+4UqxMHsilON1GnNGBNWRLFMZO01M9biGLSZdwGYk+T5rKS6IyBa0nhVjbyY85XFw6kC6HBQ74G9e4X2+kN9tt5xN+70M9r8pyEsg0fKVJEMzUEUE4G9GhsQUOecgCmDcwamBN5lVAP4iVB3qQG76jfihOt1c1CXscyhvj42z8QBenTZIKTdNc5h5VCeDLg3+2pr0IZio52nt3VsxIG5c2D3I8A7IA8MPl4bgLwk3ZvCCCUm1Mpr1rsF6sbag2w9Wsp4nZy+NGAfXa492NZQmq04ezuwhcH1pT01UF9ZwbCzpY5BjsyB2rmrjqb140nsqH2W1PYCJxBX0W3YxNYO2Z6E/TL6RZVZflhgPX1DCahT5Za56SmVoW1jG3Ma5qfUlN0e03yDsSMCuxMT8kme9TcSi/WX33cDhcL5sPjoHvTLQvYGk06syXi/Ag33iRNPNyhtrGyw9hA6wM0eZwP22oliOmAHemDfCbjzQYF9lwTUJ9J8rOQTaWcKHOrXrdAz+rqOIL9RmthNq6f928d/GGOPYuwXAP5pgDuxLzupqPWFOb04WsHFNA1y299b/63ad+iBo4gGNbjdS7U6V/qkYpo6GbPtlap2TgTxLk/oGWCXEx3lNtpneBiS59zM7BQUlS5jD3E6au7l695hwat7AD2jjCIdkUMOEwEUjKZWaRFFJFCpqMr8unv5PdeTsNx/w6Ip9gvVK6xWQpXFrv4pMPdQViTD2jMp8GagooW6YAqWMLvetr0pwlX/091I+6iupKhS8M6m7X4WzpW+RK4g9YTSFY0IDHhtp66BaaT12/dDAErzDveQAkEsIyIaCc2LZXBii/biROApC7BPCbyfUHcZvE8ou4S6TzpZqhWSefuOSmlvjx7MY/wf08uZaIF9MghvZcOjldEAvts/EoDhv4eUqcBTAXe0DuseaOMSEwidqQH5Y4L7bItl2iVHgO/uZS+oskcZNMwwptXfHw1kA8B3MkmtU7RtHUF0ZYaonWyTvUcnoIreJIvgTi9xdeR1NeblCuy+nvHBejCCL1+byWVrk5oB7AjFJjeNhmiDnqy+wOp9efsoMIn1iAxWWAZ7i40NrAG8VjirfyrgTpLTtXlYt5WItV/Nzc/LfRFsNdTJ2+NkvSWOaZ6QvvIhAXaK/Wls9jjhmn4gAJuDfOgjNJzar/T6gyJxsiaIoJmiuGcVAhktjIACPKm5IzT0hDyzgnoiIOfG2HcZ5ZBR99lBvRxI8rKOqftyIG0Iz18RApORhiVhJCK/d2PxGgbY5VPn290Afms1LO1GoU3x6PI0wJ3gWdpTUda+CsrfFKyNS/dAbNfqrx1mxHOd+Vy1ormeWqhU/W0vw1PIDb08OmB0zwm71hrY13W/XL/Nu
gIN4E0UE23ZDQhGNhJ0Ac3hahAdUQBzt43nLsYJptYuVduhaMIPU1RRZN4W2gBrNsmK5xYbR2y/qYF9gsSW6VZ4vLE9vh0/l0LD9zhBjlZLOYIy3Iw1Ars5nbHatI/4AQAmXqEAsIQwkZ7rW5HU2G/tqFsTf/dzBPbYv8eTrH6BDVt+0zWwhxC9RWLEkMaJkXAOigumg1OLGM4C6vVqErZ+yCj7JKC+l81NSi2F30YUyuijUhfN/7oweIH7wJjJNOkEauesJAujctaJqrWdWekEQB/LmUlgLE8G3OsEpNw+ReQRRDPWiyrcJp0vPB2fa5iHqnKGua69LYVFEm8zz3iePeNYv3aQ7RtY++piUjdn7+NtTTRhsvvBXX3L45QCG+kSMG/Y39sqoHlKostKD4hTjAE8SAHdxAZ63tlidQ9erL6UJfaUgc7e7fhoVeVtUYGa1srb11VoY0vtk7MwaxHJtD5jJpBjpEfxQEUvGzbcjo8cOlqnrxgPPDNY+msNzzM+n13mDLC3CSOsChzUe9buER9VodocmHRTRzZi1smfWt8xMcxOmfpBWHu5IpR9wnIglD1QDiTJw3eBtU8Qk9LQlm4SXQCaAF4A8pWw+cIIolOV9yefaHqwut2+YS233a6XX8/F8jTAHdqgYYkvQBQYGRBsnIW1rUQw1ACyeZ/hPGv35WYzJewOIervcYb5nwPw8VoXj4mgfulFDk2yWdT+3h1fzrGRus2OPJKeu/5zu66uBlwRtZeOX3fAopWrwfEjPptnZIqrh0vFLJoo/A4M1FcAsQ/UKqZpAJ6KWIZCH5R+bu0o+qVOPUBwIPAVUAT26TKwe9kkKKs9j6j88J2xOm1Ltn4WjIKIoxPHRFn2gmb6uMHaUXmlNMWUpX9kcjFM3WWUQ0K5yigHwnJF/ln3QDkgMHfu9Bd+6agwNaY+CwG11bGJLJlIiBIJu4eal5IRsrEv6vNH8hTbNOq5Pk15MuB+UT/gcvDwWdcAYoUDGGyWEdjLeaubqpHf/LrOJIf7jow/svGhfl1VNp7h3ErgXBiFno1RA/YgQunksoGxuxVCyCFJxeSZDE8MbPfLYn2QloqyS6AiswZxWxkwQbLBGxgMbbpKkfiYEvUhcdIf4sADaPqRJwLuwZwJpgCOsnZzBqsqZYmTfMyH2gG72bSbbJ6w7sOPePTYfc/+MQB8v+rsic1ZuXBg7dA5YBTHpMDYx0/LSjUm0ZCVoN4wKVvfJdS9AXvCcpUU1CHs/QrK3FlEXTtG3bGE8s5BLqUKZXfWWgg8sxtSNOlAXMzOp3IAACAASURBVG2RhnUAyGTxNgZcDmtN1kQ1j2HvZ9v0THk64B5MoFpjtBf5GO+vyNo7bbMfYPeya8PtwzuZGMHlw80NP4IX9XUKM3DX8QKwX2Ll4+rj7PMFXUOnWE1il+whFKz+pohz292AMcHrT7K+W3yOogmCq4fUjZ6jpK7anBNQsuS2BQTcqVHyutNOG0xCNzvj4PlHDESz0P7Pge5vmUIyA1yFMVncmfNN+vkXtSMn64+pbSKKkVguaQEwwZP1AHKMr7omtBjto5WSWWhEgI9VeEw19XML5B8E83iBS8AefxvBOmP6SJG1u6iwX2FzSm1lnUksYSaxgimHhOU6OVNfrgjlWth6uWJh7QdG3VdgYmBXQZmRUlspcoU7b7GmDISmN/SXqk2SQKg2a0FEMsn0QWpU0XEcg46EdTwsPvP9sf9reRrgzvHF2gweQxCgWXXwxpIvLGE2WXtgkB1bN1BXUI7X8PRlpmhJPTN1ZwZ9cRZSgKOZmHWSIOveZOVbq47xELteZNHDjO9xpd2mnfogYLEdahO/tCTBBelUNKpeWcdJJ1KHlgyaZKAJSRbUyiZy8Hpwbz62YRZ2dtL2SX37761jURlsbL3qyzbm9LqLiiw4sQM7KsHyajpPCADtZqwh+Ua0SnIxCKMfGGM/wRmAH1ZUra7t85zVywjmF/ttuF9cXURnpQjulhh8ZO2ddzKRMG1Kvpqsat5Y9gnlqoH6ci1sfblWYL9i8IHB+wraF6RdRc6yJdPnMVBrQimEsmTUmcGUUCkslTg8BwOJxV/GPIzZJjElnMbiXbSItbVfbDfLqfpp+++D4E5EfxPAnwLwfWb+o7rvPwTwbwP4gR72l5j51/W//wDAz0OipP+7zPzfP1gLRoj0pjFXYgjdsFLyYiAyArsBm3XAiPEB1DwpQjQrNHf6XUI9EJaDatZVLseZ2vUM1FUOJ27R1EylzjD4zccflr1nmsj/7jqFTTbO5MzLLjhkBNYOoPf8s/jXCuweUW9exLxsAHdOpI4iGWDuA+ElKHsXJl/VkqhLwHKpozLs6cI+BeetWDIG4n5cRDRl7xdWfF9E3xaCpyCssXc87IOJVpjdtp2sue19mrljsELyiWLVrVTp33cW/Wejaw072Cusv8MEMo6nTs6+cS27oL/NCOo+4QfW7gnD2T8ba28TAQAf44ASGQX2cmjjVkBdxDDyCZRrRrmq4KsKOhTs9gW7/YIpV+yn4vllK0v4hblo8LSZMaeMAkjsnQpx4lJv3+q+AMr29Ter05itWl2E6ng2zMDxc2xeJ6aNWD4kcnwMc/9lAP85gF8Z9v8nzPwfdRUh+ikA/zqAfxbAjwH4H4noJ5l5IxBvX5KB+xJm7kUGbJyxTe5t5bHA7mKe0rZo6mdMV2b9hPlGZvzlRmV0wb7YbV4XMfHLRyDfA/kkm2v3HzHjblvNrI+Ly2Zn8WGVYoGmfNUxoXnaBXM5CoMrWaJgdQihuTZgL8HbzwA0kTrDcAML0iVoIvBEyIlDZeVL8skUIgbb6JRdarKhhz8okotmj5b845zFUV9+GV9A3waxxG5RsDRgFyAXBgpb0psdORmYB+VpgrT90Le7PmEA73+0g2IrjkPDd0a2PoD6aAnzOFFMWFEEmfPK9NGAfQnjx81zQ59RnQWgY3Yyxb6Au4thrhEYO6NcAfWmgg8F6UpA/bBfcNgtuJoW7HLBFBxIFk44LpP0Z0Di7ewItRC4JjXXHnw8gqI8WrNZG0agjiLiMUlN8y8gPT60IeI1zjBBLQ+COzP/fSL6xkPHafkZAH+HmY8A/l8i+i0A/yKA//nyTaj3QrMZuwSWPZK26DFJxqobsHOQ5Ta5vXYWS7dlbZoFmOo+oVwnnN5IOD0nzM+B+Q1GuWbUKwZPVTVekPgaMyHfJUx3hHxH2L0E6j0w3QuLdxnhmdn4TFOEA3tOBhigCiOCdiJRYFIbR1GZ6o5LQYTDYdC4eZnFvy4C7EtZe/1ViA0xhGkxIEojIqRUmp4ih+egnrlfKqMfg+/z+Phtoo9hf7uQv06d8jbbj239RfRtACkxagJIPVDBwt7rBA1roQRFV4MOysrUHeRNXq+X6BTdGH6HReYI8uFDSnhXHbBHBblN2g5YAeTHwmirr3CjSCyoUiBx4xZYu4plvN4E6YMJqNk8THWFfdgQw1wzlmtGva7AVcF0KDhczTjsZlzvFlzvZhzygokqpiTztKUgtHSDpRLKlFBLavHy7b14JNT2zlZ6
ivDsK3l52Mi6Kw/HD8dgvM6Z8qPI3P8cEf0ZAP8bgD/PzB8A+HEAvxmO+bbue7DEZZmHxB2XY1aiknME9iiuMaWjNZZNFGoFAkAUgyQdZFFgP75FOL0FnN5ilLcW5GcLrq9O2E8FKcly/7RMOB0nzHc7lJcZ0wsJGbrTSSUflZDFAYs1sEf5qv0eO4YMVF2GKsNzkCfV1Ju3o7XLAOxxie6ZoKxulT1xMELGIzADpTSAHPUFlVR0UyQE7FLVqiAjhRVDW05uhF0AViaoNPRcA/ZmCmufoZ4AugTZj2fvW+Wz7dsEeUeJVSEnIE+sfRfcLIiMPNgiJjVgl/ZsbdOF4WinwWZ5nwDWB2xUsgF7MyLQ+znI9wAv5w0zil2r6voh9GsRr6G3afcV+6BEDcTOu4OJ/chWp83TdDFTxyCGWW5Evl5vKnAo2F0tOFzNuN7PuNnNuJpmXOUFewV3S1QypYr7ZYeFqiQxofYOfXFpDT+2x0azrL7bWIg6PxcvauyeyY7rtwjsD0kGPi24/yKAv6K3+SsA/mMA/9arXICIvgXgWwCQ33nH5cBRW96SBYcBbMo6GoA9WLJYKFogADzgTNVyKkKP7YD9bcLxHeD4pQJ654S337zDu89u8c7hFld5wS4VFCbcLnt8dLrG+7fX+PjqBvO0B0huLDkk5eUlI2YcXsbGSwnYK9/HlayPG7kYW1tk7s82thcChXUdcOgc5vk7JsCIwMm28gEQXfupMrhUcfNmDhOygIu7g/v1wwPbvi74WxwFFNg4o6Uu3KhrrC+gWt5wnVcrn2nf3n3lLQUFFmumFBRuE9oErZmqzC4aMFbYWGKXg1aOWDMBAC6TYepIw8je27FngN2Uv1HUFiyyVsXecdMYrlhntBn3uDGWNm9m5JDw2sR4dj/z3uSJPNhX2TfG3gH7ta64ryvoesF0WHB1NeNmP+N6N+N6mnEznbBPBVMqGp4aKCzJw5OCvbxP7h4xNv+KRcffAZBXOSoK+naJ2MDc6yNs1euGCQHoz87WnxLcmfl79p2I/gsA/53+/H0AXw+H/oTu27rGLwH4JQA4fP3rvE6xZwM/DH4yhq6dx7OpYAXsbq5orIirNGRRuTIzeCIgEcohYb5JOL2pwP7lgunL9/jKOy/wTzz/AF+7+ghf2r3ETT4igzFzxotyhffmZ/j/9m/h96eCH+A5luWANCeUk4mZRHseQxGMGYja6pV8AK7eWfgubK7FDhHxUxi5kbkPy20arzsWd3smeOwWZZTc9eqN4tZMHFi6OiGNp0bTSu3Mm8rS8bpmlhnAnMZJKK4CthIsPFA+6759880f45QE2IlZgDKHYxEm80r6HFYB6PvkZn4XT9QvUQXNCEARloWdGGdjZeiy9HPA7mKZdtyqLgbslTF2WvPyTAs18cscgF3zoTZHOqj41OooHVjSCorzXNmdAXZj7NcVdFUwHRYcDosD+7PdCVd5drIW0wuCPHkYKrfEJdK1yAdVr9zsWbVPYj4W4n+88b31W4KMXxs/fmw0JzZl6gND8lOBOxF9jZm/oz//NQD/t37/NQD/FRH9dYjS6ZsA/tdHXbSbxUKjbFYATQRhHXIE9tDbbYJoylTTgUks53KVMD8jnN4ETu9UpC8d8WNf+gj/1Js/xE8++x6+uvsIb+dbXNEMADhxxm094L3dG7jOsu/+tMOHdxPKXUK5A7JlJkKbaWMwse7RzXuN0AYqA5v23r5cF1C36JT+pnVwbimVPZRD137kG1R+zqY4FU2pyOAp/LZJ1tar0pThHuF9hu8jq+nEKhHExxKOeZC1WzFgj33hEeWz79uSECOlBOYq9tgYyB5BAlGl1g6OxgQVR/TPtyXT7q535r+LNfU+swHsPsHwqkuyAU68p4PSYMcegF38KzYSXZtde6z/COz7hLKHhBJwByVTnq6B/Tow9qs8Y58KdqlXogLC3CsIC2cUTvK7JklaopExPWSCNpoZWDRSas8OX8n6KlkZe8S4qJtjsE+EjfA2kAfQW55deMePMYX82wB+GsCXiejbAP4ygJ8mon9OL/3bAP4decn8D4joVwH8Q8gE+AuPsiZAAD8rZzqty9QNWMytfQD23t6d1VmJNXEu6/UIdRJlzPwG4fQWg9854Q+9+zG++dYP8M88+w7+yOG7eDvf4hmdsNOOMHPCS97jJh1ROeHl1QHv3TzDi6tr1P3UzA/DILB4LZsiGWMnSVcaiuBhdd1fyy5joM76nEqYukQDeg0KJ0fXd9swJQVLEbEgsYh8TOzi54e2t+8uGqNuQiGrmg1sU+JW0Xm0pNbh3Z8D9wjsI2sHeqser+plVP8i+rY1h+hqEkTZm3qAr/qdAQmr0SbrlZybh8/YSbZuzsC2IidcM1S0jTG9b2YHd4rMPVzHoqp29zDgMRn7ApCx9hlIJyiwq4WZ+VqYcl/f91nGvoOIZMzjVJ2T6oFRr0TGPh0W7BXYbwZgP7icXQZf5aTJwjOWmnybi/wuhcCeqCREfwwm262fD8BunyOwR+C3vlt15cO9U6dch3pgPxOvxspjrGX+9Mbuv3Hh+L8K4K8+dN0LN1zvImUqQSTDzlApdMwAakZSI1uuDNLwoJYMt+7UJvYGWN4seP7WHb7+/EP85LPv4o8cvosfnz7E8zRjB0YmDURHwI4rMipupwN+uH8Dz/dH7PYLjrt9J/MH2gvqxDL+bOQDVVi4AHyUl7YL6Tn23b6EJVpc0XQKGAr/J6iFEKPuJLhWrWqzzgzi3E4x2bd1PlVAs+ehzGBNW4YYKjUAfHSYkiTXwfplK+3e+PscY4/buZJG9Wy8xRfQtwnIqerqm8Cm47CVjh7j77MqUF64nnQPXeMN7LZd9EIZ/3cbPTTwdtGMAruyeLfZt9fWKtSuzaTss6XiM7NhA/Z8YgX2kFUpeKN21TVCkkdZOyRGzF6dk/ZAvWJgX5EPau64m3E1Lao8nbHPImO35DtV7SoXTpjVSuZUJ8w141QylqK5Y0sGlwQMz9UDexMtp/B9C9g7cUzEBp3/XW9lk8DwOabo3CpPw0MVaA9sxUGKOtftZsceQD1YRWyafmrDtTCh2oiqbV+ugfk5I70542vPX+CfvPkhvnn4Hv7w9AHezTOuiJBAyCAUkrgRqS5AAt7Ot3hneok3piOmqeBeQ7b6qtpmWouyGAsFdmL1rHCvRe8Awa4t2sw2EKfefyfIZ21FZPkxrb0smqN0uuRtnzSGsY9fddYwBxwXxXQpyyQcQbV8lPF5eLB+iEkWzgHzAOxd6cweGSuLmSdWCIwpi4UV5/gwSd8fi4t7UtZgn1tFgTOCq/9FFwZ7UNL28iD0k76PMaWGAdgbuIfLMPqTjU1WCMM1U8eZhKmbT8iRkY/AdBRQd1GMi2Pkwr4C7bzFNQb7Hg3gFeTrVQXvK9KhYLcrYsO+W3A9GahXTFSxC8pSoAH7qWbclx2Oy4TjMolF3DJhWRLqQmoRRs3J0gB+2Bqwc0/sBqu5KK41kkNgjUnTs/WoVH2MSAZ4KuC+VUkCLOJaJ8+1l63A38V7oXBuuHansQ5AIE5
LurS7rri+OeGrNx/jDx/ewx/KH+HdPOM5JewoIQW7uhkFhQoqFtykI27SCdd5xpRkOu3EId19h+e71B4b/zsrtu96LKMf2NY2XocqX5qFkZiS0c5ibvU2g/aLiIBaxSLG600SK1uDNHk+yp3oL/qJhZuJq6ZES5YSbYzqF0u0lMHA2m3fpVC+UWz06tYyn3lJpHJ3Nj2MmuFW0vy21UGbq83cw0W6F75+podYnJ3anxT6VGTtztzhwE7RDHMQv7A9UiElMgp6C4GMrS8qfgmM3YH9VAUQl/CONTS0s3YNnuaZqCzd4A4oe0bdM3jHoF1Fngp2uwX7qWCfC3a5YJ9GMYxcv4Ic2E9lwqlkHMuE+2XCaclYloSyZPCSdMKiDtQtW1T3u2P03IidY5GuXG2fiRn18SVkMHdWd1hZz/RjZKs8DXAfiin+7MXKzoG1j9YD3QXkozFndo9UB3aKdrIy6z+/vsdXDy/wY7sP8G6+x3NKONCEHWUkWGeQF3JFjBkVexTsaUFSczcrFF9knG0xjI0L4LM9xmnzuVfvOU4wjh3cmPsODnxMwDQCPBE4qQ17zo1NEQmgW2jVrOnKdMls+gbAOryFN6gat0bBnXUlYPHdYzvofw+y+seU1wzu0lxVwaQp8oEqwF4JFUlXYvbctKGk1Bl/6/FHZeZDZSAPDvDG2o2x59oB+7hgYGPstW0C6CRsfQHyScUxgbHnI2M6ViRj7kvtgZ3IVBPSPxK8f5UA6haq1yI7YleRdhW7nQJ6LtinIpNrGJtVH97MHhdOuC87nErG/bLD/TLhOE84zROWeUKdEzCn7rnss0sDaP09xsMJilF7PgN2l7W7GEa7a3Te7EBdLWiA3nrmTHky4D4yjyaO0Sf2DjjIokfW/tiSW2CtmgFMFVfTgjfyEc/SEVfE2FH2rVWsSqAsbpFmCyePR+FLYFsthO/nH3579/icndKL+v/P2dA7G+DG6DnDw5LCOzrgnF0nUMqyFAWzDDY7JKV15vgA7BYHXwKqsYY2CBnqbSWQRYaJ3E9yBDSATxK/o1e2c/95rqRP78X0WRUCY0oVRWUe3DWklQquyVeqHAauPOIZ8N7qVzz+T/3YGleFZBM+r61jCB2wb92bI7Ars3Vzx1lEGM7Uj8bYWRm8TPquXLdLJ225YJXVkr1A7dzh2dt4YvDEEtUxFwkCltgJV0xuXzlphAqxijmVjIWziGKUsRuwz3NGOSXgpMB+UlHTLBPWmN+15VXlFmMqALuzdWt2M/X2OFfsfaBTxEbRTBTRPND9nwy4AxeWlkoZVqaPD43dFZslFyuYfbwzzSTL50wVGUNALC2FKyoYhRkzGDMI97zDbT3gruwwlwyPlxHsXM890zgpbYYFjgpkE6uk9bFOes6AvP/W9qtELkf16xqD1wGVFmHpvUu8hRhQpj4ZsKNZy7C0ASpEjqqZ6um0APMSgDuw9qg3qdQA/qGydQwl3Z6GWCanip3OxKL70GlVTU+JEipVEdMEFs4V25Yon6bE96+fXV/yjTtxjAE7ETt5YQV1Lkkm/4Gx0wKkk8jZ89FYewP2fKzIRwV208FEObv3C11lmqWMjtUonmH14EWWlUZKjEyMnKqvpMUKRkIWwICdxdzxVDLmml3Oflwy5nnCfJoE2I9Znusoz5b1uVzMpIEO06KmnKYUXtCHTumc9QI2mIGBSxaE0HqYlJLXDJ4Dc79QnhS4bxZLxuCdsr144LxYY/XgCS4GICRX0vixFSg14b7ucM87zAzMLMqXWa9VUVGYceQF98y4rTt8WG/w/vIMH89XOJ2yyuTaMmssUW7u++JMEkHfQR3tdxiM43h3Mc4Ds7opzkASg5oIvtwD1OE0ixKpC42MNqlKTPc28HoLJflMJpKZqzP2FvaB/L3wCMLG1oFuqf4osLekHiFq4OsssthkJQ4MTipvQAUMcNDMI5PuFtk7gBHYVXHZHFxotTp8jPxd3peOq2jeGBSo3g0V2Bnogb30wG7KRmO2trkd+4mRjypjXwKwm3OamoEyo/Xvod8bwPtqI7NGc2Wfz7vnNCBHltgwIAf3uWbMJeNYMk5LxmkRtr7ME8oxN8ZuwH5siuHk5psQr9o5iGEisAcb9tHUux0T20AtaxYCZTtGjRrMFPIPInN/sIQZvfsEnFmuxDsG6kls2tMi4fgspoyzzCXh5WmP9+dn+P7yHF/JL5DpBNQZSXtMVcb+sjI+qjt8t7yJ3zt9Cb979y5+ePsMy/0O+T51USHJGW8YowHAVww87HdrAbMSisAeO789axzcPLRFbDNq51MiNcuzNIcEzow0hWVlCGHclNrwvKwGoNR1WLV+MI9gDUjWAbTbyus7iu8vsYiozU9hPMcElFugb0m4fcPrKwRkqpLRC/FJzgM8FUPVYOUUTx7AvGuecyu3oU6AjQ0Msnb0oA80YK+kwE4a198AXRn7aBmzYfKY5irMVpXr0FyoUmVrC2siamSChr5vKw0CtoKYlZpQasIJwtzd9BHmdZow16QWMWLyOM8ZZc6oJwX2U5IVyIwO2N0u/9SAPS2M7Cw+jIHOQsZZIkxM0/w+WAFe+0lOQTTTNtOhdRY0Z8qTAveVn4Wt8jdAe/sC6F9y6MQmSuCdyHl5kl4kMdkJ6T7hk7sDvnv3HN8+fAlv51sAH2JOJxz0OhXAy5pwyzv8oDzH785fwj++/wq+/fJtfPTyGnyXvQN4JESrWqI1QAZgH7M2uXVZMP+McT86G/+xDTpb2tie1K0EPMaJ0DN4fs8s8tNU4nKwXWhVH3s/RTGhDKy9lN7BiAhIanGTzF5eO7UFnGILG6GA85C7kIl53OSTXPz2OguBnblXZe9VZcFSZQ2fTOT7iELsoK1rjqwdeJDFuTbUJ3nri/AJf4s0+eUjsEfLEWPspTH1LrTACQ587qik/QKdtzjUZDeMEa2LG1ZQX18Or9vry6IoTZUw1+RAbuEFKpMoUtWGfakC6suSUecEPmVgIaRjctGSA7yz9p6xuzhmCJvgVnJBmepNb/43mjrQQ6Lo+6eiQfiqgTy1ycBFd5f79tMA9w64qF8mXiosg6cTzWwAvLB2ApWky0oGZ7EzpipLrHxPOL7c43u3z/GP91/BVZox84SvTB/jGZ0AAAUkYQfKG/i9+V389v2X8VsvvoLvvHiO+xcH5E8y8p12iOCNylkAL9ZzlZ0pgi71TH1zU8a7+X7d5jiIhobBa/eqKp6WT/KsP+KUQaogggLJMDnZ+DOZopuABdZea5MnAm3FlJOYu5mXrF6ToAAf2bkB/cji/R2HRjDvoBAm4XWXpEq9RAxWgEeqYM4iX+UI7Nr/N0Qxm2z9kjhmi/XH3YGd+wVMtqfiFzvXwH0EdmfspyaKyep9Ksw2Zvqyyd76RXEHNevPY/VHkeX4DKQiKhnXQK2EUuQkZskvYLJ3ZokTY6y+lISyyFbnDMwEmhPoRD1j3wR2bkHPxsTyY76IDd2b27ZbqO1aRWypTneUFdg7xWwTy7iNPM6XpwHuUFMnlTEZgJGxQj+IlZ2ha6zNrEfhNxG7AhCQBp
OgYdJAaQbyHWF5OeG9j5/hd3fvYEoF93WHF/srPE/3yFRx4owPyzP8cH6O37l/F7/38h185+M38cnH16AXE6ZbwnTfXnhne26OScDQSQdgD6x4FMesgH0Ed8M+22hoqxHgbUKFtneCBLYMypsaPO22ZhLTVzRFj3byEvKwRrm52cmbtY1l1TGzzAQXNW+KZpSdy0qImk7G8mn6aqjd6ykUs9oow76Rvft/GPDAl3LtjxGX27Fn9j9UwvU59hsD9iiKiTL2mGjH2bvlZoCaDHJL2mIhQCpcJHNRpxJXqB2oo7WH6gFqMgMvQk0JFpGT1ZrNwJ/tc0ngWU4yi5hkn6cI6jZRGVtHA/VFVyP+fArsHlJgYO2ALTHg6SwXFVmykphijD2aRCoR2FiVb5WnAe6EZuaUZfalgu2eyY2am7t+vA6xxmYJE4MlSKg7kXNSZIusL+uekF8kHA8HfGd6EwBwV3b4YLnBG/mIHRUcecIH8w2+f3yO7758E++9vMHLj6+Aj3fYfUKYbltGpuiRejbNXlxdbQH7mUwvK5n7uFIBOqVLtK9fyenDb5uAOpneYGfbAYuJYuyckJOVjKEZa4/AbmELcgN4G9wEU84Ke2d9p2LpEADA25S67/5oNAQ2e40l2liTimhEqmKOZQNTt6Kil03Lp2BVs/pv6zrhO4XT5Qu34ypk/HD4n3FZxj4APA1Jd1IEdvcSr7AE5lDx24bBpdaB++d1QAeokhCASuDFnMJEeUqpNo9ahmRQYuoSX2NJvW3+3Cx9zKNWwiO0SasTw9hnmLSo1MDYN16IW9Ipey9VkuOY7wcgZCVkMFt5qD4gbweeCrgDmt0dPkOZaR05jYsHt6VcL9fGCkDFMoQkwzkIlZJPChZPm4q8xOmOUF9MeJmu8fuVcDvv8P7VM9xMJyRinErGR6drfHh3hRcvrzC/3IM+mQTYHdx1CRpl7uf6rHW8oFC8BOxN+dUD+8oChyEWBINWvRP7jOdb0Tq4UrbztovaetXqo/1vtr1J7ZY9bgxRCy6WSYB9p8Cek1vMEANchM261YudP7J/mzBgx0LMfEyGa7FvXrNC1W6dSOTJJpopA5iPVVyNW59UI/Davo0br4TRw810oiQVZ0gFAoDa6W7HjgbskbGrI4/HZ58DY4/5GUy2HOXQXrfwLgcw7II2miiCY39s1iVm0V6qWfy0NmCm9Qqk2AoEonc7BUsfY+0ndgWqs/UTN+c8D3TWPjEm8169G9kvYhk1NAgTnRCqCq6ql3B7eOpWLH8wTCGpuRSb221KkAw0C+QldWExtcROiMbkOzm8gxeBdw2I3KyKFJhmYd3TS8JCE+7qFebThI+vr7CfJMLzUjKOpwmn+x34dkJ+mZBvCdOtpNib7tQyYNYXx+35gA1y5iD7CFFMVKw60z9zbZIXbyDN48A+8w6iOeNKnBMPNdbEaE4blklnrp62r/M4VIA3QOdddiV3wxSdzG1CJ+pEM43FS2UlNDHgFjcVvax9Q477OkraoFjy2k0e/MDss7EMf4y4JS7fu7m/QlQTFa64lUVsrEsDeqokYgtl6pGxcrgajwAAIABJREFUR5Dvcp92XpboRAwyqWjHdFFr31FjHgdSMkc2yRh79dyrhKYeSICJdmMj2STFEGukYvUPDlemJA36AmfuxtZDHBzTH3SgbiAdzIe336kwdk9CXxq4E2mAObO26UIX9P3gUnka4A52pwR7YXWBmCBu9GIDLHHvGBhQRZNvB1bqac0Sub7KgIVYl1tHXcozUMqE5ZTwye0EmqQOXAiYE9JdwnQveVOnO2C6E8ZujhpjZDuzUpEKhqd+LLBT27+ybPDr9Nfn2BHGJhyZ/qqB27zpE0QKr8LYk7J6984LUR99oCaASa1iJvVsNZA3pyd/d+wgzfruN0UzgIO31Iv8XmpuALeesWXuay5mLfMqlWFVFPY7L51gn7QG9jgxENxByg5gkE/a/k5sEq8WLEuBPTD2HtRt5dZIVOfz4fW73AgiokEzCOAA6GrvnYr2yUWc8RJIFnqFJEwxNXD31U5tz+DPEiepeWTrasoZ4sxLHBx1yguMvTMcsEnLnvWcaLCqOMYi1Wq0WlQK+YtbO0aAp9ieZ8rTAHeCuhUDvNCQM/KBU7eUqSMZMmVd+BtowGksIJ2ASVl+KiKDq7vksVI8wt2RRLZ+VBn7kTEdWyYZWyqu6j+y7ADsLiIZxC7R3t3PpfVnvL5/pwDyP0JxpbZ3rGB/WyzEQFg+urehKjkzNfl6tgBj1ERmgAwGE6GoorEr0WomsHf/Lx5nStrxv9dS+sY3kDe5uyxKPr/pJ763WCVKA8AzZDUUm9LBXUHUJnJ31EPzRA6A7oCkfcVFMUAD9gjwlT1QmItrTM/K7ODcxH8AF4Cy1AekCwAmsYRTPwHXwBgY1r7eFoaYSlCcmv36ScQw+VQ7E06amwNWB+o1sO9Nr+l1PyQ7vlSgFGHrgHqxsYt32gYffw+ZQQJPCdwnqBmeNLKw6qZw8k5alYr4wL58XYRDN9kuNRaaZ5kVpqorh5OEGTWlbbTldXdqX7b1wG6d0SaG/r60qkMH4oRezk79sY96VmiHtnM22HvXP7g/LDKEUc7njKyLgmesXSZZSknfX2PryCRhgZO937bMoPh+zDrGLKNGq5moTAXE4cPi1USP1NcO7FISVSTQptw9EaPof2b//iCjgU64vP4+svZOqT4AvIuyguksjX3CFOr2nlWMQQHQu4T2yq63mOVFMUUsIUqisfW+Hiz1IHP9kvo7sKf+1gKYbXJwu3wdz87a3TGJW5z5Ywt814WrNisX8y4165gtcA9Kf3f4s2MV4D1FJLVr2MTYr3ge36efBrgDqJlRs8na0WTLVl6F3HRstk0C2zbh0tAJFolPlKG1EOosk43FpJCO0ZZr0d61eWa2uo6xb7aA3S3cRvFM2P9pS2eHPk5smyeEwzh8RoCIg8QHtHRA1kkJ6g0MwFk0TxYbRDp6NwlBBuQ6zES/KmvJDAJ7V0zv5O9AaOfXC/Bm6pi4OTIlKJADaj2DTj7cHmHDgiRO1vbXODYGQB+jkgJtQjCTY7ke9dfU19Hk6NQz8w2LrEvjlAdrphUIVnT5ZUdxjCtRFZjNQ7VC5n8uFpqA+76vE56vMuogWgphBDpQXxj5WBpb14imbrpo8nID9CiOseJRbYWUuIK4O6cqiEvcfwsmxli3azdpPoCJTwPcyaxjgJqBpGngrNc1xQo3ljGsTD7VILZOSdCZVzqPyPIYycwz9dqSeAKbYT4706SwUnB5+8aqoWPjj2Xowz06Me7A5rr7xXLpv6ETje7PFqK0DbzQw5IwcyJ2ayZZlWiQsSENH4DeamKsSpC7r2mgXsQnhUH+Ho95jYXQQv6O7H1KEg3S2HpTsAb8fkSfaEy9sfbx/fl/sWLWeaIuJ968m9D7wFXRkebVGsRY7PaJXW5RY/A6qZg4RvoUO2MX1q5kagF4mKTGtnAdgQb7ksiV3DxOTRSjYaotNhLNpQf1x4hiqrVvAHkrUURl0UKjrN6savzz1Zr6aYA7g
BaVDpuduEtPNZTRdf9c2TYZ81NhShhOwlI8zKgNOJf5RcURN9livD/1nSwqT1uFBtZ+AYtGU2gH9S32dgbkH8PefSIdGNrKM25oSwufLHhMbgVhyuAWICzcy0VsQyVMXLCqnAH6EAZ4a+A8AWCXIkx9SrUNdIgvh7F3C01rCzuXwW88AttkN3YIBBAD1sAeyYfex5m6Kau3JnoWYB/FPFKX0P3s+0hSAFiMmLNvZAzuFsQSVBiUVSxj3pokoqEKDiIZrMWYsU2igteU/zGa4xwUp6eKPFfQSZPLBGB3BWgEdbOQ6V4GwUNOh/d+sV8qhnSJbHwCOH/aufJkwN1FEmPHAHq5k/2+EO2vD0egjjHh/7bU4dW+BsJBqQs4CHUiCWevcLFBF4Vw61nifrvuFnvHxnkRC6nfB2y8/20MuAzsW6zPGVtYoUQCkkgsFFS8wlZBm7iGnLJeWQ4APf5tBM+jPGIdXyaydxPJPBlQl2LgbeEHTDyTSZJ1TJpftTKh1ubB6rJ34gbCW8X7Mg1M2zbySXkkANbGItLidb+LEzmH/2JfDSLLNn6l3o24hMnKrJzU5G9lqhrBLDxHWnR1tljFFNTV38L9QjbGW+zHTsxCHPYUZOzO1k9FQ1UXCZMwL8LWo3VLlJVbMZl6rMBWZrDH6h9+hPJkwH0FcrabAzMG5IVuUvuNa+r4MICXfWGJY30uMho9sVmx6B5jw53WGt2AWTF1vWDH2gPgj6w9dswHleEG9Ib2Ecg3WPwjlOur66+UcVsrJ1IRmt7IvRv9Ofo26TT+j61TtJQBnGl6WGCrMJEDldvWP4GyS6VnbxUC7ArwNgGQb4BZDF0SqXmJ7+gcsG+shLzr6Jgi7fedPB9hoo191J5F7+sTRBqO60jM5ffhXqpBmdiSWcBFMwgrF2adVOqQuS2Mgy66oilSA7CbJYwnD5kF3LFU0LwARRm7ORwxt9STHBqWUiOeg+5nk3j8qP3zgdOfDrg/psQBckFzHEMQbLH0EZi32CMBLZ/lVtnaHQdjBOlRHBNvMjKhDebxqBIAfquK3tcfuPZoJbMC+aHusrKRq2+C6QDs/taGyc8nKp0MIpiQMTV/YbRmPhZjBuhXT6/ZicliyiSSHKoSIbJiShKh0CJG0rANDKf1EQ4N6u+ogfgK2KMyNbBvb70NMI+gzEqOYOMJQ9fXa3s8qGDlFQkSB5IEIneiWhVzACqSGcpD56q3upB9AXaqjKo6MnbT46FfcGiXEpl7CyHgYpg5pII0Mcy8CEtXYGfm5gU9igHVO4wioCeCOdbFfrnSV30O5emAe+x8EZADI7H/XN4cO+UG1luHXpkHDisBAE2LHUsKy8lz1d6yxum+x5uvWfvZcun/OBF0lemP6eTyjG4cj+deUtZs/ecD13+bMm/o8F31OEwyvbAsym+37kWM3lKGCF1SD7ufi2qoV2S9pkIApsDca6VOPBM3O76dvEGhx3Fiis4zwL4Zg2QTzJW1p9A3TOTCEDElA0QW96mdXgGkCvVdMHEMYAw7zkf9821M0lByZmO/aCiBpYJI0tSb7wZzWy1cCoHdM//gTW1OSeqQlOYCOi1iDTMvja3XKsBeqgD4GVEMKDXWnnMLOW3A/qOIZMb39Yjy5MC966gRiL1BLiEtXPbnCk7qrQQiqFOYMLbKKm5N9+eZOnQvIbKV9WcD+fW+7nrjuVv33BrAG3+NRK273rlFijE2659eF2oToDP8voKXJg1Wj1IyubKNXAMZI+weCXSDuUfxTAonapz4i/LqL6hYDPcMdlFMoopECQnNQmalRupYunwfHXq68VLgpoLO2tG/g3N9iwlNN0KBFKC9EtFr6A9LKLIo+GeslJpd8QljZBUDwNvqnFlk3JSARZTIaa6eTBzc7tdWB9zf11l7CIFgyTSWqszdLGHscy2GYQV3B/VRFNM9DmEMgfFQADuOz88VvT1o+OsV+/GTAffo7baKaPjARNfFGWc4s3CQ37iXnNCfb6XzdIVcyyaNx5SVGeIGsHeRIrcGw7kyThBxvz9PuLcx9vi/faXL4Hv2vsroRJEpAE91uGaowyuby52tQ3yIgDwG8DZBVKgu44w47AssFs7KxDIAnKWbOCaWVVPxemtmic1+ewvYt5So/mmAnuAe4cbat8QbzE2UwhUibioy3hpzh487U6SaclXuqxWwfjO+y3YzWZHYBF8lhHRFQporeEpIHOT7eWM8aNuRKlw9o9iioXkN4LesYUoAdmPrWzL2WCiBAlP30BeXEsZE0moTR35kh30EZjwIV0T0dSL6n4joHxLRPyCif0/3v0tE/wMR/T/6+Y7uJyL6z4jot4jo/ySif+HBijJ5FDn5ZN+aS/u5c0NdzXzKbGW7iIZtaxMHb4P/I5dNmyIZv8h5YIkx3FcTgV+P+hd4jrVfKE3fMIzVOGnGbVVPdJNPl9s1oVsOr4KYfYrJqm2voAztxF4GHvQoWfsX0rchHqrxu1nOjKWZOEtDsJoxdu/I+q+KXmzbAvYWAgBr9m6LnNTEGjUDPEleUp4gkVp30O+MOrEco6FCaubG9m1yiL4MD7zCVe7c1QEcXPQF4D3DlyeiZo0FUz3xdtyS5Ww9VuRj0X0F6bgg3S+g+8UTt9OsCdyXAl4WAfZa1OyxroE9imHOiGPascPvSxgTAX/VZpfbNJbHcNEFwJ9n5p8C8McB/AIR/RSAvwjgN5j5mwB+Q38DwJ8A8E3dvgXgFx+8A6OF2gxOQuZqHHMOnjt/xW4CyPesh8+C+qPLwLw3Ze0Y9g1seyWGifvHxxuJzQYg2r1WVR0Bnjf+O/M73m8lIdgA/ougvmF2+rmIS17NHPLz79taMm0DuieR0E8LFua/PZohKWOnnrQUIJUB2AcSs5q4fXJuzoOSS0F/TyzbjgH7HjeL/WTnBq/qVT8efjugb5lBxiq6rbuOWYu6qMHpLFWfhQZoVi+6KbhP9wXTfUG+Lw3UbTstoNMMOs4B2BfwsmgY3rIWw3gFe/k6pSTA7s8W8hSc64+Vm3fq6NkKnD3vscTnQbEMM38HwHf0+wsi+kcAfhzAzwD4aT3sbwH4ewD+gu7/FRZB0m8S0dtE9DW9zpmboGVwOYU4LR4yVEGa0CxhzlxnLGdjAm7tjhMt99YfK+ck/46N7wPQdQBPZwfAo8Du3KNTA81HiUK8PTf2j/eLYLw1Kdl1aH1+r+8Y/n/V+XXLu++h4y5e7vPv25deadUOEIG9VtnAFNg5eZjaGAZAFITkYpkoi98Sx3gfNDDOELYdgB2Z1Tt8qLyKgzwssJIFYf4PTOwXyqZzkylbTMZfw6Wp/U8phvbmgbhwCxOuSUJk45awfSkabre04F2jfH0snsaRgJSBpHGUTM6eg2XMFlvnEIPG958R9QBwSyNCd61x1b9VXknmTkTfAPDPA/hfAHw1dOrvAviqfv9xAL8XTvu27rsA7prS6hgypYdll9m7unPM5jUe+P2YYgB1rlD//SKw2yXPMfEVo4nXotXgGgdPNzGMA5m3VxDtZFx+znMTH9lg3JxF/b7rCeOC6OsV31On
XN8CcdtXAUph5fbQdT+vvh2KgznIkzUv3JI4FybUmsA1oRaSIFhFQW6I7xLD1hqwpwDwXZOE/tnk4sHKxIB9CsBun4C3oSjWZWVBZqn0KqDuytbgxITBhHa0cOKmzyF9FvmbZdIbX66JZN1OHvDIjYtmf7IQAktpgb+imePK2zQsMRy0UwN2Y+xb4phYr66OYfP99WHEfoX2fjS4E9EbAP4bAP8+M38c8z0yM9Mrxi0lom9BlrbI77zTmPuQwsoziD/mmuN73gK+h8oIfMM5Y3Los8DesXX7HFj7Ixj7WeuY8XcE1jgoz00IG/8/Fmit7q9C1M45fj2qGNOJoDVYWHTJHwAQSRCmzpX7XN0+x779/Gs3AOCZl6qC+cLZAb6wJG6uNaEUYe4N2DVjkG0B2E106cBe+3beZNPK3JvylAVQI2PPeoEO3AkoALtz1YVwAghj8VzLqaUU0vBeL12vaL1SrFgoHo2y9RfybEa1T44Rfjuwnwv65ZUIoB7EL53Mfcs6JpARZ+36yQ7ytb9Pd189fcQlujyUHmX/QUQ7SOf/L5n5v9Xd3yOir+n/XwPwfd3/+wC+Hk7/Cd3XFWb+JWb+Y8z8x/KzZ0NC3abh9obYWiL5xVYXl3ozesDnje0x1ztXHgL2rUuPA+4xCHnumK2XHbYH5fKvgs6fVTFWpQwzBoqK5SyzH9nOCPjMLdaH6WsulM+7b1+/fVBAT6gQpr5U2eaaMZeMpWTZtyRh7ksClrRKRp002FVaGrB7yN3BeGDVph3RCDbtrlQNwG6An8J318xi3W/ipB3H1eYK0EQ4Jm4gV8Zu+iWYs0+UwVuWJ9ssvnqpws6XFujLzRvV+sWtYbaA3QDWRT0NsJGyyNZzamw9ZzW5DSC/7gz+2YUEtn660T6rtgJ8dbPSd10oj7GWIQB/A8A/Yua/Hv76NQA/p99/DsDfDfv/jFoW/HEAH12Ut1th9PL1qBB9ROlmbPTfVyA/3PchsG8ed4G1XwD2Vqlt1v6jlLNMnobv1Oq96gyP6BiXyqX2XMvYh3AN9l+UC3N/3c2V2mOV4IHBi+t6PT83flF9GyKSmWt21l5qclHMUhJKSaglSdjaIRk1lQbsURQTA2HF+CkPWT85sI99owNwvqiXjmC+uSob75+CBZRbmKBnv9E2PJpNdg+xAZAmXzeG7omqA4jH8Lw1nBvqB0ptS1k+8waom4w9J8/V6z4tWzGvRmCP5RxptTZIUeaOi8RxLI8Ry/zLAP4NAP8XEf0fuu8vAfhrAH6ViH4ewO8A+Fn979cB/EkAvwXgFsCffVRNOgBkfzgiBA+wDdvlFeMLfxHQPGHaf69iTvRwvdfAvgL/M+VHqsenZOJb97wodIiD9Rzwhv/9esP33iGt/77ybB3q9SihSBi0EgSrikKyXFy7fiF9u4Jczm6s3Rl8ySKWKcLa65I0X6luxtgHUUxzWgoTJZ/p44FkwIAdEA9fZ8xx4468dq9mmDgefDf/f3vXEzJbctV/p+p2fw9MQONIGGLQiWSTlQ5BsgjZCGpmM7rLyiwENxF04WIkm2wV4kIQQTEQRcxGxWwE/yCIoNEok8nEMGaiAR3GjCLoMG++772+dVycc6pO1b23u9+b/vr2+1I/aLr7/qu6dc/91a9OnaoilJGrtkKWZo4QwGREO0Pmc2GuwAxBmi2Zum9EHkQ4ZvgF1VUATNLOx4aSvrlf/H9zw7R5PASr1IL62aMOXNLKhGOQNYdt2U0XmWTCjX2/yAyOiZb5Gyxf4sdmjmcAnzp8d815UZbXSxEIkUovfJ7s3hkoMC3ImVDJ7Hd0BG/bq1OPIsUZ1V4Zn7vW5MWaO/dwmhP4NPK1Dx+/F1pGc9Euh8h1Loa6JnWuts+5Y6rj97nelqADa+p7cuo9LF/zHLbNUJ86G8GHWV97ShrXniNlCnmXWHZMW5pLtqyXmSME73MXF41eRMmCTMVXFzwSZuM+RDJCWiSBwYPWLppuXm3I1jhuk5tTwuY9sXtvpqHgKGUHQJT1mIQsvYLO98fII0LN/trVvFxNxy252+82v96WmXMl1qZPwyDXHZP48ocBGCIwRLcsZUCKutiNlaeW8T5cxghVksESaQNQIowJrgMpSDOLsVgzVuTS1NAVwWP+GrNGpTi0jutc5Ivffip3zESp03Laj3NZxn4VdmjQUzXQbEa1e2KfVCBNJdCC2yKcyMmZ/Cb1Ix/ZGX9usHtgzAA0FHKOvKsK1N+O2Zj+rDrPPbHmcEdPDBb6iOJfN1L3namw7VSUvb+PphJpST3paFoegIQgo5ltQrDEOlBLie7I+qTwsrX8rNWm1xpZ7scq+Rizmq86Tg/Zhncj+f/Z/+1J3Z2XkOuLau1YZrBzz1BQl9EQdaoFElLfbsBXG6TtgLQNSBtZ7CYNyN+2LOk7Uu5nAQFpAMYNaUyq3ERQ5U6RwCMjL/gASOHA/VblV7lldPrNirwnYU7aRPTHzJHyjGqfDXlcUObHumrmMBcuWaXV4kjF7q9pon1CIhXJcCGaBaIv57sXdqYimK8gmiz6R6DlTXbt6sUqD68oIyov95OAuZq1aemR3+bJhMuz8+6XimQH1IOUfEx74NyRSqFu7TBIo2PKda0jlqqOUJdWJNDAEvrJUvUwiRubd1TGrzAkRFHvYb5cpvt8XxxxqSBKv1shfNvGLtpqslj3Eg6R+D43jF6bfZo+isfy5Bf9MHLfRKR7A9JVxHgVMG4J45ayABaSx2R95haXQe4wowNYiV1Uhg5rz4sja6FACRuFQGhktKqGuCb4nJp/Ji1RtKDm4c7un/mNUkHMu2oweXkX/aQo320UzGw+3PETcNl/MCzRK8ZDcNfaGwb3qFw7W3n5Qp6T+qqMxgSbb2gtEBiR5FNtd/+LIOSJXZD/bwuiAHmuI5k/CZngK3tqBiglr951pGlF7FHJvXk/GEBuHljFokthcpRFRiiSCOJsB8LKNjWFTMtLZVUlncyr6sc55jl5G/MCwi3KTRXBl/vIx7r/B9EMHDrqlDYNVxHlUF3L/+j6CIjEBbOJSPcidvcixnsB4xVh3AJpS0hb6LQQjLTZ/4JeCLlj6h9sC9I6XUgNG2WcavVQ8/FCrgQleCA3r/Z1ulZJhua4VrUvkatT/hPXyYKyb89r07X9i+4ZzOybgykhVXmihPccj3mlTtWnLvd8ziNiMT82K6T6VuX6lpDdbHNSStJhdSHKPcBNIEZlmt8YZPKwEBgpu0c4z4/DrlUiUBJwMe3VLbaq3aYYMJIfHLHrh5Tcg8a3V+ZDVcr6oko+OVKexDANxROSu0CIEGywVCSkvDSlXIcyIR9mzbZfh7RF4F19VSXhea8lduD4/p09K75N4K45n180RC958529HAPSNohivxewu0fY3SMh+CtR7uNWn+OG977rF0Puc8h+3AQtAIa1i4iLoq5qaoWNpHTOG2DkiR1RoMO2ta/1NUvyNbEvumyOVe2o98+pdv+9735yxykw4UPfgdoS+iGXTO0X3vPiEPSZLKs1K7+sWnMilAe9+NaUTRVBlnZ+WZIM218ZeYk
9KgtjP0yi5olssQ4IseaRoyx+W7baWG1K3wVKADNNKtHcSWpCSb+Td8UMQuYYVKnHBIo6942LlJF+P5JKB5INjvKusTKHrIBISC6P+bkpqVMEWBfJSHlGS8nssap9Spbkftv2ut9nrgU5ifbal+4xptO+Q2m6b/Ye87tUXMqywDyBByXzrRE7kK6g6l1dM1t9jk8CuVcxu7aA7QgEP0AB0JpaffHmG3SdMvl6alxe7bZzu4Ogo97g/IaU91ESxVN6rN7JDR4g9hnVfgxmwzDd//pglMgYty1/z6lzRyb1eqpFtbeheL4jtWomt3OewNS6VcNL9yiZLtERbufSIKZ88f0jKW8bBKhaT7oCU8IuyRqq1WIdQeZ3F6WtFZjZnvrQ8nB/a5DMRIhVNmVTCwSn1nU0KiKDhgQK0mqgkOS7KSzWFjMjaFEnMAcleslfghK8dsSSiRuNWgnasZpH0fr1h1V9S2IHyrI5bkrkhH2Emo85Iq1jMfsuTbbz7LGtx8FcWGkg9bMD4xUhXQG7e0Ls4xUjbRlpm4BNwsQAHC6D3JncQrUyt0y8YcSbUVYgv97lWFQOJf6zajI1UTJF/ZElAT+s3AjCYulZO6vz3NBG6EzyIlhLIY/WA3wEgbkU5LTSYsiXcd+G6f5CcZN96mNtFxChVNKHrxu4VvDVi9EQcG7WtgSeoJ1fmHzkeK6mlwWX/0ukXse5o1Iu+Ty3dmae9MlNQVC7gWQ7jbpijk7XakqIxvFkL/JjgaALYsvHu2ZiSIghCbGHoJ2ZEPWuqj2rs1Fr5QBdjQryjGfSs/hnmzcG+dtInbMbJoRC8CGkSXcGs7wfo8ymXgheXyDjy4RC6qQum7CDzMke1X7z3DfU2ITL/6FnNUPe86Q/Q+LNtR/HdXjUtY6oUOr8U37ZU5T+xnELpI2o9vHKEfuGwVuWVtcmzXmTMy6D3AHEG2C4Boa3GZu3Eob7I+JbDxHuP5RpOXcSuMoxSBxoCMsdnTbyLY/y0v8o35n4rCIw90zUgVI6eADB/SbOlYZfIKDq5NT5rG1VmFaVt+dY/UF5ezmP4M4Ndn3kdSp9/0SOaHCoO8bc9obQPbEXcmentPyH69/VNZpOMi7pPkoH2ITU89wgPCV2oCyy8OAh+PoafPOgqPftFmv63QmMiOR87vUnqt89ECPGBE4aTMAmz3WFIY3nt2UG2ZWtS6zYlk4bkKNhVLFnF4yq9RgLqYdQj+aVR6YhmghIJBo9EzyFypaxA1KArHc6sir34obJM1c2AiMntq8cJ+RJE1JfVs57rnUMlo6fXJf37GvPa/Kv5G7KPUfGbNQlkxU7gzcJtE2IQ8K+aY8ug9wZoJ2o9c19IfbNmw8Q3roB3b8Gbh7IVJyAm9dBhwMDaKsvP90m2agyT/YQgpxUACEIgcZQk70RPAEc9ZwdhOyNzIOly25ua3JkPk/2QE341Gxrz+Xc0ebIPTTnGNq6b59a90q8Gf1Yq3ev1pWUR0fMjRoH6pe4VixcOpXsPCN2I/Vx1Dn9UwkbA0pFYXNiP9yBb27Ab18jvX0NW6w4MINWtnK/8lLVoQquFPxo6j0ycqgcMTBSdrOQFcHcO+3txVqYRuqNbz3EpGpdyN38/wDytxG7TXpmLV8iQiIhe0YheA4s0yWoAKFI+ny8OKDKDh+L3GeOP8o9si+dRyT8+fTouDTb/GlZsInGWMId0xaZ1IXYGdgmhE3CsBmfAOVOAA82KtUTgH6HIHND2JDdpVFj+r8aKrxHwVfbiKT331R80P9G3rEQfPbNZwXvCL5S715dO+Vu9+zIuPKVz5B+peDbUYee1Nvr1cUs+4BCFFY/Js33EmtCq62fAAAFxUlEQVQ4V5S4g1gUmrmLlOSNj5jtJp2xqxItxE/OuMugFvnWQR46xqFS7s4+WMmdANjMf8H87jGC7l3JXC0ropB6QqQgnao0YhcCIieEFES9h4QYSQdNEhKS5J3kXpFoJnrGIT8jwCb9ovxBdsHETOxC6uLvR+7cNdg885Sk9TAGWe82paB9VTJ9ANvCFKOGPSaWjuxKJFB+zhW5A4eJdS+R08L2/deYHH8M5o7fV5EsndMe7wW/vd8b6QTPnacbUezYJsSrEcMwYrvZPQHKfUh485mE66cChvsBw/0B8foK8eZdMre7i2HPi+E6UpuLHNkXsSL74ZRO8+3VuCqm6THT61bbquN5PwG32zHdLuen8juUa2Pm2otg/3HkmtW7I2TbngDSlxzt8ZUqL2ksNZln/aONkhPyN8L3eZhe21oQ1gkfHzJop7sH8WHu/vZAmdwiLCrIXDABskj2Tkm+uGYSBhUUsliHuECYpCOVkwoRbeHMJKQf7RTVEEtP6pnQY03qMaQJsQMSrJOYQBQwJoJFqIg+CiCSdU05JCSbKiQVks8Vkn9+qbaZRXLeh4rEy5+J6T8C2T5W2gvbpsp9+lLOvh/GORqymiLAG+08HRhkin0Ysd3ucLXZZfuav6cLiAMmov8C8BaA/147LwCewvr5uIQ8AHcnHz/AzN93qsw8CojoTQCvrJF2g7vyLE+Fu5KPRdu+CHIHACL6MjN/uOfjMvLQ83EaXEreez6+8/IRDh/S0dHR0fGkoZN7R0dHxx3EJZH7b62dAcUl5OMS8gD0fJwCl5L3no8adz4fF+Nz7+jo6Og4HS5JuXd0dHR0nAirkzsR/SQRvUJErxLRC2dO+1tE9FUiepGIvqzb3kNEf05E39Dv77mFdD9HRG8Q0ctu22y6uhjzr2v5vEREz95yPj5DRK9pmbxIRM+5fb+s+XiFiH7ihPl4PxH9FRH9MxF9jYh+QbefvUxOiW7bedt3pG2vbtesw5zX+EDGSXwTwAcAbAF8BcCHzpj+twA81Wz7VQAv6O8XAPzKLaT7MQDPAnj5ULqQBZn/FDJG4yMAvnTL+fgMgF+aOfZD+nyuADyjzy2eKB9PA3hWf78bwL9oemcvkxOWbbftA+neddte267XVu4/CuBVZv5XZn4A4AsAnl85T88D+Lz+/jyAnzp1Asz81wD+58h0nwfwuyz4OwDfTURP32I+lvA8gC8w8w0z/xuAVyHP7xT5eJ2Z/0l/vwng6wDehxXK5ITotn043Ttt22vb9drk/j4A/+7+/4duOxcYwJ8R0T8S0c/ptvcy8+v6+z8BvPdMeVlKd40y+nltFn7ONd3Pkg8i+kEAPwLgS7isMnlUrJ3HbtvzWMW217Drtcl9bXyUmZ8F8HEAnyKij/mdLG2ls4cTrZWu4jcB/BCAHwbwOoDPnithInoXgD8E8IvM/H9+38pl8iSi2/YUq9j2Wna9Nrm/BuD97v/367azgJlf0+83APwxpCn2bWsK6fcbZ8rOUrpnLSNm/jYzj8ycAPw2SvP0VvNBRBvIC/D7zPxHuvkiyuQx0W274CKe4xq2vaZdr03u/wDgg0T0DBFtAXwCwBfPkTARfRcRvdt+A/hxAC9r+p/Uwz4J4E/OkZ896X4RwM9oT/pHAPyva9KdHI2P76chZWL5+AQRXRHRMwA+CODvT5QmAfgdAF9n5l9zuy6iTB
4T3bYLLuI5ntu2V7frd9ojfIIe5ecgvcjfBPDpM6b7AUgP+VcAfM3SBvC9AP4SwDcA/AWA99xC2n8AaRY+hPjVfnYpXUjP+W9o+XwVwIdvOR+/p+m8pMb2tDv+05qPVwB8/IT5+CikafoSgBf189waZdJtu9v2qWx7bbvuI1Q7Ojo67iDWdst0dHR0dNwCOrl3dHR03EF0cu/o6Oi4g+jk3tHR0XEH0cm9o6Oj4w6ik3tHR0fHHUQn946Ojo47iE7uHR0dHXcQ/w+5IinRWwwa5wAAAABJRU5ErkJggg==\n" + }, + "metadata": { + "needs_background": "light" + } + }, + { + "output_type": "stream", + "name": "stderr", + "text": [ + "100%|██████████| 44/44 [00:00<00:00, 1016.15it/s]\n" + ] + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "" + ], + "text/html": [ + "" + ] + }, + "metadata": {} + }, + { + "output_type": "display_data", + "data": { + "text/plain": [ + "
" + ], + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAADHCAYAAADifRM/AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9edht+VXX+Vnrt/d55zuPNc+VVEJShRWCDAZUbDBEAZEONhCjjxFoBBQQ2n66BX1w6PZ5cOhWgccGpMWWhgiiiAqaPGSAxHQGTVKVSlWq7jy899733nc85+zfWv3HWnuft0qqEqEmynfVc+vec84+++z927/fGr7ru9ZP3J092ZM92ZM9eWWJvtQXsCd7sid7sifPv+wp9z3Zkz3Zk1eg7Cn3PdmTPdmTV6DsKfc92ZM92ZNXoOwp9z3Zkz3Zk1eg7Cn3PdmTPdmTV6DsKfffJSIi3yYiF0VkQ0QOv9TXsyd78jsREfliEXks5/PXvNTX80oU2eO5v3giIk8Cx4EKTIH3Ad/q7qc/y/da4Abwhe7+0Rf6OvdkT55PEZF3Aa8HTrj7ON/7NeBfuPvfydcO3Ovun37JLvQVJnue+4svb3H3ZeAkcBH4e5/Dd44D88DH/2t/TEL2nvOevCQiIncAXwo48Ed2fXQ7v435/Cy/0Twf53mlyd6if4nE3XeAnwMeABCRORH5WyJyKuGXfygiCyJyH/Bofm1NRP59Hv9FIvJBEbmef39Rf24ReZeI/LCIvBfYAu4SkVeJyL8Tkasi8qiIfMOLe8d78t+ofAvwG8BPAm8DEJHHgbuAX0pY5v157Efz9X+fx321iHxERNZE5H0i8rr+pCLypIh8v4h8DNjcU/C/hbj73p8X6Q/wJPAH89+LwE8B/zhf/wjwL4BDwArwS8Bfz8/uIDyfJl8fAq4B3ww0wDfm68P5+buAU8Br8vP9wGng7fn6IWAVeOClHpO9P6/sP8CngW8Hfg8BRR7P94e1kK8duGfX64eAS8AbgUIYhieBuV3f/whwK7DwUt/ny/HPnuf+4ssviMgacB34CuB/FxEB3gH8eXe/6u7rwF8D3vos53gz8Ji7/7S7d+7+T4FHgLfsOuYn3f3j7t4BXwk86e4/kcd/GPh54I+/MLe4J3sCIvIlBPzys+7+IeBx4E98jl9/B/Cj7v6b7l7d/aeAMfCFu475u+5+2t23n9cLf4XIXijz4svXuPuvikgB/ijwbuBBwpP/UOh5AITwWH4ruQl46hnvPQXcvOv17iTt7cAb06j00gA//du6gz3Zk89N3gb8W3dfzdc/k+/9yOfw3duBt4nIn9v13oiY+708JxHhv3XZU+4vkbh7Bd4pIj9KeCPbwGvc/ezn8PVzxOTfLbcBv7L7J3b9+zTwbnf/it/BJe/JnnzOIiILwDcARUQu5NtzwAERef3ncIrTwA+7+w8/xzF7VL/nkD1Y5iWSZLH8UeAgwRr4ceBHRORYfn6ziPx3z/L1XwbuE5E/ISJNJqAeAP7lsxz/L/P4bxaRNv+8QURe/fze1Z7sySBfQ1B+HyAi0weBVwO/TiRZnykXiSRrLz8OfKuIvDHXypKIvFlEVl7g637FyJ5yf/Hll0Rkg+Ct/zDwNnf/OPD9RPLpN0TkBvCrwP2/1Qnc/Qrw1cD3AFeAvwh89a7w95nHrwN/iMDwzwEXgL9JeFJ7sicvhLwN+Al3P+XuF/o/wP8B/A/8l6jBDwI/lcyYb3D3/wj8mTz+GrE2/uSLdvWvANkrYtqTPdmTPXkFyp7nvid7sid78gqUPeW+J3uyJ3vyCpQXTLmLyFdmJeSnReQHXqjf2ZM9eTFlb17vye8WeUEw9+Rwf4oo0jkDfBD4Rnf/xPP+Y3uyJy+S7M3rPfndJC+U5/4FwKfd/Ql3nwD/D1Gwsyd78rtZ9ub1nvyukReqiOlmnl49doboETGIiLyDKDFmaWnp97zqVa/6nE/uRPnmiyYv+g8+v7K1tcXatWtPq/gQEdydK1euYGZPu8XXvOYB2nY0O9ihmnHq1CmuX7+WRwr/RQ2JP2OoBHDHBQR5+oe7/v3qV7+aubm54YOLFy9x/vx5EIHdkaU7Wgp333MPS4uLn/P9P/nkk6yurj4fT/Czzmv4bczt52V+/c5OcmV1lQsXL2Jmz3I6j7fiYQ6ff/ZffY4j+kebj1lwPGYKiOMe/3aZnUOBffv3s7S0RFMKosr6+vpwHkFQVURAtQCCarxXVFnetw+V53sx77qRZ/v4BdIfzzW3X7IKVXf/MeDHAB5++GH/4Ac+gIsgOOaCSjbAEUXciLJ8yQnW/w+qg4ogFbzkGHpMCMEwClrBJf4goHHAMOZO/EOyf5GL4AaqpHKS/OU8OYo5iBjiChgu8bf0wZCD5ckFR4YFIXn/+c+Y1aHD8jMZtGQ+s2foUcdwFBGPL7rgko2CXPtLzIOND33kw/ziO99JtThWEJqmxX3KP/qJn2BrazvPqPy+L/0ifvqnf4allRVc+tM4V6+s8ra3/yne8973MCoNU6vg0I07mnaEeaUIw4I0DFEBA9FeEwhoXLPVeK9pC7/4C7/InbffiauxdvUqDz74EIcOriAmmMEUAxGqG2//pm/m7/zI38GLxhBZDpOE+hEXoItRFMVx3viG/0L/vqDyzLn9gQ9+EPHds62fbI6ZgArFwNX6GyEtYv8XWMwnzXmPAZrjnZPeew3iu+yieTSx6I/zXAuA5thZzsP/95/9LH/5h36Qae2gxvmKONUFpANXijmmgrgiCh2eRlewKiCGItjUseKICCrgneEeirZiQIkxUQMzpu4UFNTj+x14a6gDVqhFKAINwld91R/m27/zO7jnzrv4hV/453z0Ix+N+1KhUWFxfoV2QVmaX6GUwvzSPHNzi7z+wddz7913D+MVq7WCFVzjkQzjPQyUgfa6MwYw1u7M8XDpdYmnbtqlaz2fVemXsKdOkNRR8V2nIrsUmFSgzHQUnmue4Q1AeMMb3vCs8/CFUu5niW5tvdyS7z2nqDsGuFRAQTQUMUoUu2kOSq/mldC/jpReEfYLHGICAWKIpDI08mH1SlV6BzNFdg0g6Q300j9CRxRwDYOB5uKN483jJwTifSFWkPjsTP1DNDCVmTexa/33RioPnl1Sv/Ad1HX4zeFidxsDU9Qc8QahxscqmFZ+8zf/I9PxFMtJZl75A1/5lSzvW8FzKvV3/F1//s/znnf/OipKtQ4RQcQZtQ3mFZHw0F3BLbSSWhhnNxmUjVhc56gIVSoV56//jb/Kj/3YP4pn61BrjaddFOu1mhttaTlx8014UXpnrioU7/29nBvepALzmbF9fuS3Na+FyrC6JRe1AaI5j2Jakvevg7Eip72nUyLh2abjoKQSZ/bY8zT99IYSBlJ7AyA5UmIgmq+NKsIf+4av5/UPPUhXKyrO3/67f5eL5y9g7lw4f56jx06gSjpc4U0JEfU9+OCDvPVPfGOvfvjJ/+sn+bmf+znEQsFbI3h1XJXiHs9YwqiJKaIG
Na4JQMQR01TaRnFFSkFMeN2Dr+f+e+9lvDPmk5/8ZCg+d/AGL0DjiI7SORSkOniH9GPihuV9G4oqiIe1jLF1tB+rwVnyWGQyc+rMBdGnqQrcHCkx1r2X5WWXc/hMY53f7TWd9FY7u0pJvkTiaoXQFb3eei55oZT7B4F7ReROYvK/lc/SDc7zrsMpz1sewrGY+pYTO/Sp9+/EwhYQ6y1pHD94x9LbaacftcEbl9kVDN7yrrd9MCMQvnHvrTuenk9EBDNLrru/2V+A7vb8Z8cgqX7cY9H7bmsNw+PuL6jGi0Hd5xhputmD+Rnsm1NFMKuoGlXARJkvI9bXb1DzfOHZaZpPBo9CqvPe976X//j+9yONpDPTn9xC2QggBVVCkRSjOljncX0aEVZxR7RgVqkaiwMXzp4+R78Q/sW/+mU6qxlZOW4KVkGE/ftW+O7v+u7BqQIo2ExhEYsrjKUjvsubfX7kv3peQ8xn741bP2l65d078mn4JR2Ofm5ILnLv3e14kcfPVLrRR2W7FATpAJCH7fLysXBV81fjGFXuv/+++B7OP/wHP9r/IO993/t44xe8gbZp6V2rWKu9YR1CUXDh0NHDVCouQknrJQJuU8Q1IJP03GNVSih7caRqePheB4fAi2SkIczPLyCiEbSmRyF5Orp0wc2RBtpRG3OzNHFXEsYkkIG0rJLet3ugAOZDlNQfEreXY24anr7Y054BMAy2eTg/IrO12vt54KBOQRIBiPHedQI8n2Ef3btHBDTz9frxfnZ5QRKq2Wb2O4B/A3ySaPn53LuupEJWAmaJeJFUZopLgR7eAPCS+ngIduKmDTDBkEFpxUNUnIKrp0eX/+91eqywtPYz5QE5oOkqaz/g5s8I44wBEsnQLV77rosDj9AkF4TOfqiHWIZB7H8cyi6nv6rtWpS7jF/CHZ6ecfqxoXwr6TEoeHhOxRIa0VigbsarX/0qvvmbvyWfYZz23e/5df7Un/7TXLp2DREFc6zrwvNIxS5SMnpw3A2zGF8pGuH7VChq0Ci1Bt5VTaid4xSuX1/n3PnoLfUf3vVr7IzHdNNpevAdmo/yvvtfhUr4hlIBs5nyyvHoh9M9FqHsHtPfofy25jU5d7BQuioZWQoi9ZkHxrwl5/Vu7U8Yyt5h6GFKIB2e8P0k3ZiYMJaGt58XvZvU388MFAiZGe2nXb0LX/J7v5imGT3tN7FU873HVX04W8yDnHuq6ZDEf0UF0S4gERjWm5Z0wcSoFl68aqFoCefFjJXlJW46eSx/3KlaUY1QpgiUEmuqKQ3VKoLTzBeKOtrEGqnaQyszL1r623cSStrlj6cyNdd4PXhaMYb9FJuhAOEGDucMPCKM0xCmxUjFcRFhDh1hU99I3iMIooaLR8ThhBP4jOnzTHnBeO7u/svufp+73/1ZOrsBuwcivw+ggmkMlvgMJuiXy6AwvfcgPHDe3oPpxzHGdlg2/XclF4MQ+L7BYGmlt9bDxE9vNhvhIxr2pX+wKFVDeUfo3Ie/EXqHdRZEe++p7vLOn261Zdd7kjdoeXDJgXpaHNB7ufmF4T33XHcRYk6HMRDOX7rAufPnsdqlp6iM5lr2LS8Pv/uBD3yQ7/7O72JjfSPgo6mjFJrSxj1UD1jIBTWD6mlUFK+KqeIWhset4A5Fm1ji4hRpwJ2Pf+oT/PKv/Btq7egmU7yCVc1EWjx/N+Xb3/EORnNz9HYRDSXXqzSQXXAYmNrT5tTzIf+183r4HjqDA/G8tzJ7Xuk99vkZlR4EDkUJ/Tp/xg31hqL3IGUWtg9up8zwdk1Hw9UQifyWPe2EgnsJZUOull3O6bCe6OEjHa6BknGjCG/+w2/m0OEDgUZlwt7yv6qCmQ6QEoRvV6xmDFFm+TOMSo6BC/fcfRd/4Mu/gt6BKV14vwFbKGgX80WnmOW66AJkRHSIaaT/cZenp6mkj31neqTXEprPg3SgwjNiMAree9O98oh0QkSfyCwKSKfLhpe7n2ncjya0GYFQ737q8Mg9huk55WXT8jcUolEDRKUfVt2dSM1jrQ/1RHCTSHz2qjsP7I9VwsknvUq0pHLO5Kd5KGnJX+nj597sWTwKHVz0/B0ZHnneQISUXtJYiBOxYs0n0a9LSa8sJoYXBgw9IoJQUt7HYKQnwUzdC54oj0RYK5LZB1LNxVQVCU/57IXTVDOKl/AAKGzeWGfj+nXQQqFi7tx9172DAZt0Hb/4S7/I+Uvn8Nqlki4YXQ+LolUwgepdXIon9l8zCusEp8b4MaJIQynQWSbLc3Kqh9n+1V/7Nf7Nv/tVwPBGUFrMHWRXgrSPbSUM6AB3yOw5SHqSs3zNSytDnOW7nnVq9byVjPR67KKClEGZS6a7Lce4ikXy0ROjTsUyw3B7qCz+PSwGcSpCgQH6FDF685CaBBGj38dngAUsHC2F4e8hq0vABqFgY9wfeM1reeC++7lw6fKMCSOZwCwCncySBpbRslUUxcxRgc497F94eijOyZO3PO2WUEFUMTfUC9rMB+xihYVRCwSEdeDgIW677Y5Yo1VjnSZskg8px2oXpOlQcYp7rMf+V/vnlYa4v2c3SXw91bcM4EEkb3MsUc05O5sXcVqL577rp3CJ6LO3G73x6aG755CXjXKXtJQqgMmgCAF2r9FeAcdA9gu494fSzmoOiMxmweA5O5Hg6xGUMvv9AYN3cPPM6fZJ2zQG+bkMGS0gPeT+TKGo+wRJ71Gl1emtlOyOCfrpkQyTXQlC3GKy5eJ5mpXrr2PAX70P/ikeDJ7JdMKHP/rx8PgTMmkKiBZUFbOaUIDx5V/2ZeHpCPz9f/B/8uN//x9QvRKJP6Vxp1mY57u+4zvRUvipn/jJMGIWXvgP/tBf4Vd+5V/x/vf9Rl5ehOxv/uo384u/9K+5duUKSZ+h2jQMkjjBFHBqN6GbTkEaRv2qIfD7L3jDG/jCL/zCXTCL5Lhk2rQfs8RnZ4rnZSDDI+qTdJK5gcCaY/7M5kk/ByBYWz3w3huBkiyg3U78gH33Dmm/zVoORSTN9Wkj0hvFPuqLNWCJx6dDmUpP1GdrBAjGWCa2NeGXVMSSRujzH36YtWtXcS0UBGkSWjVFSjLi8kLMHbMa0Ea1QFd7b0HbMHDi3H3HXfHzNZ2sSBYMcKv2AZHqEGVYOm5tCSg3/IBdijGTWsPS8ly3ahmMDBmyIQKS/oZ7thZ9ctXjeWZOIJcdqj77PoZKgmhpSPvfccJ57eGapy34XZRQ73XVc8jLRrmnDonJLcOMDC9Bnj7pECAz0EFhEsRqWM3EhcOdH4Kw2cAau5Kb/fjI014Fm2GIE+KTntbQr1RJxZuQxvBt8SEKmVEZ4zhzT+ud4Xd6nmnRcun4Li8Udodiu+1d77X5wDsOY6SW0Y56IJomqGVuQUMJVjGePH0KamCSwfYSjh87Dm6cPn2ad/78O5l4TSjfcTWaRvmB7/9LfOefi81xvucvfE/cmwX01GjD133912C1N7OxsIoIv/pvf43L3RgjcHZJr8it0tDQTZ13vevd4JVRO4qQNT04VeXY0cMcP36sJ4GmtxODUXJM3Su
UiMxkl2f/UksojDDOPeQmEtQ/tz46iwMzjcbA9+4ddPE4NjOHIjYL53tWSCaBtGcu9ZECvfPkA/tKRAZGkabnHdh3zxarkavyUOz4sIoGDaipqHDtg9MBeXJNSNASby+Bh5tZwDdpqSJp6klpjRVgGkl0tSbmSM90ycjNSB2RSTaji9+RgJNK0+AKTdMwahqaEnz3p+vzNHS9vrTeKw7e4pDgzNvtWXf9QjQUzTU4W5cejulgKPJpDo5pf801WV2x1q3XaTnGxSSNMQMcq3k2Sbxecl48l7xMXJtdelDC/wxNxS5fQRmIublA+qdlAq4lkp2wK+HR44mDTx/Gow9rnFAKw5nS/xdJ/LD/Zu+F5wNPl8bJSZ0e/OCpO7gH78RcB8xtiOzScyedccMDzvDwrjzPDsxyAfFi9onKjKVTe80QCZdQoEJJfj8angIFXBRMefyxx3FtkGaENgUTZTKZ8PFPPsLb3/Z2HvnYx1FtwsNSYW5hgf/p+/8S3/5t3xrjI0opDY2OaJqGIiUdIKVpC6UtNG0sLi3KD/7VH+Lg/kNYDW9PE1LWJsbw7/29H+Gf/PTPMD+3AKqkPmE0N49qy/d+7/eFkhvMBrGo++dmMQfCiMd4DJj9b29KPo/SGxmLZ2iCD0ZbIrc0HJnees1Z2XuaEt6oSEZypMLCcA2c2/qknNtAl/RBM8W4qOf4GBHtmETimV3zW3p1IgOvPpLnPtPtMEQXQTXu12pNTzk87577HUioUyToj910yqSb0O101AqdVabVqHXXGvOKutGgNASkI5kkjtoJp9QYpp7qaW4UU6RzOjG8NDGqu9hsQyRFjs2wJp0qZVAv4v1x+dyenv5kmGOps8JA9rAo1MT483SDcXUpcUz/rIaL6ge2DtfXq5fherQv7goj8lzyslHuw9V7HRQkBC1pFz18ZmHdqRk+9dBdr6WHwzM0de+r6/IYbEaplHhcPnx3pjx6Gl1v3L0HgYYvynBEr2dDQc8MeerBASsbPpvdNREux8TpkyyBzXpv45j9cFyJpZUPGGW3lxH5gd7rk2Tn9ApQEIrUUK4Kr33t57Gy/wAmzjd/07fwpi95Ex/5zx/BWqfQ5eIXvu97/yLf8z3fR9u2xDT33cMwwy4JBovl+PeQw1ve8hb+yNe+BXNjapUqmt51eGNnz56m6zqmndNZxaWyf/9+Hvz8hzl8+DC33nbrjFPcP8+EsmJ153jvHtvBa30ZiDh9XYT3jmQfyeWFe7IpBqPdf9ariEzW99PYxDEJ718JfySmScHFKQOuErBJ+DI5UP2iSe3h6b1LQiQ9bXI20kA6GsNiGZKScZ2pbvMckjmZZHf168udiXUB80ihHTU0aizMLdKOGspIkKIUjRtWcSgdLk41BS8x8yzYIiYD74zShMegBaRRqGRkVLjpppP0xUbes4hmblO6wUoxULeEbZ/hWOazGRgv+duWkJj0UTuzcZGB1CCD/hYfTHsY92H08lJy0Vu/rl1AszbCZ77r0/TibyEvG+UuSiQ8pS/2iPGe3byFh7obSJRQb9ZTi8Tp6P1sH8KBGXfbhgUjveLLs/e+eX4aD2kXTzrV6GDNxUhefY+d9aeSQQ/PKJe7dHNKn3DvuR5Z1TJ4UD1XP1axP/17zHISEZ32IfaQBKCPUlDDqoUScKPi1A46C4+hNFH5WVxQphEmm4MZ5sLc4oj/5Yf+14BiklKnvcLJxxCWxuJ9SY+vgkmUXdTEe7//+76Pg4cOxDO1gjc5egLQ0E06zCJ5WkVol0asX7/Gn/2z7+DgwYPJepiNpSKDW5trLYYsldwsQf3SSl+Ny2AEfZYTCo0X19sbqN6whzaGpOD1c0yH+SeI5Th4b+by/JnPkD5C7RXsIPkMh7GSjCIlIZ5dhEi3vMY+ebrrFDp7BgNnRyIq6LWP5pV51iuMRiMaUbQEvFEteO1NURptaVUz+SpUIZhXNgTTMc8UfvXf/zum08gZxaXMLlpQtDhFjLY4J44fH1T1bBLpbGkllIISsJA74sYQKVrJUIjI981cf4b8mbOrsp5IVqeRHmDEnJiznFp8qLsuTTScT60zj8WH30kdmI7Tc8nLAnPf2Km8/xPXclBsGHgBDq4UbjsyQiV4puoEta7HWKXhykbHxas7TCtAYISeVKuKsNAI950YIcVBSiQbCQ60a/zedoXHz40juSiGVYghtcEav+q2BYrMHkissqSMZfbksTOTKPPvKwB7Zog75oU7j7XsW8ywDEHocC+ZaHNU4dMXx0y2FdduYCFIThpBObJfOX6ggZp0v1oiN1Ez1SBw7sqU1Y0O68Z0DmJG4+AaTIzf//vfxEc/8hE+/NGPZu2vY7WhjGB5cT+iwtr1a6wcOsYb/uBb+dBj17M8fOaNIHB4peXkoSaMrAjFhZpumlRhs3M+fXFM45WtHcHaNnIoSVN0hOXFZdr5OdauXg2cvDPmFhbYuLbGwcUVft+bvowPPb5G7aKiUAZPJjwqIcrS7799LvMzDFRUd5/1S3kJZHOn44OPXMVEgWSDeORFDi4ptx6dhz6QDzeQ0DCACqvrE85fn2ZHhYprE3PKgpZaWuH+m5uBUSUlmCOl8yxgK+zsOE9cmGBigwLsqkW1qQfUc/9NS7SNU4tTTMOglI5MgVLN+cSZ7WCPWCp0VYSKdeFo3XF0juX5Ju7FujDiRTApqCTLTQSp/RMz3DratrDVTWgsFLInrtPTIfsWHx3hoDz61A3WNjvOXbiOuUXRUTEKLaUpiBja+iyCFGdjPAZtkhvuXN92zl6ZQB+HitFkJOIieAPHlxqOH2hxDK1h9CbeRa2Fw3YHj17YmUVI1ucCAI21PD9fuPvEHMVI5peBR87JxanVefTsNtPqSUEFHdhKfZ2C8Nrb5oMooln5ncV6zzW3XxbKfTypPH5mPUKsIfSOMTp4o8Ckwxto1Tmxr2HqiljBioHssDICP6g8cWZMVUNc2OmMa+sdU4OihU+fCZLgzUfmOLhcggtcQ0HvW1SW51ruOtSEe6DKlbXK6atTXIwiwVH96BOGunH+2gSqYOrcc2KOhbkmIo9aaUQYNUY1pzQNJ/e3OJXTl6dc3ao8fnYnKX2x0JywE+OJcX3bec1tLWrKaNQhWSatQi4Ip9OOq1ecc5cl6I3R0CWNQ0Q+1TrclOV55eRhpRSYduEtNkWRBl776tfwea97iIvnL/BP/9nPsl1vwAi8E173+od49IlPcdP+/Xz3X/pbbI8NkcrJgy3H9o3AjfFEuLbZsb095TMXJ4hENWF14ZFT23SQRSxCJ85N+xRtCn/qz3wff/t/+4Fw76rjGK998CHOnH6Ka1ev8NDDD1PmDvFnvvUd3H7b7YwnymNXC9Or67hEHsEk2hLMtQ1HV8LLHZXCdOI0Esk06yoiWbT2WTycF1J2JpXHz92geskoro8+lcMryeKeOjpyTqyM8Eah63A11ArLjXPzQeGR0+NMtE8Yd5Wr63UInp46K0zFufXAiEP7C26ZZBZnYb5h36Jw99FkJolw5fqU06sd0gzdSvjPT0Zi88KNiln4wvecmG
dx3vHa4GIsqCGWLR9a58Q+RSmculq5eqPjkZ2Ooj4ka+94zZfwsff96zRUFswQDwMUOH8i+xmFBIyog1MapAbLiF5oUDYmzrlr2ywtKvPz6WhoQZjgAqVtEC+oFdq5llYUl5bf+MQaRw5aKOBkDzmO1Q605ejBwol9LQZMJs6VzcrGesd4PKbiYUCATz61E712cPDK1AtH97WMBKwE7VnEuffkPAujBilO3Zkk/bdyfsOgM1BlY+x8+uwO5qQzGRGUm9E2wtEDIxynUWG80yBzoBNHzalF0c6Covws8rJQ7kvzysP3LSEonzo3ZmtS02J1bI7hY6d3mLrROhzcF6R0FwXvaLXwwO1zjIrywO0jImtYGU+c62Nn1CYjIcYTM3j01A5jD8WCNCy1zvLCNLwRM2o1qht9Acf9tywwmle8E7xWjiy3VILOrR4YcXFlVAqtwhOXJ9zYrDRN5erGBO+S3qRhbQsLzMQAACAASURBVN3zb3MmY+fWYyMW5pTrm0H7i9A3HpoiwbMtwbYpBocPNly80rFtM3jHLUPIaqDBlV2fGmsXx8EBVoaJ1zRzdEVZGY141f2v4lu+6Rv5x//3T7G1M6bS8YmP/yd0tMjb/8Lf4I67HgiDYJXVtcqVtW1chZ1Jx/q24TUofTcdHnFyf0vnxufdtYR1lhGDIEU4vKIIhfO3HWF+tEDtpkHbdHj00f9E0Xn+9F/4m7zh9z5MaY5iDk9djTFrinNopeXOY3MBP3iwoprWObKk7EwFl8KkmzL1Gpj21LmwNuHS+pTNnZfOc1+aL3z+/cvg8Pi5bTYnWeXsytZ2x8ee2olK3eIcWezoGVOY07bCA7fMUxrn8+5YiO6dWqhjY22nMlccz2hWJObUfz61Ra0JGQosLLSszIfP0rlEw7dIVmGTwn23tCzMK50LxYQjy10os2RcTXPOjBqhlYYnzu5wY1wpjbC2XvGk3LYlE/nVAuIoCtLgXaUWKCitRG7JDWpCUaVVau1opCQm3te2JOogitZwlqSZBvxnxuY2rI8tIY7Ey8UjHugbliGUUigGtx9b4MSR6HR65nLHtY1pFE414ehduWFcvb6JeWE6hetbXeL1zvFDDSf2t6gKr7t7gXFn0AkeMAKHDhRSDeMYqxsdnzyzE+tNZ/ybRoVLG9NIc7jQmdG2wspc4a7j8+yu9C1aOLy/MA4vKYzDdjgpgnNmdcyV6x2b45e5cjeH8dgx77jzeMPNh+YjjLVwTdRh9Xrl/PqMGx2c5sDmPn22Qwpc3ZxwfTPoVQWhFOGem+cjwZjRLibccmwUiygBdHfhlsMj5tVwaThzdYcbO07PXz+1OkELCc/UpG0KHc5kB86sTTkwX7jt2AgxOLp/xLH9gZFY4mlRnGGB5yV97OAB5fK1KVe3O9iKBVml0LjF94rTeUfjDXhHFUdVWDBjftEpXYmFlFZf1YKKlQvHtbC+dhrvDMzQotQqtC3Ma4tkWfjnvf61fOXaW/jnP/vzqDbcdMvNfP2f/HM8/IYHKQXuu3me8QTOXdmJCe3C0qjh6LKgxakWdMcb25mgksrpSxM2tqNk5uBiw/pmwZsJJ+58HX/8rd/E+TNPYLXjicef4tSpp/i27/4OvuEbv5ZbjzTpscXzOX11zHQaUNvm9hRKk5DcFNlW1jYrjz01ZpxJds/+OAtFOHmwYalNqOolEjNnezvm2e1HFrn5WBsYtEnMcTdW140L6xNwaHNxB6lG+PTFDhFndWPM+sY0m6ZViij33TQKuE5a8A514c4j81R1Gg8IqBPllkMto8BXeOramI2tYJ24G2dWuyiy6XMBAm5KI87OZMK5a8bCvHLbsagsPnqo5aiPQIKjb1aDkVLrkDOoIhxZKmytOtYYQsmoQ6keGaFqHbVWJq4UoKMDHPWCNEaXTlq484qUYPbsO3QrK4uSEKBHjxrrgpliQsVppclOlxEbzB84xkZXeOL89pCTWFgojATuOtHSVefc2hTzBhel4BxZmQOFao6KhxJNEsBTlzrWdzoADq4I69sjigjGNOjG1VlsSzQz1LjulfmGm48oMI9qYOdnLo+ZdhFFrE8iWupZfG6V9a2ORy+OmdY+mgrIa7EoJw+1zM8Hk+3Z5GWh3AVo2kAkkcrGJJqhuBuY0jTO4QMtRw6XTGw4p69NuLJRY0HXwN1KERZbg9ImRq2cvjxBMHY6oZt0uAe+dtexeQ4stZHZF2dnYnSqGGMOLisH9itqBL4Z6Sm0Lcw1ETU8cX6DnanTubDYRjHQ6YuTCD2NGZfXklYvMbGrOTs7lXtvmYepcOv+hk4Kqha4JJp4XNLZ3CkWnrglP3CuKB8+v81ON0YQpl1MSC89TcpAG6obpz7+SCxA0QHnN1e0CG1baFXYvL7Nr/+HX49CrQpv+tI38fmvupNbj0Qvkc3N6N44apQqztnLU65tVKrDykJfph6QUBSJRVOkpfmgp3pxxl1ARRub1/jiN34+9gUPsbq6yvXr/5KFxQXe+vVv4cBiw/ZWn/QOjLMVaAtJ9WsQ4FMXt9mYOFKD761SWBkp3jilRlFOq854kpDaS4fKICKMmkzwaWFz2zPBKKDhqR/f13Ds4HzmlJxTl8esbkRPHbqOmpHK0nyTeEUs2ycvGkil6yaMax1yIbedWODwcgt0aIXtakxr+LcHlhuOrLRIhUhFKirBQZ8bATiPnR+zsR05osWFUDZPXeqya2t0UTRXtNaAwEpFavit6+PKfScXcIODK6Mssot1ZwmvqEfLadM2qINVMJkG/NhFArmhQtIKo5u2Iq2yfPA2nrwSY7M97mK9qYeX7JkYbpUqhmOYGfsX5zh5sB1yZCXbILsJW5O4z7YEbHv68pjVTaN1ZX4u5rUoqCtuE2gjl7M0ijxCMWfSRfLUMe46Ps/+Bc0oGbYmlRq+HJvjyZAcV0KvoYprQ6kV84ZPX9xmczyNAjGNxnhLc4VqTVSAF0NNmJphbs85t18Wyn17bPx/T24jJswXuPXEHOrTVBodbQu3Hw0WRi3RXvb2wyNuPdQi9E1Hg/lbxXBvUDHOXO7YmExBW1avd1yflExoKJ+5WBGtFA8oREpYfTqnUMJLlphg9xydp11oWBgZN+9r8VK5+8QC6iUmrAa1MFMgQdMSIBWedhXTwo2tyqnVCZ+52PH4mR0e6wRpZ+wPiM53NZMuqnU4n+CoOp0IR/c1PHzHIs2I8Ghqw4zWZZGI6pSqzr9+suWqRBIGDeqgS2D5SNDH6mZlbfUKmHDbrTczvzDHo2e3eHLrRuSGqmMFpCo1C6FUCq0aNx1scIRjy4VD+7OSksRVnej8SEVcqFZ592OnEJztjQ0unDnPhz/0Qb73f/4Bdso+Vq93nL46YX2zi4gqZnNwmi2UtqUHp0MhWWGhEU4cnkNlCtmTZrFtuf1YtIBdHL10rvvW2PjQ6W20KqNWuetoSyTKCtWNRoW7j2swwVwxOm45Osdtx+YAR03o3FEaXCagDVh43Js7MT+vrBuTrYpUxUV56uKYJy8ERNH1MIfPIEEh90hAuePEiNFIWGoLNx1uEHfuP
TFPFC8lC8QEtYo1GspZAjY0C+XWKWxuG6dWJ6xfch45P0EuGFtr2wMDR6xiJaAZyyaA4cx44PSpDGup0RqYvnFTzmmJ9gJfeP9+9h04gGvlxuPzrJ6POazaz4mIbiPDYTStcnp1wmPXtwGLFg0VvElHKIkMLqEMa99uXJ3jh+ZQgeMrLUdXGmpeh4RliCIvtUiEetQblGguFX2mxLm0NmWSuYPTqx1b29m6XJLKLaBeQMJZKhIVysUMN2FxpJw80CTl0xFrmSuFO4+NQCqLcy9zz31hTnn97Yv0wPhCSyZbNAsznCsbY+iiN0lUbxliwvJyoS0RbF24NuXstYrLDoowmRpdF72iC3B4RXFvgI5as2pOBaQk7dIYT5WN7Y4izt0nRywvNCy0OvS4ubIzDaKMCabBA3eIsKlAoQEmEXJXAZ1ycLHh7JUdLl+bstMph1ciKTkUMhGhavSeVixiGKLsXIbPRaCasLXjXN6YsNDGhBOtSCswDTpYzR7gBWfaVWzQbRUVRbveE4lum6Ut0Z5VjTvuvo/R/BwrTcPy0oiSnfrEItEza+gEbg2rGx1UYWOj49RV5fajIw4vBgR1ZaNGkVLSKum26Aw6h2mdsnrtMqjzxV/8FRxemkOpLMy1TKwhOOGVT52bMh7HGC/PF+bb9M6kj0LCg7l6Y4JKRSw43hsKa5sdjrM1eekw98U54aHbF7P+TpkvSbNVT+XgrG1AhwE70Xtfg3a4slho22AFnV/d4eKNijKmlsrOZjbBykrQQ0tzGfFFNCuiA8vEPKKgaWdsbMc8v+fkHIuLwuKoBMumwtqN8ILd6oAXR3V1tvQwybYcESW5CYcWCucuj7m0NmXbhENLStcpTSM040ItoF3sgtR3JdUsbuqrNJFIqloNQ1QBrxHJKnXXWlJWN6ZMdUoRmEy79MCJzUIk+PxiRtPMUUqBprCw0LK0IHQIjSuiQg0GBH0lsCZV1K1JqMNYW/dk2mxzarXltqMNB5ciZ3BluzI0ypdp5ElUIiowBTpcKisL2SuGwvJCS+0SgVfnE6e3GI8jqb4yH8Y/2g33aEFE6qs3oqJZMKICecza9hQRZ2v87K77y0K5b42Njz651UPsAYdo0gRFEFWKVKwqxw82HN3fZN+GhvkFYaQF944TB+Y4caBvxD+rZVTSUzWhaCSrwxvIkN2NT53fZmqCbhc2t+JhPXZ+HM2MROiBbW88mTIBhVAFK4LWGPigewWvd2FeuPf4AvsXjNuOjrj14FwopeJY37+DSBQb0NE3dQrDVntqltpA7xcTrm53vP+TO0zqTtxhT//r+RgJA4FxaX0SFbK14kUpJTctoQbNzQzEKFKoNmU0Eg4dPMQXfcEtLC8tRB8ThwgQ66ywRSKX0JkOLRW8BDe4eouJsT2eYF3lwtqUy2ug1x/HLRJVGxtbvOd97+eBh9/EY5dHnN5YD15wNlXqq2ybGti7qDKaKywvWDBwgFfdvMBcoxEpYbRNjI8RblhPQ31pPXfno49vZY1MtiDOkn5xRUqHeFRFnjhQOLQvIUWZMj9XaCRoriePFI4fGyF9kR8SShiJfFDVNBqRuwkmCKDOY2cnbJsxHsP6eAoGj5+bUjWURq9EOtGgKmY3RcMo1agKIiWhzpiTi0vKPUfn2D9XuPXoHLcdnQ8LYUE/VnVOn1ni4x8IzzhaYUCrUAyqpnurILnph+BQQZiiGknXooWa/ZAQ52Of2aJZGGFULq0bTeLU5tA0kdbEI9/WlAa8QedWOHKw4a4TI4pUxBuqKsUsS/+DhRW9zJLAkDq6aNTOFG+YknOvqWxMIgK4eH3Mxc0dxCTWgpNKuDDoGKCxLlheVoLVk4zJnsC/MCoszGsYOXfuPzlHOydD+4hGIiKN1gSGSfRzWhy9zD13YNZHxGB5ubCwUFBvOLio3Hq8zSy7U0rga2h45F5jIeAFExnK/oNxELxoI5vzS0yynYlzdbNjdWvKuYvB+w6LClMZ0wZhOHZVyUrYUkCq4FUR6WiyeKQTRy08nmhrEA73a2+Z5/jhhgUFbxSpjpZK9Si/LjjVHKNgUlGERrL0XJObLYoWpatB96Mqpsb+hcKohWk1SipxrxkianCShdhNB8uFk1Qh9UIZjUAid1CKcPr06ZxoSjVYXFpged8K1FiEQiR4ontkGFutQtUoJ+9L3MXSD/KAqQJagJsPzrGzPeXjH+m4Nna2p2OuXF1le2ud2+96DaOl5ajyy6SbZvbbfIppSay0BqNhncgNWOHS1Y2o0gRaER64dSFQeYEj+4SmCJ+1L+qLItlVFGd5qbBvvuCdcXBf4eTRxVBQFBpRSqnpEApVS+SSLJVE57jmhiru4QqI4LWHC4TxDlzZjHzUmdUpUmDa5ZgRFd9oUHs1mIEB6GViMzhihljmgDRYKFHo6bQOD9w6x8nDc7SiAZmpDPsUuHicx5RbTpzgy77kS3nXu9+Dd04ZZRGGR4fHmoWEgcVnLxWNrQc9vfloImjUCnPzK2hpMDPGa+fZunImPFwh1pAKopFHi86kgrYtq9sjrp0d85kLE15/1wLH96eHrGCdoKJ0ZGGZxrzWCmpOVxS6aExWLIqSWhPuONygJtx6oDAp81gnPHJ2k67ChTWy51WhEHmRaT7LvtFEVUeq0ihUUy6vVSi5o5TB+atdUkOhFOE1d8xjVWm0cni5oW0iz8ez6/aXh3IfNX2F28x1F1cc5cpW5cpnuvy8RJK1L/GXmYeXRBhKlieKSNC7SijhRoX7bpmjbQsiYwrC0aURx+8IVkns0+pD2XbtK/L6lqgq+CR6uKMdxQoqxjTYUrgq02pcXZ9y7uqUS9eNSzfGSdIaozmZIlGYmfcae5Ggs5bCfXvPKqAWVXzeKZTc1cbh5MGG190+otLSWHCcxTUXblYlqmAV3ntGWF8F6/puIU5pJPrGCCgt/+HfvxuzjpUD+7jzrjvCk+uEj53aiUQdnv1xajQeo6ASOzwpDAVYZNOyvstkagfcK9Ot69y4fhkR2N7Y4DOf+QxmI77qzW/m9Xcu4Wq0GaloIRk/c5FI9YLaNHuQZ3/mGDYiBokKW5fYEMGGQrOYSy+ljJrwsq3E8zU0trFzuLpuXN7coVBxbwJujIw04g3mwRjpIYPYYSqiE0/D2jRh9B+4ZUTb9JRbOL6v4ejyfFa2EcZY0quUaNUsNYsdFVqLFrs1O6qKR0REG57xpE65ulk5exEuXofza9soJcPsgAYUIleFDLCjH3wtS8cusX7xU7h0VIPOGiT7/HcV1Luo+RChWuzfqumpZpDN/NyIP/QVf5B9Rw/hopx/SvnMxyKKMXEaCi4BB1URam672Qo8fOdydqTsGDVCrcYjpydsT43a5yCymJHeMfSEjhSi6CgSwyKaNMjE66vlBjqSjeCiwlsEjh1qObk/nn/TQvEyJECHHVFz7dIXKWpEoW7h1PRRlZekIiVc1tfJPJe8LJT7tIuBDXzdWNuEGxvTzMqT9POC+jQqgL0Jf9yNPvQJpempUIxbjy7QinBwWTlxsEVcaUp0kJsbNRw/7Ay7bXh0olOA
LnjZEVU50GNmQVvDmmiVK/SPh2rCqSsdpy9NWd+K3ueXro+pWdWmpd9+On9OPBJMNfZXbCLwo5ZQ8p5b35lUoFJq3JNnhVqH8NBdI9o2PGiHoa1p30dGJejMC6XgNRI9kqwNwdFSKBoJo2odk9pxbH6REydv4vhNt6JSeM3tc5y6NOXC5SnXtoOXHsHAFGqPexvVCwUfEnDhiGWfEo02xrp1Dc0WxFtbOzz15Bnm5/chSzfzsSe3EHXuP75AKc7SUsPxpSZbn0avE9V5+r7f5opIei0SxkOshMIQYrxFokw8DcBLJZPO8RrKWIHN7TGbmxHJWaM4AfuJ5a5TGnueuk9CkUu+VkPdKVK46cg8rTsH9hVOHmpAoMm10s45x+dapOsbS/UslcSla4miodzjYNYB0aNneY0NYWoWiglw7tKY01cqa1uRS7pwHUAQmSI1NrxW9+Sc+rCO1WEynfILP/NjvPEND7F8YH9QlJsp6nOZ0O/nkKA1ouhOK2A994EihZ9/5ztpSsO3f8f/iCNsXU3D5xWXqCKNPYIVVc2ke6FplRNHmsgreMA2Drzq1nmevDTm4tqUKzemERknrCTeIppMvNQn6oI1FTPJfYtjG8CSdqzv417cKRrb7F28OubStXC0Xn3zHEWclTnhyL4mt3MIh0hEk7cUSe+gSkdtQM2K2KCRWuawSNgsnsOzyctCuUMoHdO+V0Z6XxYWjhpYrrvgWhG6GFwRpDQ0RRjhoMYDty2z0AjLSyUx5WxQRb/jkGQfnhwkiUWXCDo0Ht569jB1FLpkMKkgmdVGhem4cm3sfOrsmOvrFZsKlGTbZDuCkn0i4lwRDZjHDvEqFZMmC7JAu34sIjTTEh5b1ewlT85Bah+2QG/9LTCtvpVWkcJ4MmF7Mg7jpP0mCGH9ixiqhpUeEhuBCo059959N1ApItx5rOXm/Q1XNqc8dmbKxqTL9qw+tFwQDZik9rAMgqvRZaRhQDO+EOH1ZML2+iaffOwRvuXb/kok+zw8y0+e28FNGI2mrMzBsf0NNx2ew9xZmQuP3TBEurD4lWALEcbWmoimqgVWGfu+yNBy9aWS6DWerC4PPLpvb+uJUZfsQxL5xXjehjJqnWUBk4ZX37bIQgsri32b4yYhsMyLmEazNxRvSPZNbA6v5LZ3Emxi7Vt09HCEC1jFKVRgUo0bm8YjZze5tuEBJ/Y3JIRhSHovLcGkMkFLePyS/cvf9Us/zRNPPsbly2dYXFhEVLnrznu4/a47KLnGauadus6oPWxRO37zNz7IzmQbM7h6/Qr/5Gd+hq/9Y1/HLSePo0mpRApOzYgufrNtm7AzjTBq2lw5hkkZ9AsKd5+Y4+bDLVc2Ko+d3WFz4lk8Gg6J177UpsKoRWo03nWZNc2LGgGP+gQkCvdc6Dvbeo1o6ZHTO5g4c0VZGsHxAyNOHhyhOPMLYcX6GhIJgj2mFSkJlcV2U7H3T/aSF3tuz+Vlo9whymo9i1GGjaLdEQ8Koxelp2tII9x7eIQW5dB+OLKcNA6bbd/hmiX5PU+cwA5bhEDfe7w44QCi14y4oFWpJbirUcDUANNowIVy7tIOZy93rG7V3EZMAjNDs0eOxiTxMrx2qTiVFo2MeCOUYtG9rkQ7BGkklXtsw+c5aVSj746jXFozJjvCaCnGq7fksyZrwtgqp04/xVOnTqVfZrmre8AlQ08KEbRpUO27NYYHVL0Db8GVUQs3HVBOHmz59KUx565Urm/XTB5lj56S+FQFcaPT2Kc1JuoaXkO5rG9v8qknHuOue17DvQ98AdE0aeatuChTc66vG9duVB45t41Iw53HWooW9i/ATUeaCGv7HnNeggMslihMbpTgYNhzLoAXWoSADCyNfFcFWoK3rRaeekdGagTNVIV7Ts4hKuxfGXF8JTZ3hhpMj2kq/+JIowFt0CBiYUBUUI8iJSnpEyZsN7T9rQkPehiRitNIwANnL3ecujph9UaNCKDpy2cctCLWJsSY3UW9ifVUQqGJgAeSytXLZ/HOuHb1GtfsGqLOqTNnqe/6tYjuRChaA6qpMV6SuSOlzHqdUzm3eo7tnS2qKLXLZevBdGm0QSmUIqgY2szTinDnPXcxxWktC/4C5M71osw1ws37G24+vMKTFyacWp1yfbPLBnlB41RVvJti0iC1YA0xb4Ho+9Tn+KBv5xCGL2iehRIUT4NxNaabwpWtHR65OMY6uPvkHC3KyrJwfH9D0YI1CY9ZwSzOk9ln1AKyGpiizyIvC+W+slD48gdXwG2X8nFaLVSLhkHTSWVr6py9OuXyWod1zrm1juCPppLRLvZmTO5p9DkNqldUBAqq0zAAtgt6Sf5vJPJKesKxnIZNB1zo1Dm01PDQXQscOTDi8FKDNsFNdgvuboRLBbVglhSPxkleKtQOsRFWOoZNF/omZBL5hMiPJrwkoZTFFTXDSiiti1enfOCJG4MhC3JdKPfQk/H3et8YKT0zFaVkH3DRSMaNmoY/9vbvZfvH/iYHl+ZwE3790R2aJmiigRFGwk+ytHyrcw4uF1536zxaIgSmCYNkHoVHRv9d4cknLnB6bYJNjZ3tCV6Nt37dV/N1f+DuXjtHjkWgp0BG+1rLHX4EkwbcItmd3RA/eWbM9nTK9YkOzbSi62Lw3Y2AIta3P8tOwi+grCwWvvzz5iPR7QTboWFWMS3OzrawMzHOXp1yaW2CIZy5MkUEzl6Z8ImMfsCHXFRJeA+BfjMVLHd2KoTyg+yVlFBfMmFiz4/IpooFFm2qHF4oPHj3Mof3Nxzen+ybIIGhbkyJorCo5HTEZ+tLJKGVbJVBRrfvOTqPuqI6YuwdiqLexfyz/h6iBYA2NfNFiU2XXIRVoDRo57z/kW2eXN/g+qWdwKk1PF0pLVoCmPHMh7koH3l8Bx9Fs7mayypClxp9ikpwyqcliA7jCRxcVh64ZZ75EtCWNQGtSebJhm05PROgRdF+imXOote8HbmXgmd+QwLapenCwPYxuYG2Epx/Oh49NWWn61jb8tBd/Tomso+u4NW48Rxz+2Wh3FVhaUQMjEVz/dh5U/jUmQlmsLlprG5Od1kqiyKOOANgAZ1kMsOxwKkNtDEs24oaIFKjEZdmsUQnQ0Wp00XIa8GhlmKIxGq8/8QcNx0I/H6uOKpt9NbQBqSnBEaiUbwP/xzpIoFqzSgaI/UTO0vJ1Uvm//p9FiXvpX/4ilmDaISt+5eEyXllMiGhmMDZI0Oa8BXCeJxJR5Wh53QzP0fTzNE00UGvygLHb72P7/zL/5DJ2Q8z1iV2plEoU3KwJbf6s2pZyedcX3ce+/+Ze/NY27Psru+z1v79zrnDm+eqejV3V3fbbTvd2A02HrrdBtwYG+MmjhKjOAhBEoIcQaKEKEGRghRAhMgGCQKKhBwkBA4SAQewMXYgOJ5N2109VPXk6npjvfm+d6dzzm/vlT++a59bAVc5xnZXnVZ1V/V9de+5v7OHtb7TulF44tzA+VOBxxwv+Z5Tg04mMz7x5CV8CF78pY9z7tw5/sJf+HOcOnmayPmvrTmq/WTaqa/DTC3bXgtdMZ0
n8cF51zNzmEauPaisIrj/sHH9/kJwRcrc2jL6SNe35OVmbG123X5kWFowhfPyjYPcoBP3HyGiM7mdw0XQQQr3fJYGrdbsMBMaSAwZg1jFepybm4x4sWiU0kPL0F2v5bjeK1YG3vXYyKVThWLIcW3KZbcSiTMbm2FQiuAd750RCdONguUKFIda1Y2N40AzZTXpT/ehFtov4XJPF1M2fQ1BeeLZxEWYGEQCY++wsrdoLFaVhuAiN8k2IxUkEpzowjjIeOu+FjVrVTp+J6h5qK+KMTR1Vw92g8/dXPHk2ZGLp0fcGzHkxKqEALsiL1phNKeO0xGUa5b8T6qD8jeXYm/QjNc8A1r+bhhrU1dU4z1PzWlt5NodXfb3d4Nrdxf63fLPE0F7XRz4v/56WxzugYwtd++viMm4/rBya2fCCYX0EBqfhkETY+3h61/UyhF+2UnFYjq429CIWrIQbnmriom26hm4JHK2pZ49SqFWDZg+Ng8unR54x6UtxiEretOk0moNzz9Hv8Wb5kUqF8bwVabLRy56V9vdI7sxVdCKGkAfmntuYMsDPjM6Mkvn1PaccbZkqisdjjXJzGiU6rTSsmWtTE04fGRWjUXDhoYXdRpttk0rI8VPs/HMN7IZWnAlHaExgi/TMzDAipo72rhxf8GNnRXbG4Uvu1C4cJZ8FpZdiKC1U6dOcmL7JM898zyDwTCTMoSAmExGKd1LdMWUslWKOjlp9daGsXU/DoQ7l884mHP52yKtCgAAIABJREFUbPDep0ag8NL1BctFZW/5FmIy+ZpacOfhEmtw9d6S2zsKqVtmKmlP8wxrqTwSOVwS4iDhrZ6l36f+uLMe1KHTAaUFhj77YnouPaDNDIimcXcB21vOhTMj7zo/Z5zpwG61pRwysWzJuQjKes0qfrtBcxj0mUXRumgNomZxYMaf+r4/yT/5P3+YBzuP8CKtPDUlgQ5M6pg9TV0WHedWNxAm5ZpZaCJTQjatgFE01GNS9zCYszGbawSn5wiczKexNTYnqEedhmNNtLG3DCczEcQ3dibu7BxybLPwwuObXDg9k3goD2CAmqFtUWpW8ald1BBXXGltmA8aAwryw1iPL0+oC8AqgWOl0TJqoLjx5NkNMHjirPGeyyOY8blrCw6Xikx5M1zmN3S4m9krwCOk6Jsi4qvN7Azwd4FngFeA746I+2/2ffYPK//qc3vc2hFQ1wxVbmmyiJTeyY6ULYoF5kVOUY6Ix5Ek3mof/RXC673gLQmRnhldkNGjVCkakIbbGmzM4PmLMy6ddra3x6QJRY1bOC0KJavUqMJCzUwsu5nyQEiDiQE1mCKXWapoLN+3ZbKep1yu5mb2bDuJlnZkWa1vPFhysJyy+pdSxrPbmBK+bnXBgxufAGvUJgWzpdmjtWCaKrQVsXcAow7OUkQqOyKVImI9pd4a+DJS8N+JUvEFu7uNn9/f59w959mLM84cHxg85WMliTwP5vMxc3ZaXp6ovWywHllLhrwVoE7UkkqTJEmlBe7GKpHOhFQnAw6zkZiCL3t6g6EZB1Nje+PXz6j+Zq3tvUXjFz+3x527k3gVsxwQ0asvk2s68ieVwiAyg4rWkyHHr/uQfEjeb4K4aZRUljS8Bj0VtNWszJFyRaobYdzvemyLiycHjh0b8Jr+BW08vBZRW8n3mMn/4CaFSnUgJy7FpCViVfBCLQjBD3XJ25szpVliyt9HvMikXZ2QheIMJCeUN0VzVbVGLKMFyriBDaNQxtVSOvRMbizZubqb3LEGh8M2K9/QhdSMOgipbdlZGv3gz04heYUBFRXVnIf7jV/83AHnTq549vzIhVOzlCTrIhC5apgNuAcrCoVJirrQmm2dj+odOQNmldyWeTnrwvVJefT9yu7CicGCYVD39+VPbWHA4aqyPX/jtf2bUbl/KCLuvO6f/zTw4xHx583sT+c//9dv9g1WFW7tLPtjVguVYv4+uWZa5oGT+GxzDYaw7rR0aZxrhylKhlilNNCs5UzHhGyyLSpZQVKCEhMb84FLJ+e8+8lCcdlLIlomSxpTU1dggdQ9HW42kB52xeQ5dGBVaZSjQbaFJE0HbJpS2aJWPIxkvz3fb9VFQhHJixa50aRTryg5r9p6ckvzxDAXh9z41P/F3s5V+q/nBi0mlvt7LKcFy8NDBhuxh/fZWhS2L72QG7DRvFCtjzvMhVnUIa15BctnayFCmOD2w4l7uxOnNgtPnZ/z+JlBfICJQ1FWv0F1hqycvOjSMUISs4SV3DQDU0t3JHBiNmXnlBwCRlsJZyiuiU9eI9eGqprNEcbyJqzTb/Xanho371epSKL7MHPmneeY5mj53F2a9xa04v00liDRktOgYGl2aenBiCxgDFW0nhCXFRVGEeBR2ZwNPHZm5N0ZI+xNN0SL/nxCI+YsPUCIp7FOVBe9/yGym/RIiMNolDywI7tUyxRUVbEK6NOw9smazDu9e6t5xHpL3iTA1amSpiiY+OC3fw/nLz7J7s417nz2/6Gai2tAUEzUQl1VpqEysQDfJTZWhA+sSiT9oHmsPQWzTQZDHrBdceOWcgt01hTj5sMVdx4GJ7aXPHtx5Kmzo7if0OfUoTFP522gEYFuhamSPBrpONW8OJGyco4z+RFHOMRRIFi6titIbJEeB1plPhjj8FtUub/B6/cDH8y//0Hgn/NrbAASb5a8+3Uyo2ymbGq0chQpQIkcXpH4mmVGtjcdEC0T5czoscD6lrocmJSNYV502EbDivGOSzOeOLvB9lySu5aGpm5pryT8Memgq97npSSxabn5KvQ5iNKpFtw8J76ZBOgjtAyLCpswZCqKNT6T9Yjp8FtV4/qNq9y7c4vv/4G/zMPdFVOD2TjjcFJc6rd853/Ecy98Ncu9XQ52rtNW+r3c6loGai1Y1cre3j7HZpsM48hq9xpu75IZaMhpMKFxxL2Ijkh8tMvpgj5sSlUOKg0njPu7lZ39Q169a7zn8TmnTowMQ4dVQp9lxi+01jUJZQ3VwKDLpQeGGal6kpqkujTsNDCXkqSiy49W880BlGzLf9Nev/61jdQbmCSGJQ/eQMMnrC9XS5l4wn7ejqo3S4NbNwwJAxfs1fPOI81DkRAM2TmKSIcXntzi8TMjx+aK7pgCRtoay8cqLQphI6Ml+ReZ7d7kK1gHZk0QmYNijOtZn6PpveE5h7U/AQNQgeSmTq7mJcKk9RCQMj87Uvv0yrg5CkxzFrsPuPbpH6et9jEfFIhH4WBaMj3aYbUaaPt6tqXcZ3Zvh5Pv+AbJLvvyspYuQaMUT2OkuvpKk+gnO2DFOTTp2a1yfxceHFReubXk3Y9vcu7EkHi6ODq1VU6LQQWMB0MY06hzwRP6IpEFOdbFN3heapHnRPMGk68HoNTW8vcIMDnP17DOr/L6jR7uAfxTk5vlr0fE3wAuRsSN/PpN4OKv/V1yYUYqRuiHub4WxSmkrJGyvhnNA6bCMIlgqrkHOj4ZnhvLE78LPWDhhIjAs+D8sZEnLjjPnJ8LOolIaEh64imhmGJxpLJBrkhy4ZL5IIaKb3GbUjY4VW0cpsClSU
qF6sbQYKBPfdXmEau+bhj5/Oc/zw//8D/iB3/wbxEx9SJA0sqqDV7c+dEf+qv88f/2b0Kmmw4YUzVitIzhDYbjW1gLNmZzTp04xfaxLWYnn2ZpKC7XsipH1bPTMlBJ7Xn0TWmd5W9MLsLOUimgtrpy75Hzc59f8PSFyjvOj8wGKUUaqLJ2xSrLvh70/3hIXip9fkrjqkLOrLIeCNGULEWf5tMx01SMEYRgqrdybQMtpzBZVu1DOJQiE0yViWVCVa1ZBwDV1aX0WdW9mTwPzWklJaiVxOaNNhXMax5gMkqdOeE8dnbG8xcFjxCSzw41FVNAs0oMIz4Jb1/RpyRpzdcWwIiFcPQ6BiD1lmf4Frn6u1a/H07iwHTgkRG14oBcl7WkNkwtGBJmityruDrzbuQyIOoSFof6egh2jaautE7B6tEhwzDCOKqLKbs4Ssvs8GINw0t/n03EfghaGoaS4Wg6R6aq7soGlZo5MIoHjyZ+8Qt7PH1+k+cuFYYyMGSstmF5UTvWJqZSsSkHh1vvpJ0pY73FNaQPp+SFFkDV5zCFHLtD0+VXCJpEf2/6+o0e7l8fEdfM7ALwY2b20uu/GBGRm+PfeJnZHwP+GMD5i09wanskXOTG0DbkzowjJYBVz7wNxdeqjhhVwWXljSmTxWxQ9YCkYZFQh5aHqtxG4cF+5Z2XZ5zZGJnPlL4GQJtSHim3qqUNvESwUv+2xulikgKguSp3i5J4qLgDvDI0dQLuysmIKdtCa1iPHPZMqKvdfalV8NqNK/wXf+pPcvXqF+VehRxmEZgVbHDGwXjmHf8Oq/1HfP+f+R4+9MFvYmM0ppIEcCuMVmEozGYz5mVktrnB1sacYRi4e/Mlbt/7Ob7st/1eNo6dJmxFoFmzrV891jtLdUIdEw0XnKJUgJZZHdq87hpefufBiv2DxpOX5pzcMA6WUzpHU3fdUu3Qdw7KJu9REGHCPy0PuxYo/zsvE5UGyYWk5lj+CKmrpvrG1c2XYm2fOT6ola6GM9Ko4kgQ4elRdUGm61cwjBMOQ6tJNs+kqIpKLaY1FYmzT4YP2VEmPPLoYOKFJzc5tWWMQ+Heo5zy5JPIdS8ZJRx56GSWTE5p8oTkWsIyzatMdrJj4kVpjYbO7pIQXRSpXCIllMvFJKlkq8k3pPIKk7u6QPfk4WlICwV2RcIUwQA+sbVRWE27CUH1oSV6tvNhYOvYcfb2dsGC2eYmj1+4wLBxgo1jg+IywrFERDA/6jrzIlSno/kDHjnxq0cPu6DNWhM5Baw5tx9N7E8Tz54Njm86Dw8rmj4mj0uLzF/q6ZZtSgiysnLXAew1B8rnHnP93lN6dMoQNFtmwSLnrpsKhjdTgv2GDveIuJb/e8vM/j7wAeA1M3ssIm6Y2WPArTf4d/8G8DcA3ve+98U3vGdL1VgGBeV4FlQNNGJl1OFonmiMyDxUu+TQ0vggDHB9KeDKgiiatGIteHgw8cqtFTsHlc9drQTLvEBCNyc1Dy4djNZbYNeKVTWl3k0Vd0BGm65btArRdbqphjFLaWLqYKNIezuG07zScLxlRZfdyz/43/5Xrl27gftAW0neVwBKoQwlo1SD6698ir3lATEFn/z4L/O+931VqmM832vDyiYV8RHFoHklDBYHh6we3ed9z46cO7fJZAqFaqmP70YqYoHb2MUrIm5LSc11Hh7hkpVWVZoatedYpjQeHGpc32euL5Xf0ypuMvno0MgqPV28QhgUO2Aova/SmMI0/q/qGYfkErpUIrBQiG5YYfffYszeb+ba/sZ3bdGSM41IZyGoI8mrqaYrukcDrEdjJvY1RH4e6xEbQBM+HNErbOPRYeWV1xbsHzRevrFQQmRTPgyuqtC9UUIZQTL6QQYzwRTZKaR0j1xwzVg7XlvDSiFCgXc0iBElK8pXT/Wqgeyt8tu+6dv59C/+BDdv3KA2F29VUDpqLq0cBawOx4Ia4qoGRMafOnaGev9V7nz2X4qnMl0EZsE4Gzh++jSXH7vM3Tu3qKuJk2dPMpvNebR/D7/283zt1/4ONraPUaKj6dAye6qhw1XyI0vCuubnkoWhBWM6pMOk6qkt8CE01hDjcNV4sFN56dpSMxmauhW3ppwgRPVFShjNUw4qclEwdBr6etAczYkhJd0dlk9S3q2xe/jGsOOvX0bQ15zZtpkd738P/G7gE8A/BL43/9j3Av/g/9f3a0dywkhcWsVNHhgOgwB2WikMVR/u4K6VUZJ8bcmCt559UhWragP3Hk78wit7/PRL+1y5tcyqT2FYhKSK+hx7dQrm0f1Ekl02LYrWjKkewRVlSlLVE9svDXdPB9+6EKf1DTlooxUj7elD6so1kSaKZ1XVeOzJ57jw+LOYZ9LjrDAOg/JcAsZRYVSxUsb9zZs3E+hJQ1ba7YZIE40F4QPGwO7uHo92H3L8xAkiB0obpNNUcFYhEmef9Z5FH1rRZVrpTlyR3l57Gy6ZqJuSM82Cza3C84+N/K6v3OaZCxsc2xxViaVqIFaavlNKupWzrbcwfOrwlTGUIGJSumHGwTJVHU50vsLpkalv2doOCBulkAtLfmfCizJlhijQMiDKE29LKNXNKNUzyC5zznHMiy4CU5Kjl8athxMf+8IeP/XSLl+8v6C6LvWGBrkzpgoqnKjG1AaZi1qf26s9F4bUMZYwoVnmwQeYXJY2JCCN3lcbRJh//vMv8al/9X8z7T9SJR8qdg4fPeL8hUtsbB3XoVRa4vX6q1hq0C1nVHVcObQOLj/7NOcfu8jV125ysFgKPmnObDbyxStXuHb9FtNyya3rV/ECs+0NaHBYK9ev3eTjH3+Rw93DJNtrx7ko3vBJjs8aJS8NjaNsnp6DVONoS6RsMrpiz2GVmv9ibMydZy7P+Zb3H+O5S3O2N13Pqpb1QRspUHi9OqeDYFhkVa/4juYVhkqENPsWpiE4tHTxQnc7/Gqv30jlfhH4+8IIGYC/HRE/YmY/D/yQmf0R4IvAd/9a38gCWmfnLKerKxgkMVy175ZpgbpryWB7S8IvaCG8URG/uUeac3hYeenaHq89XFGnijWpOKIK+21D17vqFPamBD2QC68NKFDMVEWSsaTCC1U21nwPhjaVUJeMIwCRIdlq99s6B8KpsnI5/AIYrFJNGsdw4+oXX05lkJQH0SrVnGEoUAZWU7BcLXQYt5pWZRO0khhsaygDJBePY6ymFffu32W1WDCfDbooMx8mXDkjrXVCrIIPrELUskdZY5jNFKhkOFHVDbjJ4XhkKktOMKRfLzPjvU8Xdg8Hbt6vfO7qAUuruQQaTaNsiCHJrjBqEbQR1knKNKJUmWfidbDGWkNsR5vzrVjbGAQaKuEJIcmUlmmGeYhahMgyh7Y62vxmGi5RQhPIarOjIMYCuwfBp68f8NqDSePq0LoNqzKGeSRdrQ64dQco6gLDCqWSJaGOmYhgVD+qNMvW1JE6iiFI0XzrQoUWfObFn+N//1/+LHd3bvN9/91f5eKTLxAOU13y2u3rXPnsS+zvPdIYmuxcxlTOTEwYUCwhSnPKzGmrxiKCG9dfZT7f5LljL
7A4PFwXX5/+6U/ysRd/mRPb2zz11PcyLzPKBONMtPWtq7dZTAtOnTnJuD0SgzgDSpMEGmNpwWDGWCLVO+r6OoltfQ2XrMS9FxyeAWsmHs9IKNCYFfiKy5s8fXbitfsTn71+yFQ9U02FJkR0w94RT0d0Ilu7pVUZLwdaxoMbZlMGbhZVa2+ytP+tD/eI+ALwVb/K/38X+PCv63sZ0pxnXVjIbPdJVXzYQMSkRREdNzRFXppgkT7VpsMBNSAm49rdJZ+/seLRoqZrtNBcTL/3KjWEwY9WqThTh1E6tjZltWST5F7NGNCBaV5UuaabVGl7UoNYhNptK9KwhyrU1owEqXXoSQEo3L23p63xaOced29eTdNI5CELYxthnofYVJnqkqlVfJDtXNNuhD3X6Pp4wUtDGSmjsWwrDvYW7O7sszEf14RQMc1eDYRHktGrkdiwWWL9Urnr8LEinDQt33qeZB5+F/Kq4jFT1SNRkLG5WXh+PrJRGlceTNy9v8w/g35W2BoecgIblA+k80U/pxlKztR9wFDzRnq9W/YtWtsALbPARUJXDZ9IPLa5ZsDSnDok7qtqRxn8YyOqKmOKpyxQxP7V+0vNOz0MiIKxIswZKEBh8oZNMEuj3mTaI15jjTe3qs+yNshBcvkZZ35P5t4EYCG9Tx/8XpDs9HBxwN/6y/8NDx/co5jzV/7cn2AYwNsAbiz291jVwFKe/PilS5w7f4Hr165x794dnrn8LKu24tbdO7zz+We5eeM1PvShb+bH/sk/5mB/n8NFZbHY48Vf+iVe/tSnBHsatFWlBdw5vMsv/MLP8uFv/haiFKZV5f6dHR7t7DDb2OQ973kXx48dU9eqioxeNw8msfkq/18rUCdBWMrfmfT7Mkou3dQJhQwjhIvMFqwisjhxTI5tFo5tDsznzrW7C17bsfz0xDNFSe+GVXyl59yyMCQKowm2S59yLgvDB8Fq7dcQgr09HKpZWnZDg0XgDpPE1JLyecoMraDRW6Q6I5sbn1Boh4YT3H048YUbC+7taqpSn4iix1sxqwRawP1nNvejOYYtGXTrt3VePgKBhaMTaWfWZumhZ1JZaYIL+JoEtAgZVpKd11SGdMkOBq07avVzHjx4jetf/Izecz/Zk9yySaTW1BQQ5a4SwAfhgOENN02imQhlwJgwVqbGctrn8OCAsUi58dxzz7F5fIslPbGStY5dudGpmqGHk+rnTUgbX7xXpZ2bSEeuGVYFRSlGQMYR1o9V7f6T5+dcOLvBzqMVn3j1kMWySaXUXAhzSIpaIn++NGrY5Aym5z9BVqGd6rDXmaPeqlfkxB+l+sj23mg0akFOYCehliSSuxsU8GUjSl6e1TicgruPVrxy65DbezpgNORE5i43PQejDwuQ7JFKJjWKxGwhM13+m+uCqGO/1nQYw0SL1Ls3xVC7KSKkVyIRcOzEMZ55/mle/Ngv0w4PWJm65pIKq0gi1cy4/ORlCoXzF85zb+cO737vu9jb22f3YJeL5y5w48p15qUAUki5Sz5htXF4uK/OLQRiNq88+9TT/NIvfIz3v+9rOH36DIeHBzx8tAN6lxzfOqHfl6BUGf2Kd3w71oSwKWRTn09YKlM0h7bEiqlJrts6H2HyvOhcSA5AAcgE+loleOrcnHOnRh4+WvGpK0v2D8UHqVidtJfSw2LNaYNgTA89Q1xBgzUU7tbCmUi+401K97fF4W6GFnCrlMGoK0t1xIowBYBZDNQSDFUTzY1Cy0NDLe1AGHzhxoorO5X7j5YMUdZ6avOa7ZyqyU6UWMlbNKSXH1LHXoeBktItaLQQ4pnCVx3ghOCBMJSxKmW4hT5krCjWIKEZx9a3vCRp+nBatnNBw1bkheY8+eyX8eW/7cP8q3/xw9LHJq8Qnk7bqjbWQ27ebCJ56tmnmc22BMWYYnx9JqikthWLVbBcTtAa28c3cBs4d+4cYxEM01KnW1pQykSNMaMdCl5IXXUGYYXs4EOYJttYpMEqx601lMuRm8sz+70FKWdbZXfQ2CjO/GThg195nKuvVa7cO+DeXhp1amVo5FT4gkcwFRlmdUf0Q3HSZ2GVo7yfL/2aXr/CsBgYaEhLPmoNesGJ5J41PasMJkPYqN9HcteOrwdfuH3AtTuVB/tSvAgXPLoopwxUS3ST1tr6EDPzHKmorsmLhshXsrMMmLwxrNlUaQ81i7VmhyvljE0ySLXsnq3C4WpiZ+cBynTShCZz5eM4su1PiNj9+Z//hZTT6kf9yI/+KKAq9qd+8l9y7PgJXr16jWm1lPO05brMblTu6AWGOKC9g33u7z7gYH+XjfmMh/v7WAvmm9ucOnWKp59+Jmc/yC1a0pouRLVX2k18WMZuuGuN1pbeljwnLIqMj0QerxVaz8SHsIHBJMGtrS++YB7O+ZNzPnh65NXbE1fuLLm/l3k0YVhUwsU5DE2F5ApV9SBIV2td0R5Z8fFmXenb4nBPUHqNRcWIKvK0H1MlNdCUpaYqR/wrRmW1cm7vTXzyyoLdw1WGZEmeGDlAwFtR5WJOq1khhVrNSuJlNd2gRVK/5noP3nQg1YIO4YwTt6obt44Fa5PI1VTITL187A7Zlje6NSwqnmSnnKmCaKxoAIUj+V+kGoD+oVYtgOKNzqy7kyarlA06PPfMOyhlZBwrUZ0ymiJ5QxHGrUniOdiMzY1twoKTp05ogeqBi7DJ4Q6RhhI6TJKH8ZoksnTUZcqepOZpcsmohyBVRJYyswbYpEyRSRdPk+gdb/DkBXjs3Bb3dhsvX1myczApNqIKZ9SFoXXSsa0RZXboM8hNIAnGW/iKtcO4+0n7wJSIzBoywwZV2nVsRMhohAWLlYwzL189ZHdRmYA52RGt/RhaL6NnDgsSAwwmc9dglpxW0uEWUKf1oS9sWZi/EWsRQ1RLV7TpoHIdwDXXgTTtGge3u/uQ26/tsj3f4rl3vsCnXvwYf+K//+tsbp3iB/7MH2VxuJvqMR2gZTDGUlissootR+KA/f09fuZnfurI9GN6T27w+77tO7h+7TovvvgJnnvnO3np5U9z6+YdDGexWrJ7cECtKy6ePcejh7tcOH+ezZPbKgTDsotK7HsIbNKYO8NlnPKiYi+DW60ogXM1rdkzcJf5cb0HsiOiEuFMCIEoONWqlDFjRpGH8cz5gUunBu7uLvn89RUP9iewYe3taMmPCOF1+QuCIxl2yc+HN1/bb4/DPS8gKTcbpQmWUIfdsDLQUvdMS+NLor5ffK1y7d6Cuw9b6kNrPtSBNql66OahVEkm7GNMZmCjlDmpVGg18EkEliAQlUXakrUrwoSDD925J2bdOwnVNGV9sqzXQ/9LOlrF9fVMaf3uMahCIXWx5k5pdY2NRhIAfZxdpFVa01t61aBLw4pTBiBGfNBkGHNjalnNBmCFze0Zi8WCi49d5PLlJ2lWGBIPVy69DoIpYTJDlbOGdKSzMTy7kzy+4oj5lzRRCqjS1TN0HFGLn4wL7uF2zSNVQ9IAXzw148LJgVduL7hye8X9R5oqT83JWbVpDJGRGmrLSqirZt7il0lNUVcBgyrazvUq
ui0HoU9OMOGt6PkFXLl1wJV7S+4/mpKDUORGtZay2T4LTEVIJbOI0OfkpEYdFRCtH0EZTFeKMeWBoq8ISrSOZRk5q+AoeMvyv6dkyNxEzlqSuRcuX+bE9kmKj1y89DTHT5/n/d/wEX7mJ/4e3lRoDUOhRWWp4a4MrsvDK6wIhW0ViRqa9eMheO7pp7l8+SmefeYZXrl6ha3trYRX1QlPyxXExMZ8i739AzaPbfH0M89o0qCJH3AyvCyCqOmLmKWZqGal7Bwd2mYo1FU8g/avZhE3m7IAq1LamWUqZaqMUmkjXbz2yCzhzpkFj52a8fjpGa/cXnLltSU7+y1TIsWBeP8MrA/CmQQlp0zWQn+90ettcbh3M0sL3XyS+CWC1eRyGL3H/Cja8+Hukhs7jZdvHmKrjjtOSo9MM4wQkMgqvQm/MzleW+uVR9XcTScZ0rJexlbyFje1hEMj809UPVJSYRD55wmsytijaUsy5ivecyCY8nDO4KuigRYNz1zthIdC3wsLEScuw0dhyItG0IiVQi2Ze9McN6kONsaR2bhJhLTyfQjK3EyZ8JMIoakZs805Fx97XEYtmmZoIseohxRHgxcdItay4k7irzVKU6Wsi9gV0QpYKZl8p5Uow8ukzRqWbkRVcbQElKxRMjrASCs70mU/e3bg0omRj//KAff3K8sqFQ2Drxd4N+w3S35gKLSea/4WvYJgBZS1zlaHZ7VsrftDDlPmUAQP9pbcuL/iczdWRASDOy2HoTcUp2tjnxvQ1WRZxSVEYBZp6LIMFYuUGWaX6eiAtgI1h1UnXNgnKVmYqsnsCPuQ9T5tzPLPluyOhmZc+ZUvUFeF/+Gv/RjHTp8izPiDf/i/5O6tq7z08Z+iZAW+jK5+U+SvI4VTy+CzMMkgSWXIO59/nt/7bR/BZ4rc/tAHv5Ef+7EfUyeXWL65UXxkYyy0qXL2zEnOXzib0JzWd5DNnHcjXmRGvUm+XHX4ttzjns+oD/HGizTxQ5ODNJHaFslBrF9y0Gsv50EzJHUcAAAgAElEQVTMRIRweytCEQLj8vk5F0+MfOLqAbd3Km2pwdkrxAUEXfaoQLMSMCET1psVL2+Lwx3SXBHaDFJe5MPNRVZdmtpVrXzmyiG3d5ZSCRQnSmZeUATlNGHhNRrWlFNSmiuTJJJoKk6p+kB1jUi77ia6MNQfJRSUmvlQ7EGGYWvh5eFcXFdtt2Jr7qHwVuVo9LAngyJFR6UbjELVT+iSC1MSoC4skZfhUtpgmhLlru81z1FrU2m5kIwyBGVskg2iYQOlOWWUtK5ROLY1Z3m44sTxEzz/7HOENYYmBUd4wk9MGjRCJO6ah3XK6wijlsRpcziGeYApETKiJiriRJ0I9xzmQEIKhRganlN1FFvQsrMQiRcRa/hga4QPvHuLWw8bN24vuPFgxRTKKfGMblAeuA4jhSvam26AL8VLxjo/Ukd4MIaiFoguKTSmVePl6/u8dq9ycChU10aX1DMK02SpZBEp3m+tdEQIsowVNnjmwTRqVTFDHmLqeoTXq6B0okwqpGpNme5E8UF5QflclfMzZfc8oMXe5L3IbkE5OZXWVhw7dTYFBjCbzfj6b/lOPvvJn6HWSXBc09HjLSN4ccioAUt1CHke0Brv/+r3s7m5qSiEYjz73Atcfuoz7Nx9pEMugjIObG5ugjXGcYPHHn+M7a3jKb+NrIIzYNA6BCVoz7DsWIOO/1vCKwoPrp3IYHKjTClTTb4uxnTcGvSh7IJ7xZsK2i/knHkYclhPFnEbc/iad2xxe6dy/bVDbuyIQI0Q/BOWWvlJZ0Gxrod949fb4nCX18iyog6pLLJCLKnN3d1fcuNB5XPXDllhDBOaaF4D2ZHzEADCVJUPfZmYHnqPBbVMrBOhKZydUEyo3pDaemyikQdelT5dRiktvpq9tbWJBGAkf0zTR8vKRBDSCl86DJl4OCI5VdgRdhfOevq5oarMIq3bTgzqHMroim4NpLwhh3xQiKkxlE1GH6hmmc4XlIJCcQPKTAfvE49d5LGnHufkqZMKNxokI7WqZMIoQ2LawtnTlKuNkN2Mh3DdlphpK+JHosn8VK13UI0+fKSuYDCw7gSOuh6AsN6A0TeeVDitaKNFNC6cNM4f2+b5w4nP3lxw/d4qN0FJwjrxazfMflODw37dLwvD6kCxYKWlmvrylGZEsHs48dqdxmdu7yulMIsAEcn6nUvH6w25qHNsXiNjGvJrLbsuHRxFM2Zb6tsTRsO75l1hezrgpbhquc6n0Kp107i4fsgTLjdqTIkhI14mB5aLLDYZcMhsGSt8/Kf/qRQ7TcOhPXX8AjyTb2qReSmaUXA0tKPxoz/yI3z0u76bS09ckpSUiW/50DfzuU99ksVU8TDm45z5xozVtOL4ieOcOHGG2TjSuWezEMwUo7D3CGwwoilSuWTIoNHpUXWQ0VYqIJv2qVumXWL4kJ1T1W/SmniUwXUh95/dncAelRh1UEvSK7GBIe7l3MmRU8cLzx0Gn7++4MbdBVM6gz1juVfo2fx/O4V/8/W2ONwx3caKOC15uAlwO1it+MzNFa/tLtnfJ9PcoMnbfAShmBIhpTXvizkr7B6in4u7z0nti5WuHpYNkyPZnQ4j92zlDCA0HzQCr5YDmvXFhi5tJb9pZFaE8tTNXEMETLexyBDPIcG2hnKoCjXyyFz7DFBrAV6NVoyaF5p7EKUw4Jw9e4H/+I//p1w4e47HHnuM2cZAT5aNlIy88oVf4cz5C5zc3sDKwPHt48w3NwgKlJatYxAufFYpdchujvwGNeMCRGZWPAaqa3OV/vhqqCMxB8sscnTwdl13q5nBEdlNGETNaOPeFdgoHoUmExiqUqmSWZ445rzvuQ2ePi/c8va9iakdkenS1fd5Um/RSwxmDk4WTksOVDlcBJ+9ueDmzsTqUNAWlgPbA83RzMwTxeA6FB1Q3jTxC5ciqedSWEdODCbplY6GtTcZqRq5ngjI4eaajtTWCaY9olfknnLk+1nSEcyCLmbdF0poDHRnFTMNqwnnxZ//CX7lpY/pZ+d203wSrUum5ARSGSL5cCjcK+HKnUePePGlT/LEk49p/KYXNiZBFSS/sjGbUwYN5tna2mJja1OfvVcVi3liu63U4Q++5tsLxpHhLRMac893wYBn3K5l8iWT1lkW+7SmQd0zdGEAGsEXTV1Yl/B2N3sBaiTcVtRl2cQQzvFN5yufn/PkxRm/cmvi1oMDpoCVxXrU36+VHPb2ONxJlxciDRrOchG8enefz908YJWTlNT2J3ziRqd6ekWoM0GVpkwCAZETTiITDYtl2JiJlESQiZIkMxO8BWXgiPh00AyOgWYTsxoJXxRok2SVJh1yNaOEdKmS65CDnIV9gshbr9LoK6tD710aceFsGuw8cvmxJ9n7qq/jpY//dIZ0iUwtFAV2NZhvzPnt3/A1vPs97+K5p5+FxOOwpjAp12K4ePEirSlVTu/I8xCXDlnvTi/laCC1UARGpdmSkgMjRDPINRorGFwVPOlUXbeyLTmOqugESw6FJMWjHBGMedxAQnE9Ka8
ZDH3QSnIpATmE2Tl3rHD2uPPgIrz62oJX77yOaH5LT/bc951Iz9t2f6pcvVX5wpVDVknKyW1oUj65HLolg7qaCwqgBT6JF1I8siV0oQE0JYacOZvGteZrMvuIbE1eCJXo0bJDiMSFTXtqSnjUTJxH9DiPvLw7jBnDQHCo6N5IqMehTc7ozmK1zw/99f+Rhw92aEXrXqpidSjhRpkFtSmW2qaiQqJUanWKibqdJuNnf/anefKJx/meP/Q91IBHD3fU4Vm6osuAR2GYz7hw9hxPPfU4QaHGlFwe6k5MmUSWCpqAvEgkuXYP2hT0Vr4He8dIDvHWMzdhWOqKq+FdopyhgXJyq4uVn6Wp+CmZLzT1C1KIgHrU0vFELJwzxxqnjs149Gjg1buH/MqdhG+b4LM3e71NDndVoql05OFB5RNf3OP+nkw/5kCtuI/Qqp55DsImoCHnZFdeaFV2rbkLYkh9b4R0uq0IZ7fc/WETbqPIFbqBaYQW1CSROoEUkNOPpsyvTt19CEbx/CuXgP45vTsWafyhqerK/I5Av0t4EDFRo2De+PL3f4iLpwtf94Ev58qrr/LJT3yK4iPD6JRhxkc/+p0M48AL73gnT126CKQrNLTI3JAuPtvQnmESaX9fK3xCJE6NyElVUJqUFZEQlioPXaARuUFCEQk6fNK27VqwlQG8YmlQawECGFapSJA9XrEBIsUD4ffh3dOtPCusZliULNtDTq/ScAipDE5sw3uf2WRrw7h6p7J7OEnO+tYsaoDEdSssFVH8cDf45VcPebS7lLQ2ddVWU4+e6LNbOq1LEnXJderZhnKLQtLQSPixYUcyUJoIV1eH2qFJcgCGReqwWxYCBEwmWC2TUwsahL0m103FlYdRmYghI8xCxZalNd+icv3qS5w5c4l/+Lf/Mru7j4iYiAle+IrfwR/4w3+SW9df5R/97R8gYsXMByiV4iNeBI1sb5/gu//gR/l7f/fv8OKnP5lrNvilT7zIf3X5Mvfu3GdaLASXVPFkv/Prv47nn38OSmF7vpnZ51XdtVWiiyWy47ew5EECmwQLeye4SxLRwkf179TcA5H4/BjqjmPASsgEFhluiLrGCuBFxWIS5yRJDYJwzETk1jyOrTUofZBLwa1y6phz/MQxNscFr945ZP9A057e7PU2Odyly723M/HFuyuu3Zt0aHvf2DCFUajCvUqRIkI9F47mn0ZRu4odaUpVeagtLkRWRtktZ2GnWzbbu0bmqoiEspQ/laq2tZqUAj3PRn/jqeIRdlYBX4GPpBIC3FtunkZLo4M2jMxOKWzWgRd5+Efh3q3PQD3g1MnznHn/WT704Q8zm8+Zz+YMZWDv0UPG+cD1q9e4/MRTnNrYEmeANltrTkyV4plHH0DLsWyt6j1HIlcRyRlpinyznA1pyu+pKNemWR4s+e945KQlMyjKqfaVYfMkw5sq+2ZQpqzQTLySt668yYY4q8RGw2zMjkH2fLOVPrQoTPn8pnxOEt/IFfjOSzMun3Ou3F3w2euHX9ql/K+/Qjnr93ZXXLk7ceXuJEOdBx7afiEPsTpNImGGtla7eEboepe8doNQE6RXrEEbM2UypbkJnUVeFx6xjpaezPCVY0UwAjSoJQ/n1ymfCtikC0pdUJf1qnZdR1DTmKIP6DBu3XyV7/8z/yEvvPcDvPgL/yKNhAp7+84/9H089fS7eOLpd3PpzAbLR1c4efwkA5UzZ0+zsbnFOG5TBmPn4Q5f/83fxEuf/Tx1OoBwPvfyy3zf9/3nvPzplzg82KNOSnQN4OTxbU6ePIkh6EisgGSLmhKVeHoO7mijBnNYBCtsPfnJsyALuudF8FKkaq2RN112IDUjggUol1ThVKU+IiiqVYk0cgSBisaUyZJwpqaHpSwbzZEo1rt5XcgvPD7n6XMDVx4s+PyN9qZL721xuK9q8DMvH3B/r65zTcKydTEtGu8h+qVnnYhcdJdaZYzGZJYmIZGMNPCiuE3PW9gIEVKNtGMnFlpb5sLogVkoIZFsh3uutU4lPXzvFZBrAIB0p4nHOdlmaSGpLdb3HVYcXUAWSQeImNEbElG5qvvU/V1iBRErhjKn9c7DgxUrxnGkLsWs33v4gLMXTiagd3QQlDS56KJR4h1lUrWVWKNmcw5SqzgaYFA1Ccq7o7eRRDWMGJM3qJHVnpy5jVjHMdAqU5o5pqlL1fL6q7mwi9FWBlGSGum9RNFYxJAMzTN2NhrJBUTGMBd8FSkJzHgFh7kH77g056lzM/7s+NZhM6sW/OzLezzYq+IDGjK6WE2ZnWULnth55GVfsuVPaGTq7UdxbBW0qWFDozVPv0Zbw1Ui/hMzTgy5WcUmW+eAl5IwQDq3KSpiqgX9ymlRsUEQCwkzNhpYpbRRCYoJr2zONtiYFXYe3meaGkyVlz720+vOtJTCR/6DP86lZ1+gtYm6qtTlI7xWrFVmGxv6rAFnSa2FjdnIV7/vAyz+0AF/94f+Dgf7h9Ta+Lmf+1mi9vwjOL59nDqlHDoGWmbkNAYGr7Qur259WIb4G0mkVSiWrqfNgqVllAMWCQ0KRpGhNHulDqc2Unqq4kXGMHVK5jC2SmsFjcvLjrRkgTmlmi05uxoykxW6RFMhipWJaFVKuxk8e3GTy6cbG2+ytt+8rv8SvfYOG6/tTCxXmcwY+sBfD21YqiHUBeoBBbK4UyWVFA6lNtESRIwkP2JwQeAzmRjcWrYF0oYrNjjAx4S8GkNT8JJnZxGhC6PSL+00KDRLXDqoJdaXEtWybdbCMYIWheZTxgOnpjy1+BRZ+XuNMT16wOGDa1x+4knKYCybMS1X1OWC5bSgLpbUqUp3PBt4+dOfTNgoCbJmkOQiTVreHoZg/aM3SUxBnUiUrF4iF3CQ7yeIIvIMj1zsrp8XsSapNWQjIQCLdV6QpKGVOqlNUPaN5eANLXaJPiuWKgurDY+qGInce4o1SRijBqap49CnQbXenejPb4zGxvjWLfO9RePW7kqDF9BabF16ly25Ss+a0BQQdS0jJaGsIs5f/oY8NLQncl2a+A3whB+D/GZEyyk/hmDNhqBHRHrWakc0SMIfYYLTmJJob84QAn+ElXe+a2KYz/id3/pRTp46zcb2cQpjCgf0u7k5v/ujf4QPf/v3ariGFRb799i7f4UnnnicYoVWVxxMK1b7h+zvLVgdLpkmjbf7wAe+lu/6Ax9lNpszKmEQkGJkY3OL06dP8/u/6zu4cOkimcmdkOSkosv1PlrVBeqpaGmvE1q4Wlbh46FuqJJ3bbS8iKueqRUaTpf8Qp7zNQSjmfZxzWfbUo3UfCl4J6XRfSCNNRGt2WPhJjOXPgcp/yw/dCPHMzaYDbAxe+PD/W1RuYMIDSUr1jRLyABkTZONpK1WVnhQjmQptWAxqcXv2qk8CCJzqfXhiLzRoISslqonkdHW7aclJBMgx5onOZLPUNioDseoTos+X1Efalllhe+qxpRiWEigJ3XBkneBCFhHUa5VKKda8jDwytbxLY6d2GZ/caB42NZYEdrkWaUOw8BmbLH0Q+VdWOZrqihX5xCghJO0OK
emHqsKSiMJ69pxf+nszaec/IIO5aavT8l5dF1yTJ4WkWzzywQr06zYMCzjrCyq8juKKdp29PUAA4LU9qu6LTbRujOwS8aalBqSmSLoDNes2owoxkYUbaALLt7Mxvdb/YqUPefFGm4in7usJVQ8VHpMsdZtNw9VB2/SyYPRYqKru1qe/qVBZGcrf0H3rTa8JfcUYEUHXJD66VIy5kLSWAuIlWktrwZBEr5KFUzV4HHUUdcssFopWCt8+Lv+GN/8bd9LBX7iH/5NPv6L/5zz2xvs7NzjK7/29/ChP/hH07+gSI1wY3vrJFsnjnO4WMj5XFdMxbFa8VEt8TCbsbEB3/TBb+K97/0Kvv+v/M982bu/gp/8yZ/k67/26/ifvv8vMZvNGDcG3DdUaOQ95lV8UGvBbFC8bzNJCvsZQI45DJAEy5KtsMDrlIeuS3RgWouWe7v1ArIKEsReBzdaYuaW6pqmLkEckWmsYjJ+NQbGaQWoeNGsVIhhUGxJDguKYlmkSdU0FfR7vMHrbVG5Q25YVCF4AK6xdAw1w32CgvJO9BBLGlxWuqICaE5pkYN2Vf15I/H5KQ026CZ1VSfVdXvWgg7ydJpJWw0g8xF45po0QQI0wut6M0Xz1MKnDq2hCscQoRjRwyWJJvdmi9SqIyNLKQHW1imHt1/9JKfPnMZnI14Kw3wQNDX2vHrd+MWN2XxGbcErX/jCmm2XnjkPeTcYp6zM9MGHq61vmNpGVHFLyNLAV3hJh25oUeFt3RWROKuHKUog0IEPupSR4coGT5iNFHhoQRYPmXw5mrok7NGUBKqHpWdnUEvmledl4JSEeQIrjpWAoaV221DiXrzJ8v8SvcK6cIruk/TI4esgotirug6yV/UGTGr5xYTSbe9YhxPRZz2gYSjpDWgpwbW+hpOEJfSZSyGmISGlppbPXEmLBoM5Q3ZcpJU+1mmEMJmeqmYYyMQ2K8Z8c4ONzU0+8t3/GV/2Vd/ISy+9xPMvvMBv/+1fQ7GSkKYij3eu/DJnzp9mHAqFgXEc0ZuXi7p1eJNgvjGjVTh16jQf+T2/l09/4hN8+Xu/ir/wF/8iZ06eZntrm7HMlULhIVLZg8kTjhxgGWtdsPpCMf653uPIG2Aipqulmi6ALpzIvVoj1kVZJ++itfVQH2DdCUXN7w+AvARKk5REshAUJsJgSs7PsMy9rxp8g+JNLLuLVjRL13Mm7hu93jaHuxsUb2KdARq0aihLJn2fgWAAl2uvl9Q1DR3WNbwZVtVhZqsjTiGq0zNgvWVufMIIpUvsIikp18xHHWYoaMkMK457d7sWSt7qOpoGwSoGmGAgZbd3iGNaK3eGjoUmUdIPrOqaCFMjqItHLBcTOjqNWZkx25gf/Q6mwH9rwa07t6AZh4tDdT0JSfU1bdi66qZMecqa2PrXSRA9WubZJO69GvR+1ISjrHj9vdrIgn67mgc+WWGbLjlSp53SRy8lbdlOWx9UlhKzSOhKUIMUOZKwxaTPK2qsQ+aa1dQZG0wuJzAhWK63sW/xCjd0iXmzPn1Ql1jCja3kMp6KgrVMmSrKjfG1QiWaEjlbQfi6WDk91ymYokcqq+qzXNCWm0DzBKpQiNYJfBU4JYsgTLVLi4keDOdtyPtD69qRMoaq7rrgGo3YRHyLIDTe+Z6vYpyNzDc3lLHiocKsqHKvi10OVktdMmMwjiNjGWHo+Lg4mloqd2/fIyKYDg+4/NiTXHjsEmfPnuDc+fNraERgJEQLWhvA1qtaF1/T2lLMb2HsBzzquFtBXFTv2xsqFrywVomRAo1GatcB1OG65R4gjWbN15+5RCE6q1ZNXFyPFJg80ygjJMAAnV3ZcVOkuJEHRhEUXhFs2nXBb/B6W8AyxzcL3/gVx/GquMVIwiHqBGhEmfVEOfPMWhdsEVFkzw2pCprr0Kv9yGr9BtaGKCaSqxuPTGteMSUhzCxaVoqJvWn26gqaqePvwylaWx+KQaFW8HR5QmR6ZGpXm5hwXRzCmi0XljLLUbtqgiTwyj/5wgb37t3n+s0ZB7uPmG9uMfiY0k9PHmDFwUFjHAZabXz2+j5Xdm/o+/TgoiAr/Ib5gLszLSpRappRDEow8xmr5YoosDlu0spAPdgXpj9sqBkY5ik77aoMaabDnBJJArUp8cwkPaNneucF4VVST5MrL9YbT1CROAg9Q6mhjFiFYAJyt+TXg1DypwmPbuZ4O1DyH4LLHh28uargt/J1bMP5hi8/gVS0WnMKsqyMQPMhXciCRwodMpNmuiUkVk3djDgQy3GJ+hnrKAdqchs6tDyJh6gpDuhdTMLxfZpdMc+Qq1QpZWVvWaHqwlZRQggS1eGrKlfEQOjgzkPvG9/zu/k/fvDP4xinNuF3PL/JOM6k4IkV//jzG9y/f58bPrDc22fr/BbDOBIh2DIAamU1VcZR/c40zLhz7zVefuklho2T/MTHH9DqhPlwlEODa3ZBGpuU+x8aZO+parNMiK2dI0oOrpbkwiCqTFQ0xWn3Sr0ruQTzgk0mIQZJNqPPrRczNdU3+krio275eRh9AErQpd3JK9Yjg1TpXVcJomcHIVXNo4M3dmC/LQ53dzi1ZbQ6Igu+2st15C9AaBqTFcmSaEVwiJtusEgMcjDhYYk3W0std3FMJZHcY4mT9yOk0dKZZ2mj71ku/XvPs21rhA8iWFqOyapGmCqX4raurG1GkiGVqQ0UV8stuMHzA5MKxWuGFSGCxdz49t/3Hfz4P/txDhb7nD93mo35oIVuc7w0CoWYzaEF46qxs9pm18+x92gB/dBLAsyb5+DlFV2NkxAoU7LzjdUap38YU0atauOGL2gRHD92UkFok46CKJk6GNPRhjEyOC208KuS+9bEcSSdZx3vTOjLfK0gaT2ulyX4INndJPhmyjNmCEUlR+RYlJCQL1Ie6aVRGd90Qvxv9au4cXyuAQwigqVIClfqpchoiQCiInNel7JGAJVVAV85YxG80ijZFNX1UAmd6QViwkwzQZ1GD2drrSlPaZ3MSe6xdF2S+yWKZJT5/lcc6e5pCYvWkg5Z8EG8RhHmQ8sguahdh1+5eeMqBw/vcOnJp6hRMZvz7b/vO/iJH/9n7C4WnDp/lnEcmW+MYPM1z1ZGcVW1VXaW20wlmKaJw4M9DheN27d3CA9qbQwOkznzcYY1GGYjHsFipcLHxsIQSI3nkAp0TaJaiwE0UjNtLVR3MAkqS3Pcqn4/MoRNH5su5vW+TzkjrDshPAf2eE9NTWK2K8Zca9gmiRYcFTxTSSxfrBKYhpKLfHeRvm+ytt8Wh7sBVLWuyi5vGenaWe/MazBlNaQ/jiiq9Oqk6rNzqQrhKuleY/2QI8N2PHH3LrOMogO6dQWAd7gjW00LDSdIrLqEZoYqQhQZnNJIESRRGQkJ9RYzW3JVZNBZH5FTjVYGYXbRqCId2Dp2gt/1kY9w7/ZtXv3iq5gFuwcHFDfGMmJWWEwzpnDutePUMiZplQ82NeSRyqBoqTiKvNB6tU2+1+i9xSDYyVMGGU6ZNND5YH+XgTllPjBYJzD1NeVBqkJvGf2wTtWk4NNEj
LGu+pTAJ+kljDRWVEx63zRNtfQAtNYys0QDP2jdfEVWqMKFlfQJ1bUGhvYWnuz5ikI6nDODhdQ/58a1noNT0hWKOqOcdCII0XWY0pSYVNXqrT+4IP0KyBsgqqWlqkPVeW1Jpjb9LEEQntBhOrlN12NkV2tJBkd2l+lEo3pJeELphRErdZ4UmZxMtao2iAjZiaW4Hitsbx/jW3/PR7h99zZffPWLNDMO9w4ZBmOcKa54wZxVwN3VScIGNritOALf4Fv/wH+Sn3XGFABWncVqJflvXWVX2piaw0Lw4zhs0uqC2ibG+ZxSNFXKPTmnvmdKunJJH0e62EkvR48bjcloXvDWUlMGjQHzzKnPz0EDdkTwWvU05Al+tKo/OxQVOpWGF2XPR3Mo0pCpS5h0dUQfg/LGmPvb4nBf33TF0Lg8V5W7Jj6FXbVs+XogVmTEQDFSuiSsVW2uJGfFTbuLlapEa1QfKVMnOVIp4rmYu/syGpMnN+lkBoQlvmxS9PT+Nm9VycuVbe2WUYcpS1TYEAknxTpaVJswD8WqrkRkp6KIt8aBjacv8/jTT7J77wGv3bvFrZ3G3YcyWOwuZwq5bJkWVwuTp0QudfgWQ156lSHDmaJnsKBnZImpB07zireqw6AVhuxWbKrQJqYKbbWgDgOtGOMwEy6c+T7F8zsnH+XNlUQ16Kc0kwNgKknuhTOYqu+ylrIlXp6xA6QEU2azfM5At06ulX5uGX+s339a92Zv3cusN9K9WFHRUWmMiXWDy49AMLUQORjS7XuwVnWVbmFvwuJJ+CYi1WNtRW0OPmURVITreGhGwVrNlDEXmcU/+JDYbqw74T73QDBpzXhmYz2hW8l1zKhMURKqEBEebvy7//6/x2c/8+m1LNYmp47IjBUwbow8/sQTPPHUEzx8sMdrt29y+2Hj3qMlLQr706Z4rxBWfrC/YKqVAWPzxAlqqzQbRDoOE8PYWE4whCl2Q491nXVkAYfL/fWht5oWLBYHWBRsZpiNzIdNZULVYU2YGpZrLtbwaVc/eQ4bpwk/9w6rtlTKNRVNZNfaI4Ad18CZqks7A7+B5AMdKqkUzE60+SRDm7X1TMk3RtzfNod7v+nBmhbzFGp3SilMNZnoDLIvSC5llhVF1Nel6IFRkvxJ1r0GDGk9ngZsnNbESpcHuzdBQl0knfI5wzP3ggwCs7RkK2TCLJSKB9LBZps7haRpQ0YkWMbcWgini35BxArLNEgJTo5a5GqiUr0Jszt2+iTHTp5gefWAm4e7DMOM2dBYLQ4pgzOtFkxFCpS2vljENVagv9QAACAASURBVMg23eSmk1tL8BOF2lTFD8i4QvWUeVkqmAQthWlxhsvNMdWFcNnVIivEYBjmjINaVy+zPNhTa5w6bFxabM+au5ukoslWXwViYlEprmfaOjGcCqQoAVNLXkHPqVlq3wEy7Mw1ReFLtI5/lZfJcSw41br0HJtMA5CbY6MgmZqSN3nCdGhP6ZnwUBVdLavqyLjoLt2NksTdPCv/KTmo10/RChT9YFhNf4MZNrTs6tTBDhhRAp+k4HBhmOvcJdxSFSWsvqW8Edd6slYZzPi2b/0If+3Kq2xtbbO9vZ1W/7wkEm/1CP5f5t402Nb0qu/7rfW8e5/h3u7brVYPklqzhIwsBjE4HsBQ4EoCiUNcNnaGKjsuV/ElQL4klVSqEleoShXBQyr54orjCSEbhBExmAgkJDSAhSy1GoQQSOpWS416vD3c6Qx77/d91sqH/3r27TLqRh5C966Sfbl977nn7P2861nrP63ejVsvHnPp+LXEowuPnV1jNa1ZJ8zzRtb8WNjMW46PLvH2P/FdNF+BO42uyZtJYXRWo/rI8zEkd+wOTWmNey3GYrXGOLFZ3gLPTveJpLFarQXXeJkUq7WTFN5p3ujZadk0KQ5HMYt8H/OKfcnu1WyV6EAXQk0eSFzQfPB1XVOwjYwpk5zbmzgEAg46WXXk+V4vjeKe4FkPp42HU8aYXp2O5cCkrWSSLrwpkqWyqTOiSCr2+0MjOz5VQYmu7qmrw5dyxBm2eypal2QP0UQqYMhdZgTthcyCFcrll15ySyuddilg8JEOXMueGyPvpnviMQgwZdm4YEspEfwmYYhD69pepKUEzuHqUFJKoB0cY81ZTQckoW7E68FZqAcxoPDtPi6ZNrGKpWRfvXgNHb5WWgNj8AwGIZJNChxTXGqpgJqLDOu7E5adY23FtOyUo09yuF4zz52lJwcHa2I1ySRW08xYvdcBc2nUhjHEnjMgSVESEJMaVWnHajIpZQ4ywIlkf7FhGXFAvQWtUkuT4h6oCz2TFSWbrYLfqFV61WREgbjKqQ9xGuWQDOtMKfiFlJZ7qLUSwYuUqtpWmqIWC7xN6mOCwi0R/l+ywN6sLmHFf7hRG7lqEkmR3BZyuS5Rz5K6Br7mzW/kx/63H+X6syfcdvvtIlurSKbrs83KVVfU9RrPrqLaG96coyM1AKu50+56I/deuItXf+ufK1hOf89KbZJpTN73HXCm0bIhUxFyXefIlFJj1ibkTq8k1N1uh+WunKhHHKwO6xnWxNsqpNDMxGOYYjqSZKIuzGbQvS4RLbWmG0yQsYj0p6auksXYPipSF8eYWKzQAHGLnT6taIsVPPXCTctLorjrZnPMFrJprZVLIiPStJZ6UR1QkPRxA1oXsRg6VLOCoiV9mmDl7DuOCGGAtXOodNfaTC+4wqiMAE1CCHfWA2fau7yYVBtdX1fbh0ISyb6oMy6JpnZVAujBCFlopVFN00NqMkH1KIontaWoT8lYUTcWej+XZFvkw5eu3wpznAAax35RGLsFu3kmFkEpzQLjkM6O6eiIo/VaKo5SqVjC0nfEVuv9NvNW/0YtsraBnZt4CikfBHsFwq+yQ7hjfaGnigIBJ0vBYpmcbWZ8m+zMKvNmTTeYl1NWfsRkpq+/muRoHEFPlOBtLwGrz9AV5qQwMU0XWriStaT4xX3lKIqhzz1IcqI0+gGLiPw+ySpf6wUK5849xBTVwWdKceU9WGhk09kau1RlhZeaSoZ3NRxu4L2SIl2Nh3nFWZSk0Zm1rQtNk7YYxoybFtB3U+O0Ytlf9kOHD62iJ1rteIVmjdtuv1TSvip8tSMg1YHgoRiRHmgHb0zkWjpwLxjTfMJWt7G+41I1Wgvz+ZawUAx1T1hNLCH7Pj3JJmjVxtWfhi+F0pY00XpBmIz8FpX+tGDebOjnO1aHh5h1rB1i3gSfmqZjr59reAxsuKdLpaPPTvUhMSiljhr3evazsmyGgKHr+8hibfeLQzDaEjJAWVcuzgucuz+wuJvZPwD+Y+ByZr6tfu9lwLuA1wFfAv5iZl4xzRT/B/C9wBnwX2Xm/X/g6TdhilYyxJxUZCMFBwwHmQ7OIC4lwVMgfq0gCyoPZsiLsrLUq5uwOmBlIJlbU94GjZy0e5XC7gskUs6DOTkt0qOCFAgDB00jJk0NxkQr08Fisc9ASZqUOpPVhvXqXvBShQh0s6RW/g1CWVny2lCPilpw02HXnSV1EVokffJa8yci1M1ZT4fQjDkXqVdsBRxI
gthLt1ynu1swrdYiiM2ZDtZkBNvtjtX6SH8njXl3CubMi9I4ZeTSuVzcSydfm7GqA4nUUmT9fpdDszuxdGbfkCx4NiI2bAI9jK2IrkwO1weEr8hIVq1gpe77zzRC05A4806MrHD/faftD/dsU5BfqEPvqfrmi855uqS8WkSdZeBawJ3eVfzC7KZL2PSZSQKqDtK7VehYvb+qZSQSDrjXdFOR1gN2v7kfdx8wQYYIRvPR0UL2Js6oGizF3kyl3BFG3xmQpmTCVpOzBAbJPvIjhJK2VXE/i8jF0QVjYFNUx1eqlKhv2EcxVXifHx3SDXLpEsx6o8dCRrAsWw7Wx6Q15pjZne9w73rPlDWwLwc2rGVeEspQE0ddqNuz88qCWZgqarxNa6ZpRbcQRGS2D07LGiWt3MSSfwQjPsAqFpuC4VZuLB1NGL2VAzb2KIZT8G9NTC3HNDdymJ7n3H0VZ/MfAf/hv/J7/wPwgcx8M/CB+r8Bvgd4c/3vB4C/81V8fX2jTXhp+qSAHgx37RwdkQGBs9gYNSnyNGjWWJwqkCaCpDkgKaUa8V7sqGJmu8v4FC3BZySGTGG7VoFfISyyW1Rcp6tQBZJi9i7zxrLo19npFnQWjVLZtSKtl7mqzzLg9B2Zs9ydMRE0culEX1gUdk0G9K7N6YTBAr0H3XodviR70FzyxW4iKhTPqy5lKaeupzE18LaSqiFVaGTM6ARdCx1SeuDxNfCGt8bhhSN8NeHNaT5xeOESR8e3cHx4xMHhIVkO4SSYrKtzQQXBU53k5CF4IG9iiGahrjPLodqKcYjCJ8OJZaH34Gy7YbO5wfnpdc5OTtluztnM52RfiN7Z7U7Z7s5YcuZ8d8Zmu2MzzyxdstAX82xTDmVDu7wRwiUIqtymPWpFJArAGxzG3rpOwQqtfh2tEgaDtLkklbm/AZrcSnKkJvTal0DoPHrtOphwjXyJcHx9w2SvQtxlKuy1dzciWLrkr1mTaJCVi7TQXQaoIAhbIJfSxWsF4pwQ6fQexK7Oa9Zu3ij1WhbcFmC9Q5+llqplHpGy6JPgC7iv8En7hb05vp44OrqAAjeM9bTiwoVbWB8dcbBaM03T/vmxBOszNndgGeOoLpwBa1mvwtqZe6f3YLfZcHZ2ndPTG5xvT5nPt8zzDtDZTU9ymiqorHiTUpapM9Q8RXeFniFuC6IiNnRIwmG2UHZ+uCaNueqcP39hh6+ic8/Mj5jZ6/6V3/4+4Dvr1z8OfAj47+v335Gyfn3MzG4zs1dk5uMv9G8ITuxkkzZ1sOP6BrzYfHWwLQYSrfAcb4ohEF7e6rqy0r7azSwIJmypEckLm8nU4Y6RgGh605+jAScXZczUSrIo7ekzTz/FD/3wDyJJUt2m1dFjLsUD6mjT5ZIa0IcXrCRkyfRz1Sgox129KalM7kSkTyEx7KKTO7tpckCpkxFFdEUtJKioBJmY2N/6InHWpC8a2Mfux9ToMKScQ8OnnBm1g/e++q1871/+bzXsrw+ZMJodSMK6S3ZxWk5BqX+6ywSSOFNmLfZVwWp1WZsrICwX4Ygq+FNlWssAphhV8Q1L37JbROjNvhPchZER9NyoUVikJY6Rx/8inW39O61gbEncgipCXbCdl7rIemKTMdWSdSsZqFWuCb2Vkkpwi5BaTVmteKYMU1HH8QjCBRG4VWFHm7R6NxHVHrSyxAuP7jz91DP84A//kJ6VULeufK2kT7aHAlK4zh4bjkh8UjPVl5pIrMhL01ScA56pjj7NBEPUV930pJeJMIuw0ueuZmRE5bbBv9V+hRo1yvAFY8E3KicwHKvjWeil6ikXo1njFa/9Gv7Mn/9BWBm5KNO+l4bdq0v2lMTWPOjlnI/tjiW1X3bebEg31kdH9fl1sEnQoXcIL75EGfAZOgNZ0uPWvFJnBcfmkA2bUkWzO+5d7u74tyzuz/O6+zmH+gng7vr1q4AvP+fPPVK/9/seADP7AdQBce+99+4PWoyCOx5mk5ImyoiT+7AcfTiWkhhaR2NhzxJkOAOjp0H0pIyquMJnCr+VrEvRF+qwxtZxhQPV+S3MTks6Ov/8F3+eJ594XB+OOWOX6UDBgsLDq6OSL6v+voc2t3Toa6AX/moK5GoEB2sIXOSvB3NnPCU6MEB6Uxffk2iB07ULtSIBvEbPXtr+Xj/XAOiV3KfYgI4rNqAWcWvs17Vh9T6FJdE7jz38u7zitV8rk1OPkj4a0wqa3aJubtkRK33L2805rWRgXu8l3iTl7ureMycqSAVbrIgu8RYjTydqJDcrHgB1jd5Lh2xWjWtZy7PG+hfxbL/63nsZwVMxstOT+lxz7w3wbHXp1b6BEKlPoIbGBJ1EQTCVm6mmwtQNZ02fagRUWAXZ6GxHyYp3qY5/GGjm7NoeNjmrnfPzv/DPefLyk8wJUy44k2ITLEQeFPmKUJR94U9C52pcVlESYEa8hX78g6nctyXBXebOEiKcl4IgrYq+V3BfMox3RS5GNVxWXA+abGxcDFX3LMeSjuIgSj5NNMwUZle1n0cf7lx54mFe9YY/ytw7EZ3cnFcgW0pM4WpSBOeYmpMR/YA8IRBsTk/VreP4dIj5zHp9TJ+T3jcEME2NtR+SzcjsLPOC5YF2GgAMrXs3ZtOU500wmJDbFwZe/q0J1cxMG5T+v97f+7vA3wX4pre/PdNiv7hBY746CR2Npu62FWwyFcveKWeciApBrHJxCd8sLLObcOQc+mcgspLeKkEx+z5i0wNwPXbegFR6nsR78Phjj/GRD31YndIEbbkZfZUxmG197yr3UjBkTuqiQ5DQ4lXPJil4IrUe8Lu+9Yi/9n0Xhb2HCtzf+vHr3P/gUulzTeqbXGg5qbCbQW8sbkSX5Cvdqgsq3bIaZsydtEXTQmo0HeckbXzH+kitFLgDD75x9Sqfvf8j3P3KN9Haam/XHifAXZLGab0uxRLYurPZnSt/Y6gtenVyLg4h9vvzjJyW6rTKFmIr2cwDvFmttqAeApeMknIVohV17uqAWk03L9bZfvvbvzHNepnvSuFhmjIsFlquyVYrINFOUvNKIuzJYiru5jAHeAuU6ZLl+xgtRZes1B03LX3PgRsjklGGMpe8NBHB2KXwciCW5NEnHuNXP/Lh/UWaUeqb5yhKnZJhBhUBDZleXFB1naPRSRl36g3gu77lgL/6fbeUhtzAk7/xjht86nfPybYwhchcB3bVMFkJIaJWZjaRayWGCNQGSV5LYf2OMvC9jfymkhfWBWqVKmtAZWJzfu1ZHvytj3LPa9/CtHa8H7AuHquTzLutzntf6rnqTGH7+OoRYOjLzUOXgTZGEczzdWH6pfCZuzHHThfXYWM3b5m2Z2SH1foQWwmjbwbOJBSjCthQsd2co37/69+0uD85RlIzewVwuX7/UeDVz/lz99bvveArjcogqUNPbek0VBDq+coiGKMOJVZ2dKjsCy0DVi/U9mYRMfIUjlzJjywiXnsjm8alsQuxNiTUOa1uwQSnbHZn/Mj/8iN86YsPE+sj6NINX3rznyJtzXz9Uc4f+zy2ahorlxTMNnmNmbrxwye
pgxhdiP45t873fvsBR8c3nax9Bb5WlxZeSYxA9lrR1iDLZLX01DpOAmrL+yDeGkrYlJzTIIXh0isTpFx0cjyqmHcNUSMNFSf5zY/+Irdcehnf/J1/vjJRyptrhhK+2mjswJJlN6zTxXV7hybrfIsJY8F8JqLBlHtFVJQZy6PX+ZAyQtI5OSEjteVpGdhTWfEzo1REGq9frLMNY/ob0AYMJyi+UjHYQwdgU6sETkEAzZyeJvNTU/GXO7IBkvMqf2bosOtziI61+vpoQnMbTYou8VqDXr2rJqwf+RGdbV8fY7HDWuOWN/1pwNidXGb76Oe0nCW2+hwTSfdQBz3VDuSM4srqMh9ZQd/77cdcPNb51NXtrCZQVV4zpAxREQQ631ZigOGmVXT1JESDSKNXymI+JwjPInXBNV0wrdfEYk0+lRonzIs4Drj/o7/AwS238+/9me/fe2VyBW1p+PExvS9sznfy3ZSfZJDGyg0bkmg1p25IzaMYV25m3tR0Vkx57GZaVL0xWHYbbC4oKtf4akuLZDo8qgZVXMkLneyvhlD9Sq+fB/5K/fqvAD/3nN//y6bXHweufXWYZI4LX99sSjverDpeEyHTilH2klCNBbUWqW40qoBNGteMUk9EXeg5HGNBZoNSBmQ2RmtSUxWiT9SWWqh/NZL7PvYJvvjI07SXvY6jN30306u+mfnut3Pl5JRrN66xsQvEK76Bgzd8B+2Ot2Dri8L4DEgvA1GlNgrihqVWhhhlPkqUGaqLxbqqwpyprOuBNU5BpxOzHLxzdpmNWpCmy2PEpiq3G8EhUaqcdLwrQc9KsmiRxKxvQbBL0kIBY+DKV+/JZz/1a5xeuQy2wqjLFogUVNQXlFHfe22fqQu6HMM+G1MkaV14ZJtwD3zWCDyF7wvC/kr0MYFIVTWbgytO2EtBo89TpGFzMNb8awIz/07PNgnNhX9bicpFX6hQ+QifmyRf7GHY7GXhr3U9yrKFWERSDxd2wX3NhldDyrHsicVE70M7rwagm8xgvdYsLkw19Tjpjfs+cR9ffPxp2u1v4OC1fxru/gbi5d/AtesnXL9xwqYf0+/6eo7e+O1Mt7+JXB3ffM5aJy1Y9jxBiSSKC5EqzEtzL94qS2VlHgQT0bOUaikuwLJQoIXoKZlkEdDSyhuYaxeE6YxNlXXvBm2icP+UXDk1CWeIr5AxS1zB0pPenWUOHvqtX+fGs0+TPei9Ik9aNYoRyhERsF9SYHXigqEkeVTC0UL3UH5URZXLcSwVHjZqmP5uugKAhRRIYZYG2EwunW0GZ6ennJ2dsDs/Z7fdMhbbf6XXVyOF/ElEML3czB4B/jrwo8BPm9lfAx4G/mL98fcgqdiDSC72V//Aw486WR8Yd0v6YqXJzj1GHKaMZRbFgLaSFkoipQvCvazZi+AY35OvkM3oHR0dVyH17oI+BKDpxiVFlqRidmOQFgG7OfmVTz/F9uJryTaxXH2yIllDF0UoeCjN2Nw44eDW19CnRn/8t8UJlKyNHlpztg+L1ARS8CG/8TvXePSyETvK1WhceUYLi3W5SbvcQjJGjaeKLjBBkSJOu++VM1rokGoXJi3dcA9yQW1jhRsZqTCohro/b8LjSwbZ0sA7lx99iEce/G3e8q2vqCXMRRQ6IkvLkLbbzMr32GOeAZPMJOB7EjGX2rhlrRTTvTgBStIud6ePlD8rtFRMXrlc9bNIczwRUduEngdZ+cM429KrR50zFwZuio6gOJyeA7eOQqbUVbbmxbPUkho3TPJyPTUpQELLEBKbRkJgnYcKpVMwFbSKSFZ6TKULhj7r7Wbmg595gs2F1xI2cX7yjNJCS7hAdmLS/709v4Ffeg0H08R8+XOEzXgoKkEOzlquYvVvmCCg6J3f+Mx1fu9Jq49ERfHZZyDdWYU0a4ruNp1JQLk3Q5xQo06n8p0KosOq018UbBf6+smkZrG8ExnjUsx9Z04q68dcWfKPPfIFfu/BT/HWb/5OcDmMm4NncL7dVApp1PczPoem5NqCZKyYXblyK6rNs3T1ak5oWe9tQlj5YzoLmr4H3zSMjVNLluzkkmya4ZvdHg34Sq+vRi3znz/Pf/rur/BnE/iv/6Cv+ZVeg3hnZKuYyQqM8GCvOUa66kafavOO2/4AxTBGZBVxK0y54kspUqZnVlhTH1wo1mEOYyo7pCm8g2kaMsbg/373r/DBj91PVvfb9uOGlXIAVBWTXCXJzC2v+xZuXPkSnJ8K3mHkT+iAheUel9TCAOfaycK0ypvEJjCHa9tONyKKDC0DVy9FiYLHoKezpAqzJdAW5Y2krN5eW3Ssu1Lo6gILBuFbOG4dLsErhqJ9awox58O//E7e/I3fSU41SVEH2gpqaslclKb33JOp0XUpOYhUKwOIOI+od3WQAIWoeoEHbvQM2riYSkWhdE3hkD2nIvomFMLxlV9/GGdb0ForGJHCecekKgjBjVI45U0ZaNb5L9VYTKO7UwHNHBOZLuZsXpxDokVskp0q7hfleXjh2aljagvanzB3/u7PfpAPfvRTxWcsTCNF1cRxTAVTGjpjzTu3vP6PceX6l8hN1DOjgisT0AgW0i5TT201un464+uChRZ0Rnarurx0GejvLTLudVOOfSpRUVSKlWAiWZVYIjDc+7j5dFYqWXFd8socAWOeWJfoYChOeg3MkljDr773Hbzl7d9Oa1J7UfDjkrVztgxoHkl6iRGWpVRNpU83uwkKRi08d6G+XpM8A4YyoOxmE1IadfPilTQdLRlM1AamkWJozz+V/pvCMv9OX7rE6zCG9M+kxm5DEQJmFPHhtEmjaLPi4YPCLpPJwFmwqLzzMDKXuhmFRwqvkppmaGu7VWiVTYxMaEkW1f389Pvu5+ff92H6siuyKagQS8yM41tv5/DibZBbWnYO1odkm+hn1+SWkzShsEkROh6aHjSiZqkEopyqzs1tp4a6HL1fLfs+kmFBMJOBkhhT8ava91p47yJoSTI5/ZlpHIoo4toAD8rCUidD0sRR6AGNgZUMeHL9Oh//yLvrjBXuWNrnRFLPCwdHXLxwSevDUskKeqYq1MvlIyC7Oi5H0FUZTRwRrqR0yV7nI0JLI7w04dGtDG2Ko50ImPq/JiLz7/6VoHwXF0zuSDKXhb7vY56boDVLBeHhhT63qE4+ahpS8Rxnoptjri1KinGQskl3R1JALlZLH4hln0goK0jyrl/5JP/sPR+CeatIYDOie3WoyfEtt3Nwy+2CNkim9THmK3J3TZBhuhZWWwX1ldEHG3HQaiZw27s6fRCLVBRHQbCgx9JDZ9ZbwlITIwg6rIYHk6sVh5WhaTCReai21EwlKwzTuVXT3VRbLEmPulQrVteTbAsnJ9f45Af/HyaTYqe3xna3Y78bdgnlBZoYp6CTVgmTzWtFYfV71ZS2cijrUUtNFJWaKuNfWRsrh2RVULTSbYUvL8T+LFg9F8/3eknED9TwB5T0qYgPL51oFAalz7e6m2ohM7sUMi0x7ypgpZixcjCZT3vIJ6sr9xoVkzES6xuRENJKUy71y1PXTvjAR+9js9noaW2KAV
0dXeDCrXdhq8bFW+8h+o7Ns7eS7pyebbCWnF/+PH17CoV9eqpjsCx9g8tt10jJNd2gd3Y7pJow4fHLIjXDWEzcUzpxacitzFoLfdKU0lwBTlEYtTrc3EcC9wgWU4aJLNCKf+ipOC/vKi4ak2926/QBnyj//bOf/hf80W/8Dm55+culYLJqvPYLNbQK8ODwNs5PpRaQCkNFj7pIIsvwhNFZaOEYStp0feAq+BUR4QbZsySQXpLXqKUKygTKuWPTi9u/1Dugs5WajmTW2+FMerjTKsukYTmruy5/gus/iFtBIWoL0MoMMRWGbVkTkqsr7jX+RxGXls6UnT4Wp3Q1DpefusYHP/IJ+ryTKqUu5/XRMYe33clE48Jtd0LC+YXHCDO2Z6eYBdsnP0ePc2guiLPfLNCm+lTdY0WLhNMXZctrU5gKdSzBEqZ4hilhL7+qhqQynDR5ROWgR0V3F9FKl8PZwGhYo9Ro6o69JnLh7YJOEmTcioJWkUopS0n3ud/6CF/7Td/Gy+54pVRDifYuT9AXkd02uKzizMa6vUiZjlyRnhQ9qAucoDdERFfdxmWuHPEUtCF3HzsmdK6zILahzHuh10uiuJOomLZR8KDXMtlRSOe8mb8wNrmPkXd0+pm1caYKs9r9aa81jxw7O9U1BMWwm+IK1MxqzZ2jnI9GcvX6CY89/YywMpxVW3Nw663ces8bmNokd6cFTGuO73w92XecnT5Aa42zK49h1glflwhHeHMhEQD0WrrgJmjjA/etStXQiFrSuyyFHVe4l/mEEgFDv9cSonJuagJYUrhlmNJ52h5FEoY4UcSWBcbMMGplddVuMIUTzdUJkdhUC5KLBH/60Yf5uZ/8m/yX/82PisirnyMwpm4sTWaMvhRLC/o5vKKEy7RqPow+wtWl0kekcpMhyXpWxEAFN9UojYUs6z1xW6lNcO2pfD68/Q/zldQ0suvYSvxRK/25prSahmhY1iO5FMxVagov96iVQkwPgFROw4eBtb2z0yt8yiqfyD3KABOFHBo94frVMx5/4gpmiq2Y1msOj27n1le+Hp/WCttz4dJH97yanHfM5w/QbMXps49XnIb01y3kipDevWC3/duvNYEfuL8mVTOWXLA5mNOKZyk5cJGqrbZTZVPXbrmoLnjWM1RwXnYyV2BescYmf4BpNWZ6KdWy7w103imhMkxtKO2kyx/80dOXv8wv/OO/xX/xw3+DHp3tstXbPkxVQY0ZBe9MFK8mEjZaRXBkL139UDOV7r/6lZxSF0FK9riQmgo8JBEGTUFtbG3TngkfeTXP83pJwDLV3tBLh+sGq7KuZ1cXMlUHCfqzbXSrBZLJ/VldZxv27XEroq4gUqbLghpaKRfcrBLjhP+2WPaQTQI/9RP/gH5+jTZN2BR4a1x82T1yx6JRb0kRW5mLWPL5nNMnvkSeXwfkSutZjD0LlRijLiKl4dellcxzspud3aaz3TWWWd1LH33QBG3khaSwzViKWG56aCn8PUOYXZH1WKlwGEqTkm5JGkmFT5XIJmqfp3oJIheyyK5WGKtZ8MxTX+bB3/64SK0x5naR1AoxdAAAIABJREFUxyLEjL471zdkyVgPoiQCPUzmClQyMUzVAdaFUJ9Xd2H3Uj0NGAtIY0lXOqgAbLIkPyJuX0RsJvUza/FLNSUWir9wwFc6j+7qWuuQ28qUj1OXmhccU9ShBp7eJfMbkrg+D3+biksgVQam2Gpf6t93aIK33vXTfx+Wq8KwJ4fVARfuuLfiO1QMM7qK/KKtR9vtlpPLXyTOn8UKUrKIEWqoULzqvBWYhc4Lwbxzth02O9htk13odA0ItY6OOvaEsa7Ri1Q1OhLGaJbJXs9QS6zv2BOdpU7Q5VMTYm3uEjMP1pIJ6eex3JPyMVQwYTz91KM88OmP6SzXeJ/IKEWZ9/TzIZlvDR5uBSeFoEXLihXo9bPhVWJMTVnJXffNeKEO7iEJdNMUEi6Hbpb094VO9kujc6fwQSvFRo00veRejG4dUwdkgdE1zk6GLT5YCwRWiHyJboJZkor+LOy2yKhWPPuCCcNNJ5n38ZppxgO/8wCf/PgnObvxDAdHL2e6/Ws4uPUOWIusnPtMW605v/ok8+6c2G04v/G0poHNNWoorEuEfWjQMLRgNT4HgJh5JrAunLZZ3XyurrxBQTqwSmPJpgc1J6wvmE30DJZldNEdUnJFo2t7VI/Kpy5tu1XXGPVv5pghY5/y2BLSmxaTO5BabTabk2dnfOYTv8Kr3/B1HB1c0Afa1FFR3MA0HbCyZDdviAiaS9pWczT0IsCnUjlFUmndtAyNufVZeepi13KWztRhafrvURik4mxT/3sxm3cz6aH95uc/ulo1mco3N+/iDZoI7eFl6Bi+0hKWKA/H8Hstk3YbLLkIhzdKQqf/rrWi5e7G9goiJbDCg5/7LJ/6xP2cX7mKHVzCX/4Wji7cgU2C8rbzjqPVIWfXnqJvd+y2p5ydPQu903bXCDpTSC0f1urTgmTAQhALQFfMbdOv1bl2Jibp0qNVMTZaeSw8gvSpsPmUH2WPSwt60ZSeZDRWGcRkkpIOkUJYTeaB9UrmW6DLTqGGpSUREz6XwxmK3wEc5rNzPnffr/CaN72d4wuXWJaF7fmZJgOqkSFqB4GuH+9qULRWs2TG5ZfxVnEaMYQHUZCnohtwSlasmuemn1fRJF4XyKRpbRp48ld+vWSKu5AoY0r2kICIOmOJUrmYkgPbTqNaGjAnMYnc8BQO26fyVVotEqgPUs2EHq6IIlSK+DNgJByGqZDGvPC+D7yfa1efJbyxO3kG2/4W2Yzza08jnWuntca8vaEUy+r47fRpuPGEuIAhczpK5WnPwR1/8hKrY2NzpXP9k9dpkxMbFL/aFVWr6FtlbZgjEtTUzbSkDFtdZpGy43fX4uqMrhjiW1e0SYN8uLFKo58my7bWhmElJRQhJYQr1IqEaQqKKLJ2wphVNFzdw9T19x78nfu4cvlRDl/9FhUsWsX3qENZHx+r0PqK3fm5VolR6pjUn+5xM7tDDU1WAqouRBl2J8a6xIl6iFsK8morqRj6QvZkWtXikBexcQdhq1aLXiKHq1Dd6tCEZ3PaUrhUH5Ci7WWgmZLAZjZ9bqG9AGTBT6XGIHUFZpebcTIbPU3lpws2iR788vvfz1PPPkMnOTh/lnzi05y7s7n+tIjGnrTVxHx+QixWMteAs8vEyaNKmaxNRKtV4uuJnIPbvv1WpmNj8+zM1U+eFfkaKPpQjc1q8CcmH3e6NP9Y7RQoNYy4TnVGQxY5lvSsj2vRSWpPQnNjd55wLsDFI8kVgviskb6IMHaFoWUZxdwVbWEYLRZNPZXJYw6f++37eNvvfY5XvPrNtMNDGs6cocYzAWtyZs9qjgbkIr9MdeCT/o2MpE31eUFVYEWrRM+CRsf5qAUkiS6MLAlpBtara3+pE6pQtck00xhLBerUbV2ZFtOyIun0FrWHKrFJkEdjgqnLVRk3yatW41kOYD8QVtk0DpFK1POmYrkyE7ZnjXf/s5/nZ9/9TwFTIcFgPmX36KfgjjeQb
Y3Nxs6oDejAyeNY7vCTy7p8ut789atX+Ms6h3c1zm4snNy4jt0Qh7B6GxxcWpNPJ/MzsPnypiATXTYjGbCi6qEtsIioNFzFNNU9realDmaj3QL3/NnbObprzT6IrRlnX9zy1C9cYe6SY1q6FgebYA1tO9IBi9C8uYQcjQOkaanxVKsOhQW+5x1/iz/7l/877n7NG/VQSkyvTsTVzUzrRu+OzVFKHUFq3bu6K8SBWE60pdObuq8OJZkMWo9SXlR3U1rgaQWHqyPGsohYFtp6hXnjxXypE/fq9FSos+ABobuGL0ZMIvLFs1cWzKBaGvqcWtTKyYq/tZKSAmPbz4gfwH2P0wqu0PJsEt797p/hXT/zT7EUxNYtyd0p86O/gb38zeArMp3dXBend7hxGbcdeePJvcoDFg5etWK6I1nd1dheX7h+/Spcc2IyDr4B1resiKeMeKZz9uUFI5m9mjGbqgHShJgpyDVbV/xthWf1RHxB1OR+Ce78j17G0V1rdbv6yDl/+JzLv3CFWGp0SVcssHem8hhMxQ1I/+8lPVWO0pKNNhzgoAumwy+96//kP/jPfpg7X/V68SRue9dxpuNDcZs385issrLMNYlbc6nFUn4a4ua0nO4K0OyV6V+ij6jL2EuYIDx/wUK7DF6oc3mJFPcysEfp0tFt3Yooyhpn05+Lr9eeztB1EBZFSujBH036kBB5aqQ30y7JJUPqBAVW03ujVYiHhXHl2Su8972/WDejY9mlwFicxjn51GcJV16HuUvjOpm6kyIzHbADuOXtR2xS+RK7a4u2P9lNl1y6szvZ4Rfg6O5Dbv0jt3P5l5/G2iTzE/q+oqAoC2HvOiCdpca6lRtLrNBJM/xwwo4csum9bR3L5Og1a3IVMA9oSESOZPaL9M17Q01gujohF+nJU3pmpXDqYrSEa9ef4Auf+Rh33vt6YbaFC+Yk8wYA3VgfHcNhV4Je14gevRQMWYIwH0Wp4LqKUWhFpqYL45QXShd931E5O+rCbHWwV2+8aC+j4DFNRI5Dq7WBNV2FCVDMBVUo1/LjqWCVm07IKFWQ07ogNl3s9YCbCk5HOfjyb2laaBrdiMm4cuVZ3vf+9+PZ6L4QWYWPoPUz8qnfJc1LFikYrmUQyw4Qju0kcWjc8i2HbOcN2Y7o5y9nvRaP1SsnqvcrLCfntNsmVnetuetrD7n8iydM6yxJZu6VNYSaHZGermezJsSWVUhDjd3qaEW7ULxPN10+6Vx41SExNWLpRTsXj5Yhc94y4Dy91+ZD0lPTUkrzvurK3te7kty4cpkvfuZfcufdr9bfGUqXmgQGDziiSgTVUBlAaqAiivtCJknxBCPdUXyT0s0rPZOCbBOKOYaYFXLY/uDO/aVBqA5GtTrgdMf7Sh9+htIgc4Rwja5ch8CsbkYv7a8Js7WmLmaJ0kGXZlrqEa9oYSrQa2TH6xbtDr/6Lz7CA198oDohfcFEnVDQ8djBbgt9iy0bImboc3GGpVg5TFZvhk1sFO9qw0oPpX0r0iT3eGqPgOPgltcd137KrAIROiBVJGKQylNo2YLVyD/1QiV1yWnRSZTUC0jbr/6Lurh6opxtAotJumwUk+CFayoqdnQchtpEfR0qs56A+z78c2xOTwQVtSJJh2vUwCejWaNhHB4cs16ttdmGxJA5yUoGKFenYJvsI1cktL5sxB1U6JjyVHZs53N2242UCtOefnzxXnsFSRa9kNKQ04CJXMrNEIm3jnvHutP2GSXibHoZy/T5lUa8G3MpNjx9v4bSE+0NoKuAZXkZkD77Vz/yIR78/GcJU0GccmSU1CQWMzZviPkMWzZk35HzrvDdMqMdNKY3OvOyA4yLB/8+lw7/ArccfD+3Hnw/Lzv+S9x+/P20gzexZGNeUiTDIVx406StaAmDQVRDq6TUXBQPklEmQ4BsREq5ZdV1j73DWs/ZcHP6JDm1pLqjbgieXQrLHrn1WfJgafVj6BykpBmIi/AdGsmnPvZLbM62CBgLLQwx+Rb042gKiL7a4+jWQTPbkF+K8/N6frItmny906aVDJvu2LRmfXwRM6OViasdTBxePGbVyqSXL3yyXyLFHUaS434MNZGiyVSKRh3q2Ksf7GahNGPksICNkDfaMqJ9h9IkRWZQ8qjCPb0phy5pzH3h7PoJ//Dv/z08lPMQdbObGdOkA7SANKfcVDp4lhLHnPWtjdVbnbigf7NT2GJoFZlPTlvpmGQmTCY8PrSUY/UNa9qBOjY528QxtExpbTG6j0Krgi9p1ogisLovRbARSQt1Sc2MO77ttn1XsyJLZQJqbayMJ0kuxuJNCgLPOvTCTq2SAaMwY0tn2S386nt+AnBtG+olm8TJrjRMgKyYAfeJ9WqNTyvp8VNEta5Sjbx0YApsLQxzSGTTtNBYuSEI/+3Q+8L5jTPOr96QAusP4fw+78sG56VY34VGeG2wmhKfmjrq5liuoI1Ya3WGvSjEKZJu2uojotRLlUGRkXNxHDmuSW2nkli+JmDj2skp/+Dv/0PBZosugG7B4iu8aaNQq3C5HJ6CpeKsU/nm06U167c57eL4DNc0f5W4kfR9PswcC3GmjWKt6aFconP4dQe0C66fdWjyUXPWh/EiO3iQfYd5p7cFyiQlGEokso0Vfaka4ubc/Z23Cgc3cXVWahlJLZxuURegjrv29+p9nyj1nDUNhIsm/sSYl5lf/+A75TBdhJ8vJZYYIgcpv2bxJN33tcNbvylhDTTBdTV1uoCdo+NjLly8lcODQ9Yr56CtOT6+heMLt7Fuh6zaMQfTmsPjIy4cX2Ka1uJdnuf1koFlrMYcGJLAKsamrAqWGnE8hFVl1AJhk/EmJe1yayyY9pTWmJrZmUZYQNS10AXLUCSVo3jV3AU//dM/w+nVG0XAdm1JN9COyJBtOqWtjnDoXZ3T5MSiTnp9p2EHQCikX/rkRq61LOT2N17Cmbj6eyfsHtxw/NY1bW3kRm6243sucOPwnJi1CNkt5Maz0sOHDtPYbO9IAdDVaJfbEa4/dp3svaiGRktpiZcnFhUdK2KJ2j5T3blRawvprBKWoamOiodtVaT7iBQWORpmPPzgb/DYQ7/NK9/4tSSNxWtxSV2maZXFX2mV69WKmNeQK7bbnZL+utOmqMRL6YbpmtKidMWRQ04Iucghu1ofCmLyZN7t6NvdC46ufzgvKTesJkblpnS5qBEdMbD3sibALLFsq88jU0qaKFjKahb1zOIsvPKLBFnFooUgeq50CS+x8LP/9N3cOD1V4a6pRqa3GQvBm3M4itBVHkwrXivq+1m/3OkHIkg1BK4pQWI5NjWtbLdnnF0+4La3XSBtV+mVzi33HHPSNirqQ84X45FXsZssiZDnRPr8Uv9kEZEY1x+/qryorK6/DI3bJztW26I0zDdJaGvK1wStJskzRY5Otl+tt5SZUFvYdAt0UxP2yEOf5pEvfZa7X/vmvdvUogbxms4og1aWgkYek+qy0xhbA1vL0cLo69sE1llNayWDGjKHNTi8eKQLKNTeGMbh8ZEkrM/zesl07vI9CAhropWlGc/Cwryy
jAMVkhDRmCnYpLk+PDkd1Y2C4IIWutUjiwQyhK0nWE4apQr3v++Tn+QdP/GP2aFxr7NisSIzupbTprvE5onW42Utsli2kvkdNS685aDW3km3Fm5cePkF7n7Ly7jnG+/i4l0XOLrzgLu+/nbu/XP3cOvrL3F+ujCT5JRsH98Su5SULHSJGNLXmklGtSI18IUO087kE1iRwsYt2J52zq/MbJ+eOXtmx+mVc86vnnH2u9u99yLDiQ7MQz/jdUuM9WdlFENFZUQeSG9XE5VTXIdz/dmn+Pl/8rdZcodhrJGj1qpAkClCGHTYs9Eml4/As37Ohb5ErVcbKppSDXTZ1QUT6cLWGTGCmciFKUSuTgcHL0g6/f/+MqD1wmiVLUR65SBV9IWPPJ2oS8DU1bd6Dkrn7bkUzNWxXAouUzxwLlZQjdJRvd4Sy665tSX33/dxfuKdP07EXB2virEFeFg9AYMkTHrOpPcigVFS68XGLW89wAdfE8nxwZ9ksuNSbRXayNjilFx85THLeaf3hfTg9NEd8zYrxKvRGoy0x7GOUEGoIoenUWBdC6KlBoPdycz81I7NMzu2z244f3bL9tkzNp/e7rXuOaifCu2LTKYwHIkHfDKs1aLvJn/FkFsK5AqMFcNjcHLlKu9/998pSMT3ZHfPVHxJ1RRJrhdBcd0FHS5iy7V8xer891IyRX2umnIDV9BbSf1GQnG6hCJLsodsn+/1Eunc9R3KmbaiT5L9ZJSiwq0wXEF07sq1E9OvAz62JGVzRdSa7/Wi3XVDj4gDdaWh1ZGVH28Yy7zwznf+BD1mER+lwrAyDVQ89h4npMbNZBzoiTapbbZJelqr5QsX77rIy17/MnyloKZhQlitrS6OZH0wEc8406UVmy9tyLO5omwhsmtRt0kKSUv6IqI3PbA+0WqUj+raSWBZnhNAZmRtCFFRUNc2MqmzlheoE1Ehn0zRva12+pkJmsyK9rUSLoW0ZDQ63ozz0yv85q+9l2/+tv9EXYxXuNnAHptyQVqRuZnGrm8VzGQim9wqorkP9Y54GTmVtdnIujOZOI45oW+2dJJdykBjqxf5iGeWh0LTZEtBa/vFWk34eoCwczNYkt7GBKUCqUajJpU0vOA9muAeKqP/uQSyUQWuG0vM/Pg73gFL0Fat+BXw1mFa6fPL8n6YsbROs1bfg76/QERnsuylgtGS0yuPcnqj01vKtBR6ROSj0DSxXjeWa8Hq9sb5wwu70wVvBrEwFs0z9O71c1tBr4Z8KkvrBcZo6s6lMTOX9jvBnMWKq8Bqc5PUZEqtlNsdSjRnZYajuI0yIlptR+uj26fvlUmZyfnZdT798ffxdX/ie/B0bTibYi8KoNQtIyhvLN7Gm3T8AfjC+uiYORM2c2n9C07NscVt8A1Z6bBB7pDnpXbjvhDm+JLp3IcGtJp2HaqmhRvZNZY+d7SdUqRJ1iWQpvCkVSi5MEiihX7CqFG+OveevVzDOZgc0pN/+fFP8OADX6gPBXWCXs5SoyKDNeallx51HCSzcuMFsYH5KRGQEBxcWvGyN9xOOyioowKT3H2fE39wcc09b7uDe77tNlZHE9cfPCOsFlKm8u0tqqDSFaDlUTr+ynQpCePYHRmnnTzXbS9Xbid9xlgUh5tgLYULrGBCXZ72Q2rgVqhYSplkwm91yXT93TIXZaDsN3MyG31OHvjNX+P8+rN4N6Yu4lrQmKYCswVvw1E4MmLqgjEVksEFWDlWg6CnRnZbUnEJOIuZJoN952gYC8s8EhRfrJewZDed2fTSOpoMV8lSewMETZoH3a0+5zJrDcWEocCUSGhGH1PTiBiwwK2EB604qFJtfeJffpwvfOEh+qTCNiaHpa9qR/A4PwIS2jLy9K1gLU1cuYPt5QBXtvzhhQNufcPMbrnOcn7CfOMau81V+u46y+YGbnB0vOLOt93BnX/8TqbViusPXcMrLGxwLPSCVv+Vf9NCjYC2lLHfL91Pkn5jFmw7JnED6zXJUFBNaccDTTVCBCRRDOVt7y+vnDQZaVpOyZub3YQCRWMxz1s+/+mPcXr9WZGmGQrnQ9OWOO5xToH6TPDKG2tSM618xfH6Ase3XuLChbU4EzeWWZfOUjElhiSc0aWoGZe+GvfnP9svkc69pJBu0m9mBfKUFtjdVIBm5ZQq2RGNQFi5FtUhjzs/S1Kmm7uwPVMRb9XHRq+goBacn53x3vf9v5xvTklv5Qzt7EUmCCvf76bEZIN2Z0RskSbrzhIsl4PVa9ZsT4Np1ehnO7IdVL6MpHDWltLCN+bzLdurZ6wuHHHlozfIE0EUSdtfYksT3h/VCQS5H9u8YCl16drUs5wmy6cMVsIXJZc3RVnOMDS8NGjVHWsLUrByTTe6BBUVa7EQ3kTihRNLKnlxVkep51EZ2wY89qXP88gXf4c3f+Of1mVQGuVxATUrHX3ZBafpAC40iIXNstP30hXTgCnO1t3wPunr1T7PyNJMU8a0qNz8xWntRcbck+IzajdAd2aTYWdyw2bbk27NomSMgqYExQl2w5XJH9WhDvjxZq+or50u2V+mjUQ4zs7O+cX3vZfNRsqWSvIVRt8EC5HD1IdUTqMxDE11E9p+NC8L/Snn4DWHzGdb1lPj6I4T8EvkriSqIbdnkNzxthN2ux2by2fYxUOu/vopeVILu02fSxQ5H2YyGFUUbtK0DLpczD20a5g04iTYfAZlLGWqkvVFuvGdJImWjZhinwUjk5f+5718Ug1yN9gHEaC+KF53RG9ErKD20JYHkicfe4DHH/4dLnz9n4JInc1E3pzsBSWWD6VIbfV6Ur1RKj7ryk1iNe0XCVml4XphjnugIPRJqNiXie0FcJmXSOduNXaDJr9ez2O5xUYw2CrG9LXHURfPm2QFKrpUJ5g5xjONwGGQcxkX8Orw9B+/8LmH+PCHPqLuEente/EAjhytg/RJYi/HbL2kelXcxZInJ1/YkU8Y60tHXLj7GD9UUQwvU0WL/bb6nIOzqxuuP7PjmU9f4fTLO0hYMtE6MBVLoyz7JfMcPxugJetD6uU1ko5NEGfJskv6WaqTP7daKFP4d5guES8cHGlvWzORpeNTyknY7liA0WxPCo5TX8bKMo/Bx973rsISVYaiLlodykkPY68uJDtTc6b1mgsHFzk4OsRc0c5Qi9OXad8tUc8qFIbpgubCnwP9YMXlvEgvg+gDApHOuU2CLAQ51nBZzujAmEsTHWMSowq7SAdEsKIiYCso/iG9HNZe0suATuOBLzzEr37oQ3rPQg0Ai/LCzUIuVnS+kqg+pe87URVGwStuyclDW/JpZ3XrmltfeYHpALAuDsF6mW30oPac2VzdcPrUOSefucrZozsVvTSsN7THWEtEegoCBJSe2gXv5JJ1WhqGtlMFwM5gG7BL4mQhziDO5V7NULpoLNVBe+1SKI0+FWlBGrkeXbkpKlpkhM5syozkNQ30Ltlqknz8V35Oi20jaKZpoGevfcROK0JVju0uE5bJqdqz3l+v5jXUOGbqjLhrs5WYZD17kys7C6+Uyz+gaXlJFPeEvUTOec4KqpxwMya
[binary notebook output elided: base64-encoded "image/png" payloads for two matplotlib (v3.2.2) "display_data" cell outputs in the .ipynb diff, metadata "needs_background": "light"; the image data is not reproduced here]
bC+1XyWhK7VnveZjt0S+RwbthM9xy7kN4j1FJ4BQVUtYJl5lM8A4Ddx6EKMrttVOJSaaQRbOzCym5gLoYykzOsT9qZlY7Jwee8rErVhIdRPQspKhfY0OXBVdx00ZOHOFttQVy9vVxULo/mzdZ5LHktrfktCVemlafuMM7or4pSVcMaJVnDYAJsbjKinL2vvGw1yDGFkR1mLcrBYMrMUrk+l1N3FFZ8Z9wHqZWHJEPOMLItPcq3UrPQKhkpIV1molto1YsmHBmUps9E/TcINQMXXla/xlrtuG7r1rpXV7AEtCV+rl0xGfFSxk4YqJw9IIjDgx2iBMwNBSLNmZYCu6RCIF6INrOZ64oNpews0Q9lWcZihuYWtNJxWsWjWlmPv0DHPjxWfdrSW3c2OwVVQFNyg8rpx0rP+/7bZrL0hXwitCErpS+15H/k0HxnH3gNoM0nhjb+GOmVwGM0yd3oiWmgxLQO5pP9WJgjs+v5vips/ckDYvGcwA3BhcvTC5QRhkE1N1vXAOssAH7J3mwza01hXb5d4HP5V6fKkDUhK6UvtfR/+1AGEBrKE7IFtKXm5pfmG94SUG70Tr1lNAm6GWcNyajdetBdBiM0AoXjpw00LJ4rAFoNHEi0UrtN0LIWQD6DqnHeMfM6iDq1ou9/2/TybblDrQJaErdWDpGCGCQou9rYvfzcl4UfP4tl5iebisrZSiMZ2C7MJ+zRiAPS9SYBz8+FylkXQvpKbuhnOTCcANs8k8cd4yRcE66Uj/oxsODrqoD2zgvtZ3ku1KvRIkoSt14OqYj94SAa34OT6jQ27ctwxDl4CTEwa3RDeBGtabTVqycWs3UjhcCV0VyWly1LQpztNq14ahZeIsDkK4mRcmUoTT5axc0olICKArC4q/4iWhK/XKkZhlJ0I3PrAWFp7J7ewALZw7Lg5rf/8UANthfdkAtim+XLWWYtmm1UkIIStWAhPrJLjR4zdWHNqZ0SNYJafLedVIQlfqla0TLvfrT4jgFat9xUPNcrt8C9TNaInQsfY+sfnD4hbvC4atKDGRIubPrR/nT3GueB3L9j9/LmH7KpOErtSrSye9/yszQjc+H1wYIzzTBJVKCjQjwA2nxklzJcQnfhQBm3JvNQ+fi2so7RA2Oevuq1YSulKvbp16/s2R7yJ4VYGpYgiZOewHDotTMEX8tmEt2lTrNmZa20m/sJgW3FzSmWvNNRR+/TMJ21e5JHSlDj79yemfozaUnRG6AKoQsRDOHBxCOAFbz0vCFvx6CvGJRcVpzpcM+YsMTfpsDx5J6EpJAZxx3t9FwCtCNz6Iptgu+vCEX9jGTfcLJ2oshL5cXcdZMAvw/cpuAHIJ3YNGErpSUml6w5u/2P6cBt1QapAcoY771nCilkK1Ruu1S8iu3+x/H+zH7fLLQ7oZTc7ccPBJQldKak9adebnI9/ToAugTgWlIMOKYK5H67VLAMiMdKr8uF1ZWd/24JWErpTUC5U4u3MadEM1F8+OfA/B+7MNn3sZeyd1gEtCV0pqb+js1/pFZ0TwpkFXAvegl4SulNTe1Nmv/VQbvHHoysEyKSR0pfa1JsYnuO+++zjzrDOZO3fu/u6OlNS+1ozQTZleUErqxater3HSSScyVa7TalQxjSwZM8Pw8A6G5gzxnvPfw5WXX8HSZctQ4rGtUlIHgaSlK7VX5HkeV1xxBf/2ve9hZDJUazV6+3vbabKmmaNSq+G0LDJmBlVRyGVzZIwMp5x+Gn/2wT/jdStXouvSDpB6VUi6F6RePq1Zs4brb/hrpqfL9Pf3k8uYTJWmMIwMGdPEzGRpNCq4toOHgmZomNk8k+VpctksmYxBaaqEpuk0W036evtZsvgwrrvuOs54/Rlk4hNDSkkd+JLQldr7Wrt2LW9/+ztotVoUugpMjI1jWTZDc+fQlc8xPj5JJp9jbNcYxVyBvoEBbLdFs9VAU3RqtRqO7VDIFVF0F9v1wPXImXmmK9P0DfbRrDfB8ZguV1iy5DCuuuoq3v3ud9PX17e/T19KaneS0JXaexodHeX0009namqKbC5LV75ArVpjYmoKM5ejq5DHzOZwPA/dUHEdF8uxKU2VcG2XQj5HJmfSqDfxPPAcm4yZodW00VWV7SPDZLIZZs+ZQ61aQ8EvN6u4Ln29vZTKFcrlEtlslsMWL2H1+edz1VVX0dvbu79/GimpUBK6Ui9dtm3z7ne/lwf/8HsUXCzXDSCaZapcwWq16Ontp9VqoOsqtuuRN010XcfQdcrTFSr1OsVCnqyRoVyp0N83QNNuYugGLh6K4zBVKtHdXSCf72ZqcpLpSoWuri7slo3ttFAUFdd1MTJZivk8jXqDSrWMaeYZGBjA8xzOO+88rrjiSpYdLgfspPaLJHSlXpo+9rGPcdttt1GvzzDV+POUqmoUC3n6+nrQFY1ytUa+mMP1wDAzWI0mZq6AY9s0mnVymTye55LLGXiKRmmqQq6Qp16vUq9W0QyNZr2JqqkU8nkUw6DVqGNoBq7r0GrZ6JrK/EMO4cwzXs/7LryAlStXoqopM15KSe09SehKvTh997vf5YILLnhZ2s5ks2QzGQb6Z5MrmJRLJVRdRVM0dE0F1WCyNEkhl6PZbFAoFEHxKE2XqJVrHDJvLqqexbKbNFsNUBQ8Bxzbxshm0HWVjJnFsVxajQZ61sRuNRgbHSOfy9Gybd757vO4+IKLOOOMMzBN82U5T6mDUhK6Ui9MGzdu5Nhjj8VOqx+7l6UqKt3d3RgZnYH+XjKqwfjkFJV6DVVTyeVyqIqK57mYuRxT09NkDQPFVTBzWWzXpVKeRtcNBgcH2TU2hmM5OK5NaWqKrGkyd9486rUauaxJs9XCajVoNlsUurvJZfK4OExNTtDf3cfSI5ZyyaWXct5551EoFF7285d6VUpCV+r5qVQqcfTRR7N9+/YZt1E1A8918LwZasy+CKmKQk9PN5quU+jqwtANpiYn0VQFPZPDMHTspkW1XmFw1iDTlQqKq5LLGViWRaHow7HRaOC5Hs1mC9tukjELuJaDh43luAz2DeJ4Doam02w1MAt5JsZKmBkD23OwLQtcl97+fsbGxjEzWQxVY+4h87j08kt557nvZGhoaK+dt9SrVhK6UruX67q84x3v4Kc//WnqeiObx8h10z13MZqq07fgcAwjg66qtCwb13VxPY9MsRvF9dAzecqjz6J4MLLpIRqlcWqlXXvsRyGfp7enh3yxCxSX6nSVUnma3lmzKGgaZjZD3XaoNxrYVgvHdtENDVVVKHR1YzebTJZKqB6YuRw9fT3s3DFCT28flm2RM02mJqbwPD+ioqtQpNFsoqoqtmWh6QbZfB6r0SSXL7Br5066inks1wIXisUijmUzNj7GIQsXcuWVV/Ced7+HJUuWyAE7KVESulIz6x/+4R/48s1foVKaZmx8tL1cNUz65i1laPlJqBAU7lZxFBcFrz25oueCZ9komUxwqSkoeLguqJriz6yra6iaBprOpt99n/KuLal9WXTIQlqtJhkzS7GrGzyXarWC7Xhomo6HS61Wpbu7G9t1UVUV09D
xUDAUjUarycjIDobmzMGyPQqFPBOTEziOQ9bMYFk2hmHQbDToKnbRaDXJ5UxqtTrdPX1YjTqKrjM5PoluKBiG73euVKuYmSz1RoNCsUi1PI3rehSLXdTqVQpdRaxWC7OQ582rVvFnH7iKY489Tg7YHbyS0JVK6le/+hUf+LMPoKCQM3NYVouRnTvIDR7BvOPfhNeogqNi2RVcu4XiupRHh/EUlUx3HwPzl6Blc+iZHNWdW8j3z+HZh35Occ5CcoV+9GyB6sQOUBR0VUdRPFQFPNVgbPvTtColRjf9sd2frmKRYr5Aw2qRNbPYroOeydBT6EbTDMrlEtOlKebMGcRzPDRVYbpcJddVpNVsksvmaDktWk2LTCaDoikojks+V2RiaoKsmaNcnvb7oHgM9A2AqrB963YUYLrcKdeYy+XJGAaKAjnTpLevn2azRalSAg+mSyW6Cnlmz5rNrolxNFWjUq3S29tLV1c3KC6ObVEuT6MoGd70+rO47AOX8/rXv55sNrsf/tpS+1gSulIdbd2ylTed+QaqjRpdPd1oGQPXshidKHHYyrPJDi6lPj6CbdexFY/W5DizFr+GytQIqqJTHJxHZXwniqoze9FRNGvTNGplpndtRsvlmHvE63AqJXZtWk+9UcZpVlG0DL1Dh2E5TXTPQ1U1Go06peFN2LVR3No4juPQ393LdKNGs9Ggu9hFq9nAdj3mz5mN5bhMladRAM91aTab4Cn0dPegaAo4Hrqhh1zJfAAAIABJREFUUa83yeZNyuVp8oUuVMWlXmti2S0GB2ehqQqg0Gy2mJycYGxsAj/9Ys/KmBkG+gbo7e0H1aPRtBjbOYrr2Xh4uLbDwKxeWo0WRtakWOihNDWBqoCZM2laNs1aAxeXN5z1Jq644nLOPPNMikU5YPcqk4SuFDQaTf70rW9h+44dKK5L07HJmlkMNUOr1cToX0z3khMoT+zAsZpMbn+Krr75zF58DJ5js2PTWpaccg6alqE2OYbbLFOvlBg49Gg0VcVVPLau+w2urjNr/jKGn/gDC499PfXJUfRiN/Z0hfLkdpxWjdzgXFRFR/Fg8qnfoTvTTJemyeXzeFaLbD5LZbpCsbsXTVOxbYdmq0WtXGZgYIDSdAlUcB3IZAyK+Rw7RnbRbDbpH+jHc23MXJ5avYFpZKhUpsnl8r7fOZPByGR4ZtMmLMva8w8nSFU15s+fDwqYZpZKtUq1XMHDo6vou0NK01OYZh49Y+AE/m5VUwGP/sF+yqUyngNmwWRydAIXD9PMYhhZ5s+fz5VXXsm73/NuBgcGXp4LQWpfSEL3YJbrunzomg/xgx/8AEVXKRZ7cB2Llt2imCuiKiqK6lEYeg0MLGF6eBPbNv6aqV3DoGgsOent1Euj5LoHWfCa09i1aS3ZwgCzDj2Gsa3ryXcP0qpV6Vm4FE3VwVNQVBUFsBwHq1mmtO1JJrc9xa6tGxmct4y+hUeR6+6jvms7zfGNeLUJH1bZPIV8nl2joz4gszqO45HVNUqlMkNzhpiYnETPaCgotBotNENH1zR6enqZnJokk8vSlTMZHhlFAfK5AoWeAlbLptWycCybHTtmjs6YSUPz5tHX0021UqFZb5DLF8h35RjdOUa9VsMwM/R095LL56nXmxi6RrNRx8VFcRQq1Sp6RvcB3dVDq1FjqlQil8szZ3CA8ckpbNel0WgwZ85cKuVpFA+KXUUuvOhCrvzAlSxcsHCvXx9SL4skdA9Wffvbd/GJG66nWq2jGTq9vT1kdJ16rYFuZlBVHU8JLMZshq5lb2Tz2l/iNMqo2Rzjw5toVKsAaJkijlUDz6X3kOXku/uolSY59HWraFWmmRh+ErPQRzbfjd1qoGdyuK7F2LPrmRp5Fs9p0dfXh6dqDC06ksIhr0HVDEbW/picqrUL5zSbNoah0bQaNOo1evv6mJqcpre3j1ar7rsPclkqlTLZXB7FdXDRUBUPu9Wir38AVVHBA0VxqVs29XoNq9VC8TxGdu7CzCr0FA12jree1+84b+4QPT19lCZLKCpkM1nyXQXqzQZWq0W91kTTVLp7usDzmJyaJG8W2DW6C9M0MQyDjJ7Bdh1arSaW5WBkdHQVevu6abYsKtMVNCOLpoLrqTSbDRzLon9wkHqljG279Pb1s3PnMAsWLuLd572L965ezTHHvEYO2B14ktA92LRx42OsXr2acqWCaeYY3bWLgb5BTDODB7geTEyM093bi92y0TWNlmuh6AXmHHc2rqdQm9oBuNSmp9n17HrKO5950f0xzC4WHfdG7No0mjNJ/5JTqO3cxM5n15HP52k2G2TNLJMTE2SNDHNmz0bTVVwXQMGyWigoWLaFo7gYms742CTz5gxhuzZjY2MUi0XfzeG5VGsNzJzJ2Ngo+VyOvv5+vNY4l547yOte08OmrVXMjM4fHp1kutzigUemKFdcMobKjtFmu999A70Ui91oKOhahtL0FIqq0ajX6OvvpdWyaNTr9HT3oOezVGtVFBt0Q2N4+zCz58zGsmx0TUfPGOB5FAomtm2hKgYTU5Pkc35d4Wq1CnjkcwW27xhmYKAfQ1WwbJucaaKrGWqWX6Utl8lgGCaNZoPp0hS6YTA0Z4jTzzidiy68kJNPOQXDMF7iVST1EiShe7BocnKSc97xTsYmx9HwaFotNHxf5Pj0NIauUTQLNFoN8oUimqIwsmuERr3B7DmzabZa9PV2k5nzGnoXnYht17EaLSqlnXhYuLaL4qq0auOMbt7A9K7NKKqObuZpVabQzSLZYh/Fwbm4tsXgwiNRNQO3VUO1p1ELc2mVS6jWJF5pGMVQqdaqFLIFqtUatmeD52EYWVDA0DUMM4tj2eiqTrPZoNGsYwbRFkY2A55HvVqnq7uLUmmacrnC4MAgltOir6ePer2M1Rjj41cu5qnNFQxdYe3GEleev4D+niyz+g0aDZe160sMDmbYvqvOV789QsPK0dc7QKNWp2E10DQNTfNQ0Kg1GigomJmsn7qs67TqTbqL3TTtJrZrUZ6u0d3TjaqqNKoNqvUqhq5SbzTpKvr1Jewgq65RrdLV3Y3TckBXUFCoV+voGQ3DMPBsB8dzsa0W45MlBvr7yea7qFdKGLpCJpujWquTzWZoNuvkCzlsyyOfy7H0iCP40LXX8vozziCfz+/vS/RgkYTuq12WZbH6fRey9r8fIpvT0TXfF9psNDHzJs16Hdf1yGg6rqFQmvDTY3u7ulE1Dc/zSyd6CoyNj5EvdJHL5fysrZ655GYtQnGaKN0LsB0XT1Fp1abxgmpfjtVC0bJk80XMQi+N0ghmdx+ZfC/lsedwJ7ey88k/Ytktent6GR7eQX9PF6h6UJ2sB0cBXIdqpYpu6OTzRTzXRlFU//XZdmm0Gii6TqNpUStXyOUMunt62LlrFNf2LfbBWbMBj+lKBUNtcd0l82haTbpMjaWHFZmcbDBdsZistnjquQavPaqb2X0G/X0Zpss2X/vuGM+NmoyOToLiQzGTyZLJauAqOK5Lo9HCNLM4joPqQtOxcFwH3VOYmppm9twhVBR0Xce2GoyXpt
A0jWwmi6bquJ5Ls9lgbHyc3p5eCsUi5VIJT1Hp6+vBs11KpRLFni5cx8YDGvUmmUyGTCZLo16n0ajhOB4Dg/2oqsbE5CQqCt29vVi2Ta1codjVxeTEOIWuAq1Gk3whj5krcNQRh/OOc8/lXe96F/39/fv56n1VSkL31azPf+7z3HLLLfT19aJnNZr1FuVKje7uIma26IdYYdNstDBzOTwPmq06GcMAVKrlEngexd5eKuUStuVg5nKMj43jeB59Pd3kCzm0TAbHdmnUauR6h8gW+5geHyGbMag2LDTDJGeAphloGrSmJ9E18PAY2TlCPpcjZ+ZRdINqbZpWs0Ffbz8oGnbTwaOBmSmCpjA1PUHOLGA3m3iqQT6fQ3FdNEXDUT1s16VWraIpHl3FLur/P3tvFm3bdpXnfaOY9VzFLs+5hXRVIUBYomE5MnJkW8SJbQo3Ycc4mASIcYoG+CWhNSD4KS4gaRCCHUHDwjKYuBkMIthIQrJABQIcEAqFkJAEutKtT7GLVc56jtHzMObeFyLsUEhX1en35ey1715rrrnW6KOPv///39uWpmvI8zl9U5EVJV2z40v+jOb+GxmzXPPCZ5UsDyIO5pbNZuBX37cjyzXOQ5wotjvPd/6zO+xbT9cNnBwdYq3Fi+DFk2cJXT/Stj2zWcngezQKP3j2Tc3BfE43DpyfnXN4eMSu2pOnCYoRg8ErHaZjdB1pkjHKgFaG7WbD0dEhIpq+b9BRRLXbkZU5282OJE5YlCWXqxXOC8478iyj63tk9BydHLKr9ozDyHw+p65r9rs9RTFDvGexXDAMPVYbzi7OSPIMxhEvcHR8zG67Z3l0g5s3D3nVX/kyvvxVf5VnPetZn+iv9ad63Eu6n47x5je/if/hm/5HuqbFGEuWhaOjCGz2G8q8xGjFvq6IEoO1MbefuMXh8pim23F0eMwwOrzz1G2NGweKsiSLMkRJgCBswma3pShK2r7BWgPKEicRCGg8q31FmRfsdzvm5YzzywusNhRlQdu1rNcbhmHAWMv+dwkQriJLEpqu+z2PzWdzMIokioiSlOXBAcopkBGPYvQj49jjOsdmu+FoeQQW8jQ8VxTHdG1H33cURUlTV9g4xrdrXv7SIz7neREPHhvuuy/DGMU/+L4neOyOJotL2nFA6EiTnCSK8B7qpkKU4eLuOQ889ABGFOIF8FR1jbGavh9p65rROZI4BUWodF1P14+cnJzixhGlTWj0GaGvKzbbPaBJIs3B0SG3bt2h7zpu3DhFa0uW56wuVwieo+UhTVOhrOXi/JIsS4lsEHHYKOLu+R2UNqR5TpEkVFVDnEZoFczkcYIYTb3fM1/OqKsKlEY50JEhy1O6ridNMjabNcdHx7zqy1/F13zNV/O5n/uie1LnP3jcS7qfTvGRj3yEv/BF/wmD8ywP5igtiGhATZMYHG3XorVm6FuSKCVOU7yH7W7N4mCBCDjnsLGmXu8p8hlKg1dhTkPfj4h37HdbTk9O8KIQPNprPKCnhlXbVmRZCd6jleFic0nTdVyen0+y4Y9dzGczZrMZZR6qbqUMdbVnGB3z+QKrFE6NrC7XDF1PHMeMTsjyjDTL6fuWbuxJjMEYi9GGqqkxWhHZGB2ZwEKING3dsiznNEM3JbQAwcQ2pqkb4tgwjD5wiIeRbhxx3qFF4b3ncHlA1dfEsaWrB4y2NH3DrMzomh43hs+onBUUZcF+V1FmJUobnnzqSbIiJ0njQInThu1uTRzFHC6W6CRhtVmBh+OTU8R79vsteVZwcX5OURT4caRqKoyxAQLRmqZtiExMksWU5Yymrhj6gfmspB064jSjHzqyNMMqw1O3bnG4PKTrO5x3bNZrTk5Pmc/m/OmX/2m+8eu/kZd8/kvuDRP9/eNe0v10iK7reMtbfoZ//I//Ebfu3KLe70nikID6oaOva8qyZNdUGCIiq3HegwhpmSEOlNfoSHDjyH5XkZclShR935MXGVXXkJiIrumwcUQcx4iXMDJnHLBxYD+MfqRvO5I8JDMZRy4u16zX64/7fSjLWRjnc3qD87M7HB8dYyI7XbNltVohAxydHDKMPVmS4VHgR7quJ8oylBcQQfBst1vSNMP7IN0dvKcsS7quIY0TLleXLI8P0RjEjWy2W8q8QLygtWXwAyKC0qCUoSwL/OhDo88mODdQNy3D0DOflSRpzr6pkWEkKjKUhzu3nsQaCwqcGzm5cR95HDGMI8MwkuYZWinapkVbzcXFJXEcY7QhT3IUwu3zuxzdOKKvWqIkC5V97+i7llt3nmK+OKBIEy5WK/Iix6LZNxU3btwELzRNQ5lmNEOHtsJ+31IWJftqT5JlzMsSbQ2b1QpjDE3b44Zwz48Oj3nuc5/H133d1/GX/uJfIi8+4xt295Lup3J473nN97+G17zmB3BupGk7tA3HSYWhbWviKEJrg/c+SGIVaAeIZsQH/wOl2O0DRhjHFu9H7pydMS9n5EUJeATNMPT4YcTGKdooNMI4DIxOGMRTVzXHx8doA2PX8+ijj9G27UdfuIbjzzogLmMUwuELDkhvZKTLBDXA/qzm1q/f4eRFR5y/75I77zlDW0UyT9jfqf9/74u1luVyiWhNkWXMimKa05bQNwNZkREnMZvVivlyTlf3jMNAnKYoYLVZY7UmzMN009G9ZV9VzBcFNk4nnvAS7xyX5ytGN5DEKXmZk6QJ1a6mqSvyoiBJUpw4+nFgdX7Bcj7H2oRuGEgmVdzh0QGj8xil2bctfuiJ8xw/TbiwNuLi8pwbpyfsq4qmaVgsD7hz+w5VtSfLMspZDl4TpTF9PzAOjiLP2W7WxFmKEoICToIhz9n5GVmaEccRVlu8V7TdHkRh4wirI+quIk8SBueI4wzE42VkdI5qVxFnKYtZSd8PYbNRcLlaEecpavCkRc5ms+H+Gzfo+oHtfocbHX/ulX+Wr/nqr+WVr3wlR59ZCrt7SfdTNd75Cz/PD/3QD/Kud/8/4BVdXRNnEUZp6rYhMpY4SkI1kkSBXuQMKEfT9vjRkWaB1qQAccLoHQ6P6wbyssT1Pd45MIo4SRER9psdSZoTJ5ZxasVpUfRdwzgOlEXJnbt3ODs753d/h+J5xBd+/Z9EWU19Z493gmiFEkEQMBrtFQh45VAoUDrgt4VBobGzmPaiZffUnt9+84cY63+/L8JDDz3EbD5DHOy2G2JrSZMUFRn6LlSl81nAlsuy4NHHnqAsSspZCRKYCQqNc47OD3T7iiiO6LuOtm7Z1RVHBwcsFvMAsRhFbAzj2JPEccBKFXRdT9M2eAEtnjhOSeIY56BrK0Q0Q99g4piuaciKOU1bE8cJbrKYTLIE5z2byxUmiRm7DudGDg6PcKPDTLjtMDjapg5YstEcLA8w1nBxeYkbB06PT2mbDh0Z+mEgz1K6vqOuG0SDDJ6DgwVRHFPXNTay9G1P27ccHh0ytC3eKYzV3L5zl2EcSOOUfuwoyhmJtQiw2+3J84xxdFxeXnLz5k36oQ+2mnnBfr/j8GDJvqqZl3OqriGLIj7rs17IF3/Zl/DV/9VXs1gsPu5r6
BMU95Lup1r88v/9S3z9N349o3doL+DBKw1KUKIIJose0QrvYWh7bKxZ72oWRYExCoegsSgDbVuTxAlaBFF2cmD015+wKHBAXdXkSYqIo+96kjRlU22JtKEoSgbnqfcVSZKRpJaL5ozDPzFn8ZwZ+WHO9okKJYJXgugwFSKaxSgvmEThnSJKLdKP6NiQlDEjQnu+Z1j3jOPVdzVYQmYnKSaGR97+BGe/dUF98Xsramstz3vB84kjw3azJ40S9k0dhBRDTxQF5sPoRoyJ6PuWYlYGubKHtuuwJmJfbVnO58HPAcEPI15BW3cgnqIosDamH7vwGJ48ydFJzOXqkjgyeDeSZTnWxrRdQ9v2HCwPEBH86GmGwJ9dr9YwehYHSy4vVyCCtgrvHd7BvJzRDT1pnjL2I8ZooihBiWO/qxnEk+cpIMHIZxi47+iEqq0YlcJGEW1V48RTFjlGG6w11HVFHMVYbdnsqsmE3nOwOKB3I01bExkL6OBbPPSkSUrbVlS7isPjY1aXFyhtmOUFUWy4e3ZBbCKSNAEjtG2PMYa26Tg5PmbsHU/dfoK8KChmM8QNZFmMiTPuO73JW9/61mdmQT3zcS/pfqrEdrvlB1/7T3n9G9/IE489ibGapu2JtSZKo9CbUhonQrXfspgvwoekBM2V9FUQCXxSrYSmbknKHPAobVBKMbY9sY3xCGiFjA4M7LY7yqLAGoP34FG0TU2e5WAmyEIplEB8qjn6z5Y0u4bqrMWLR1vDwXNmpIuUKIkY246kjNBW4x006yAqSGYRHrj1G3dYPHsJXuh3Nd3OsXtPR5815DczNAptFMVpwRO/dJuzD12w/vD2+n4ZY3jwwefg/UhkLXVdcXh8eO0kJsA49Ow2W9KioG1bFnlO54Tbt29TliXLxQxB0Mpy+9ZtTGS4+cDNUJkPcH55TmQjRmBsGw6WB9RtR9u1ZGmGsYbdbsfh8oC+61AobGSwOsZrx35XcXhwwGp1SZ4mtN3I4mCJMYrVes1uu+O+mzdAwdh7vBaSJOH2rduICDfvv4EmYr/bYYxCG81mvSVOYhITcX55zsHhIcoorCjqPkxX7vsREXd9r8ZxpCxL4tSyudwzjiNJGpNkCWmcM3Qdm+1qwp8HyrLk5OSYzXqLVjpwies9sTX03YA1MXEecXl+wQP3PwhKWK0ucB60CCjBKUNsI7xzGA1V29DUDa94xct54xveTBzHz+DqekbjXtL9ZI+2bfmmb/om3vMb72W7XeO8x1iF9wolHkEYR4fRFq01okCJB0JFK6KIlAIdjuIyyWfRQr2vUMbggVgbojgCwnNrJeG5UEx2BeDAiWO/3zMv5yilEOPD77xCKUU0Uxz9pyW7dUNf9SjR2EgTzyOKo4xsGWPyBC2wenRNUsSo2HD+/gtufv4pKtIoJawf2RIXEcVRiqAC/NEJdz5wTn6Q0W4r8Ao0zG4UrB/f8b6f+CD13ZYkjnju856DsTFdO5CmMUprRITdbk3TO/IoYVbmeKUYup6qqUGE5XyBNgrnoG5q8jhFxxFNU9OPLdaEZFDkOaIENw4oY+jbDu8Js9m6jtVmx7Jc4LQjSXPq/R4nnsW8BA9GKeqmJc5SrFZ45xj6EYfQtyNZntB3LcMwUncNx4dHuMHRjT2zcoY1kxQ6IDxENggrLi4u8F6RZQHG0FZzdnZOksWkSYI1hsv1huVyCd5zfn6OjTOOjw/Zbvb0Q0Pf9qRRTJIaojxnv6tJYktbt2gboY3GWsvQDXhxgFDtq+DMJjBfzJjP59w9P6fM5wiOpq6J4xgbJWjlKWYZ9b4lilLqeo+I8P3/5Pv58ld9+TO9xJ7puJd0P1lDRPj2b/92fvzHfgyZfgZ/TQG7rly1gFeIEvzgghduMbs6hTOOI9Wmpixz4sggBrrGY60Ca9iuNxSzDKNjRDwWxb6pyfISLQoYabqeLE0RBPGgDOA1V9RMN47UbcOsLCheULB4ecHlkyu2j2w4f/8FX/C3X4IgtHdq7nvJTZRRiAvJWikBBGcE5UApzdTfw3sVjtjA7feeh+TrhfqywSSWZBacy6rzmpPPO+Z33vAw6glLOQ/jebQxbDYb8jJHiSdKErSyIH6COjSj8/RdS56mMG0wGkvdNCgNaRpTNw0oCfPZNnuyJKbvGrpJdJDlRfhMlKCU4Bw0VUNiLcYa2sn8pihSnEqIIsPQD7hhwMQRkY1wbuDOrbsYQ2gCAkkSIdqiEepdTWQMVVMxK2Zs9zuUjajrPX3Xcf+DD5LYiLPzM+IkxaaWLArc2r5vcWPPMEKWpeR5xpOPP0maZRRlQWwsShTGGogUdd0wDgNNHXDsLCsQARsnjEOLOMFqE75zXnCjoyhzvHM0wwAitE1DbCPm5QwTa87PV1xcnHN4cIjDsygXbLYb7j855uD0Bm9961sxxnwCVtozHveS7idjPP7E4/xfr/sJ/ulrX0vXdKGpY6abLj7ABUrhCPBrGI8jDP0A1hBbM1WfGs+IUmr6pAN7AS9hbA4gGJhsA1WkyGyGMpN1txNECaDCVAVxaEBUyOjiBRGoqoayLEKle2CZfU7E6rcaLh5b0dYNQ9rxwCtucP8X3E+UB7qZNZ6zR9ZEiaG8bwai2DyxZ/FAwJ2NAucMpLB7dIsfR4qbM9pVzRPvukt2mJGdJBgFJjLQa85+Zot0nrFvabuBOC3RkabZbsjz8G/lBdePOKsR50ijBKVCFXyxugTg+OiQzXZHkiQkaYR4T1M3DL0jLzPSJMJ7hYzCttriEGazGVpr7ty+zfKquTY6yrxku9uSFyX1UJFoizERdd2QFClaPNvtniROyIoMp8CPQtfW7PcVsY0pZxmiDLvthjIviOIUtCbSitt3b6EFZgdL1pdrbt53Ey/QtwNKeZzz7Hd7mrbm5n0PEseWum5wbgAljMOIUYYkTfB4zu6cEcUpeRpxeHAESrHarkijhH7oMVZRVy3zxRI3DrRNR9cFv46Li3NOjo4Z/UiSxPhhpHeepmlpu44bpzfou5b5fIaNI/zoeO1rX8sXvvzlz/AK+4TGvaT7yRR379zlu/637+ZnfvZnccOIwoMCL4apjcN2u2VeLjDKo/QkehCCGYoIMiiqticrIoauYxQo8iwkWq1xQFNVxNYQRTFN22CTFAMordDTJztOI3S0hKraEV5PeQENoxOsYTriByx3GBy7esMDL73B4k+VrN5bMaiO1SMbVo+uaVYtpy8+5uaLj3jWS+5H5zaot6ZqVymNNgovMDYjSkPfO7QRfON46jfOOH7eklu/s+HwuXNUp4gLxfzBnId/8im6D/fYKMW7kfV6x3KxoO86BucYXc/B8gilhc4PmBFsHNNWNVmehYrbC6INXVsHqCBPUFhQjmHoiW3CerNlGHq00rjRsTic0dYdXjSL2Yyu72ibhiwvGEdHVW2nx1q8CLPFjDIrOL8451kPPkAcxfSDp+1aojh8JuuLFcuDJdoavPNsN1uiOOLi/ILDk1OMFqLIAkGE4ZxgrKFvB9qhZz6fhQ3R+TDFWCm6bsBYTVmWnJ9dBIy+zFkeH1Lv9ihj
UVrhR4d4IYriUOW3LeKhbxsWi9A03Ox2RHGMsZpIRWzWq2D0o3XAfLOcKImYL0p224rNOly/1pAkOVVV8SVf+sX80A/+0Gei9eS9pPvJEF3X8V3f9V286c1v4dYTT2EseO/Isiwct5UKBuABU0Ah9MNA0zeU+fxagikqkPqVVwzDQN91FNkMlFC3VahGAU9ouMzm5VQ1h+cNH6sCH470XklgQ4hClEbJiCgYB0GJYK3FKRACBqwB74OnwvJlGcWzc9q2o7pbo5Snrxy//ZaPIN6BErr1GG6AgRuffYhJDWkRcfH4jqSMmd8o+NDbHyM/zijLkrmaEc0i4sOU5ecXzB7KOXvXBZv31uhR0XWONI+D74EI69UWNw7MZjOarmW+PETcwG6/J4ljrI6IotBAFO8QbTAIu6rCWDv5ITQoURRFwfn5OYvDIxSCUVBVFVEckyQRzkPftIj35POCfdXwyIc/wv93qVhjGF1oYkXGYmPL4dEhbvDEUYKIMJ/PwCqqak+ZzVAmQEt379zlYH7I6AQdBcOc2FpMHLFZb+mHgTiKWC4XOIRdvce3jr7rKMuSfJZTbXesNltiG2GNISnSqcmng0qu7cnTnF1d48WTRjFpmmAjy76qsUnYzLe7LSeHJ0QmZbdbkRcZ/TgSRwnrzYrT0xu0fU1iE/qmY99U3Ll7l5Mbp/zsW36Gl7zkJc/I2vokjHtJ9xMZ3nu+9Vu+lTe9+U2IUUFKK4JGQp4FQIO4gKfqQGxXTJhnoCeg1UQdQ4V+mVGIUmg8SjlAhd+5QAHzylxLcY0EloITAblKoKFDo0TjlWcYR7RotDFUdYVzA4v5/Bqb9YDyFm1kwko94hXGKNLnRRy8fIYfHZsn9/g+VMxudIz1AHvNmMLYdbRnDcNqxMwVy+ctSYokXIdWsArXkx2kzF8S8/5/8zDucUWZzPDaIG5g6Aasjaj2FQeLJRPwHRqCSsKkBgCiIPcQQab7LYDzI2d3zwHFyelJ8O0dFB6PtQZBcE7Y7TYsZnMiRBvmAAAgAElEQVQuzi/IigLnRtquJooimqbl8uLyj/R9yPKMZz/neYDD92PwCHYjaZrjZAjfBS+0dU0xK6nrDj+OJHmCFxAcy9mSzXqDtRYT2SD3TgrWmxVlkSPakOc5bT3JnJMEcULfddRNhY0TEMV2swkTlZOE5dEB+KA2jOMI8dA0LXXbMHY9x8sj1vttkIafnrJvWqzVxCbCiaNtG8Z+5JVf9Epe97rXfab7NNxLup+o+K7v/E6+7/u+j1lRsKtq4jwi0jHoSXAwYalOVMBtFfTDgPIQJVPDQYIqTJRCXEi6ooVqwhC1sSEJBwQg4LMScNjActBXtS2KkFzHcSDPMtz0d0qHStdPPGB3hQ/7KWEpPcEggqDxw4iOQkNKYQILoKrI8hSTW45eNmf2gpTNB/dgPIvnz5FEUDZi2A3szgOfd3aSM+498bHFdY7hjsNtHRfv3iNtuF43jmzrmnlRUrctRZ6E0e5iqNoG50ayJMPoMMLHI3RNTZamKK0YxuALobVGaQ9i8Eroh56uaUiSlDhNcDiU9wydR3CkNg44sBGauqE8KTn+CyWP/sqTfOTnHifKLO36DzZ54ipmsxkP3n8fNk4RJVSbLUyKuvOLS7Q15GlOVdekNiKfZ5ydXTBfLkiihLPzuxR5TlHk1LuaOLFs9juaauDmjVO8dlhtaOsaLwptI8ahZz6b4/zIdhMEDXVbEUcpbd1wdLBkUI67t85YLhfMZiVKQd0OaA191+IFqv2e05MjVpdbRCvyLEYry8XFOffduJ/WDfz069/AZ3/OZ38MV9CnbNxLus90vP+D7+cf/sNv5/2/9X7GoUNEoSUMJ0R5nAq4qlcwFakBHmhqYhMHExEtiDJBwzBVvvvtjiQNgxXRVw0zAlVMgoBCy0QzU4TZXXGCsnbi8QYf17OLcw4Xh+RFhp4UY47Q6d9v18GhzEYocYiEilppRdd1iBtwXpEkCcZqxEPgWqjQmLuCSES4OL9AJ4bFcka1rclnM+JcM3SC6wYcQhxbcAFrRgV7w6EbKYqCqxNA1TQBcxZPlqdYpambjrwIUtzIJCRZCsohKsAConRQ2klQuu32W2wckZggZFitgvnPMIzghSTN2ew2HC0WPHXrNqenJ/RjjzmwnP5HNyg/K2H/xJbTFx2H6Rs9uN4RFxHnH17xwTf8Nnffd/H7rpo0SXj2Qw8hAv3Yk8YRWkdhskcckSUx221FsQjCDT95ZnRDR9e2xHHCMHYMXU+S5TgRyjzF6Djwe/3IZrVCiebw8Ii6rvEyUtc1s3JGkgbXs7t37nJ8ckw/eLbrS04OTnDKc7lakSUxTddycHCI8yNJnDH0oanWjQOxTdCjULVN4F2PA0VRsNlVfOVX/HW++3u+5xlaXZ8ScS/pPlNxuVrxvd/7av71v349Y9czHcrResqOjsAnUMLgYLPbcLhcYghyUiUKFTIQQSQrhAPvlYMYbHbrqYtuEDyGkGAEH15HQb1vwWrSKJ44uIHy88gjj1LVH+1roJTi8OCAk5MTtDZUdUORpERW45UJ16Y94gxGOZwKNC+lwiQFmeALpQTBoURAbEjC2odGnQqOXJqI7X5NXhbghF1dYbTCjWPAYIkR5bDWsq33ZGnKMHQczOYMCNWupsgSbGxxDu5enlPkOdpDVdWU84J6X5NGEfP5HMGw2q4xGsrFAu8d3b4iKXKMMk9T4iRsG0pr1qs1y/vnLF86Y/niGZcfuiRfFOSnJVhP9dSetnKcvHCO9ob1rR2R1SRlxC+8+t1cPLwGB1mWcXx0xMHikH7sw2lEay7OL1gcLAPft+momz5wWuuaJE3ohxGjFHES0Tc9Jo5IJv/jrm3ZbvccHR+yqzZ4J5TzA8T1aGPou47NeoexCoVhebjA6AjxA/3Qg2jarsV5T5ZmuL6jdwNoQ1PXLJdLrLXkacw40fcuVpfkcYa1hjTJwkbsPVW14ed+/hd41rMefMbW2KdI3Eu6H+9wzvHBD/4Of+9//vu87wPvCxJLkUlRFUS74gXclJzQCAMiE9FWgSYYamsjExwQbr8aFBKHv/USOK5yxZbHT5xXYb1aUx4s0FfQgIZAaG84P79gt938od6TUooHHniQg8MF3o2he24su21Qwmml8TqwAdQEQHsUfghTFIyJmC4NvGO3qzCxIY0TrLFBJiyaUYS2qsjTGC9CO4yUeTkdAUIFLWrSKU+YtNcK73qqtmW5mAevXcL7FTzbakeZ5RgbgQ9iEQ/U9Y5Ig43zYP5iBKWFcfR0bYuNbRgi+aDl9BUHpMcpWikQz2Pvvs3pi47Jihil1NSADPd69fCK2++7y4d+5hFc4zk8OsQaw2w+xzvBCqR5RtOHGWezxSJwmJ3DxNGEA3mQ4JER8HbwXlitN8zmM5Qy7Ksti3KOKIWXwECwk7nNfl+RZAF79jLiPWhlMEDTtiyXC9quZbVahUbc4gClNdqG1zFME0REMcpI7waSLGHsR2Jl6PqOuEjR3qK98BVf9ZV827d96x990Xx6x72k+/EKEeEd7/w
5vuN/+XZuP/UkXsw1HUumBo+WsDj9hAVMJmA4CSir0jIRCgS8nnDT8Jn1fY8bWvK8DA0wDd4NbHcVi2WQz/ohLFzPdMzXV8d72GzWPPrY49fXa5OSF7z8r/Dsz38lrmmpVrd49H3vpLq4xfbOR37Pe1Na84Lnv4AsDfPKxHuUUjRdQ2QTjDKAAiNBHecNooXL1SWLxQKrwrSEwQ3ASKKSSbLs8V6IbMCCxTsGFe7O2PVoG5FESbgvGhgGlLbsm4YosrRdR54WKBOgF3CI1lgFHoMbB5q6Jp9luN4RRfHEBvEMY5jC60WF6lwJ+82Wtmk4PrmJmaxhVeRZtxsWnzcnWRoWDy3JD2P6dU9ymIFRVE/tmZ3MqFYNv/x//CqmiTg5PSWyFtEK0Chc2CR7Rzs0NF3HYnEAStDKIM6xryqc89fDOfMiI44Swu4CXd9yfn7ByeERcZ4xjD1dG3wcFHbanBX77Z62HzhcLGjGhqYKlDYbR/RdgFDKRRnYJ26Y/ip8Gfsm3PfziwtunBwhfqTtHVkas9nvSU0MRpPnGWmW8Oaf/rccHt0b8/MfiHtJ9+MRF+dnfPc/+t95w795I0rUVFlOjlhOAv3KCOLNdVc92OIFHFSUJ2ADobrw4gOfUYUmmBaFcj5ACwZE/JSMw0ILZIbJXNw4zGR+A9NiErh7cc7tp25j4pSX/MX/moMHXkiUzQOE7K+SfcCWub584fbDv8adD72L09Tj3RgS4PXyljBPTWtE9CTamJRnJiR87z19M5CkKd6oa8lywCk8gxrp6pYiX2DEX28YooTLzYZFuQjMDu9Ds85olPecn60wsebg4DAY/0z3SimFH0ZW6w35rAiTMiZ826MRGUODUDy7fUWZ5jg8kY3QOnCGtb56HyZUt0rRdz1D35MmEatqS/Zsw+yhGUkZhaajN6zf3WAafc2n9m5kGH0QDnhPU9fMiiIIMfKYoRuYzxesNivatmM5n1HkM0DwWuj7gaFtyPOCqm1J4gTvHGmScPvuHbI0Yz5bAJ7tZkNWlgG6EEiznN16TZmlgeon4fm6cSCOAlVsPp9xcXFBMZ+RpzkQ/JSr3ZY8L8iTHIejrvfUVUVSFMzLnNXFlm/7u/8T3/AN3/DxXlqfDnEv6X4s48knnuSNP/1GXv9Tr+eRRx+7rioDbUkm+Wkgg8vEtw0DGRSjAqumKlgkTGpQ4e8VIHIFDajpZ0c/jKRRFEQRSgVI4rqctoGzK3IFRqCvfucV/dBz3gif++e/mvzoRkj+4jETo4Er1ZkAygUqrxfGsWPzxPuoH30XagwJVeurJDcSzuwWCI+1XUdsLG7wQd01YcvKCeJVkBRfUbt82HSM8mgFowobjlXB10EIstNpF5vuo6B9wH+jJCY2FtHTqUIIGLMeMSIoMTgFdV2TxxnGGJRxYdaZ04xdS5KkiIHtakOURhijSeJAXfPiUaPQdC1xGuNGR103LMo5XivGoaGqGuazJXW9Dw1FbVlvtxwtF5NXBYzDSFu3zOZzwIRpvn2DVtBVLcXhgtGNeOdI0izIrOuKopyDG7HW0nQdRimiOMKNEjYXhK6uqNuWspyR5RnOTRuheHa7Pf04cHp8zHa7A6tZzOYIwma9JckzxrYln5dsL1e0TUuaF1hryJIEtOHu7dsYqzk9PUWZIHN+x9veznw+/ziurE+ruJd0PxbRdR1//+/9A5730EP8wD/7Qer9PjTXdaA24fV1pRiqO8A5sJOPApMoSwn4EdCgDF6NgdY1iSOCi5dcN6qYRAsQmll4CVQv0Wgd6F4T1EmQRDBNSlDY/CbF57yc2cF9oEyQFCtBnEMZUCrgeMh0fVoY9+dc/NobGYZNaPCJn5LxlGen/195j1f6Gs9V0ywxHQXc9grHDpuO4Cc4JWwPgr4ycyE8t56UeRiDEzftRRM7QhPuCQpRPlSmPH26UFcV97SByHQrg2elovM12moiHfBTpQPXuKk7RjcyL2d4kYC7W40ShwKatsdgsVE4kjzt2eXRCFXb4b2iLFJEPG0fJhJ3XZjcm6URyof33A/9ZFZk4UoRlkbs64rMRtg0xgsYCYqv7X6LiLCcLYIEd5ZPY5YE8SN3z85I0gxrDHmZY62lncawKxUEIKNX7Ks95XwG3mOtpW966r4J4gwP+2YX4BkdI8NIGiWI9dw9P+eBGzf4lm/9u3zVV/3Nj8OK+rSOe0n3jxPee975c+/kf/2O7+Cpp24FAxClUD54FaBDdRjo92FyQ+C7MmGdV42v8LPTocGGCsquzWbDfDYjsuG4PuUOQPDOh8aYMoEJMbEUlL9qpDF9vAGjVEZdj1NHdOD+RjAGDhU6OeHws19BMj9FKY0yeuLiQrW5w+7hX2T16HvJZ4vQ4Bk83diQRhnKOJpmILEmzAybnMycCnCGmvjDahJn4AITAB2yoJp2BmdAKzdR6NSkbptgC21Dw/CqWsdPzaoJopikyH4c8ChMFBKVwuH9yDgISRwsMMMEDY1iIOAz0PYNrvfEWYo20HQtQz9M88+iMMxyHImTmLxISHWMwlC3LZrgLnalzPPeB7GJ14iSMDDSGIqsoGlrtA7CAWUtWnsYPE4Ug3MkccwV32+73TCMHbNyjo3TCX+GYezDdUUWIxI2UiWoa4Ke4JFgy6kN4aMUmrbD2ojtZs18OaPadXRdg7GK5fKQ7WZHGieMBLvFfhgp5nMiZfD9SBRFPPHkUzznhQ/xznf8PHmWPSPr7NMs7iXdP0qICL/4i/+On/yp1/HWt/wsMl591RV+OgZrM0EJMjEUxIXkoOxUz4X/9JQ4wkAcJnHEldOCuvYfNTJVe2KmJaUAM9WHQWdPBEpNvxsnUBgIf6hwrpukpv4aB5YJxXCjBzO9HoCZEd/3J/D9lnH9GKrbIUpPBuhBqIEPrlVXLyHT5YuAQSYhRXAmcxDKVmXQonB4xm7AphbpBqq6ZblYoCd1nCiP8VxXqMFq0gSWhyFAMN6jlZ28JwIr5MoLQlAYB/3YYZM4JNauAxRpEoH3uN5xvlmx31fUdfXH+k5E1nJyepODgwOmC6bratwoxKnFO6Fu2jDssekYxoHZbI73A03XUk6PZ1kezH684vzsjDRJWc6KwACZ/JCvPjSFZxhG2r4nSRJk9GRJ8EuIkiRAUlox9gPW6OAi5oORfNcPyOjIinSCumCz3lIWM2yk2G93aG2Ik5TODWRpyuXZJd/3vd/Ll//VV/2x7tVneNxLun/YePLxx3jb297G97z61fT9gPXgQvfpesitFj0dzeX6FotW19JeLT4kxzEooJRWE30pwA8YP1XI5vr4HNAEHShk3iPKM3oz5Z+A6aqr60BhRF/zS2XyUDBahTTv/eQQ5vETjqwwIeF6jzITrssIYxiKKMqHanOq5iZkOVSwgR2GKIWZfggoh6bahdExGFCjkKUJbvKTUOIxKijBQk0ajuk6HAXQKvjotl1HEQezdO+vOMCATMwPpVGMU77ViJMJUgAnwsXFBYv5nCSx1F1Dtd1ysuz5L7/0AWY5LGcpRycRZ2c9D97MMMBvfGDDzZsJu63j4U
cr8tzwjl+55B2/fM7wNJbw+8bpjRvMF3PqzZaj05MAD3nC+0GePnFoHd66yFT0T7i8Erzr2e5rytmMtqqJjCbNcprJiNzoIOVu2gYlljRJwEzYu9YM3cjZxV289xwsj0jz9OoTo2lqsiQDDL3rw5DOOEOj6d1IkiSsz8+BkSjPyZMFNx445qdf/waSJPlYLaXP1LiXdP+g8YEPfIBv/pZv5qnHn2Qc+9DoktDdvtLpaqYTsw9igMkO9unbrELVayZq1JQJYXwaDgjqKwUjAZ+NAOeD3FaFxtGVleOVSALAiwp1rwdMADSu7Be9eIaxJ4nT6ZO76s6FJDbqqcKVMGVClA9V0rRJKAdDP5Dk8VSIT6+jJ2YCEYoJTxW5bhgNztNUDWWeo65sgP0VLcNN7+PqF6Gp6Kfnx4WqPcApBuc9jgGjdNjACBBOSGQT5/cKqwVw0LmW2EQUqea//Rs5z72haHrHI481dGPYeNQ0Wl6jccojTge2hIDgSTJFkUY4L5RZxHMfzKg6x3t+a82P/9vb/NKv//unHJ/ePOXo6HiSIWtG1+IEvAtqO9e3jE7j3UhRlIEZgQ8ObjYCxsloaGqwTjASohBxkxoxCp4SOvgE73c74sjStC3zxQxxI0PfkZcLFDJtxHIt/FBaaIaOet+y3+wQ77nv5n0kaYyI5u7ZHX70X/0Yf/7P/dk/yrK5Fx8d95LuHyTWmy3r7QX//d/+Bu7euYNTY0imXoEFZARMKHkNaBewVf/0Q2HheAkm5FdwJoREi5+qtKsHbahGFQEn1KFzjkx0rGnRKGeCJNg7mEbyBMYCEHkUwf7vKsc6r7G4wEVl4tG6ULM6fQVCTJWrn4QYOmwUWk8jf7RMBjcEnHmqvEFP1ztlv6v3OOGqAEo0g3NoM9GoAvKIVpqmrvFak6Y5emr6idJ4JiB42iS0TNj01Uy4yVVdqXANI1PPDeFFnxXzt7405tbtisv1OPX5wkYxLyNmZUQcKdzgiCLNExeaWaY5PYTToxjvhQ8+vGPfOLre48eRo4OEzRaODiwvfE7Obj/y5l8453v++Yd/3wq4nJXkacbh0REmiie2SrhSj75eguqKqTLRy3a7PWU5x48d2mr6diBJwjj4KwaI9oIyhrrpgwOcBEvGJLLgFV4Hn2Q3OAY3opOMxBqi2DCMA9YotDKhBzHBQW702NigtOOFz30RP/pjP/qZYi7+TMW9pPsHCeccv/ne9/B1X/vf4LQPTmCjB3GgY2AMRYjR4Vg9uX6ZUMpNmJmfjuEKtJmqFAEJDTDvpwO7kglSmGhi0zlZKT+ZjvO7CtWQJqfz6mTGrUH5sCBF4RV4LRM2PMEYaLSZcBGtA0465Uq5ctcJLf5rmpkXjUgQQYTXn+TFCtxVQ8xPHAQVuogyJfwrnq1HhUbh1ZwZPeHfKqTfkM1D5SuAVaEJGDaZ8Dd+Skzaq0lIHTBgfZ29YFbAf/fXErpuz3YPiOfZN0sevD8mjSxpqrBGk8aaKIJ+gLf/8iVf+HlLipm5rgR7J6zXDU/c6Xj/R0Z+9l2al794YJl7jBHmi4jn3F+SpZpqP/K13/ZrPHH7o41ujDG84PnPx2QJWiZhhIRGY72rpmm+MSYyU4UtMAqRtTgd4Jz9dkesDXGegpoq/94x+I5iEsiMbsQTLCbLRTGxSZ4u/sV7FJ7VesdsMcf8LnpL29SQxOxWl2RJzk/85E/w0j/50o/xSroX3Eu6f/gQEX7n4Yf5sR/5Ed70xp9mX9UhgY2C2AEhxksYseJFns4v03EfVLBTFAeRRq46+lPW86KwDjDT1IbfnWADLeAagvBeEBe4riHZTTnLh9/5aRYX5npEWrBxDGUqgfdg8HLlFKanI6hHe0KCnaTIToT9akMxm00etEzy44kadsUVVhLEFUwevJNsNdhEhktQfoIXdLCyFAgG5tM1yvSPa5mvOLxMhj1XCViFMTjOCWmSXjcl40j4ur+WcriAN7y95YmzlkVS81988Q2efX/ByXFEnhosjtEbRBz+6vmVxiKMCs7XHa5XnB5nDC5sHuPgGZSn2g088kTNrvGUqeb4wHI4T3jsqZZX/8uP8I5fWX3U9+bo+Ij777t/Oo2ED1VdeRhL2AgDBTAAR7vNjixJAivCT7ASE+QjnqttZruvKPOcsetRkaHtG+IoxjlPnsSTH3PoKfRtd63C0yJ0dc3gPUUxZ7vbEkcRX/DSL+BHf+RHP9PtFz+ecS/pfixiHEd+9Vffzf/5wz/Iv/uldzMOEKvAEOjwYdy59sEhyhNUatNBXCbFmlJXXrk+4LxT5eenhea9wqgr5Vp43TAwMhyxPU83p6Z54RNGDEFeO3W8dagixV21wgIyHMYATbCD1k+zIiZJlvbhXCzKoCfvAzcZoItTIYlirjcYmTgZ4T1pxitp13S8vuJshLwerkL7CQdXCi3++lumtLmu2OTKUGequr1AVzWQWPI44m99WcZ9R/CmX2xAe25f7Lh/6fk7f/P5JBlkqQ5j0RH8ED4FHYX3/dSTLe/54Ja//MqT69OGD31NBu/YbAY+8njNo7d6PLBcGKw25Al83vNLHnmq45u/63385u/8XiZEliU89znPDUMtJ6zfj4G659xA33cUWc6m2lEuF8QEpgIIo/MYAYzBCNNct56+G0jzdJpjR+APi55gIo2baHlaw36/Y3TCLM9Q1tA2DUmUhdHuSrh76xbPftZz+Rf/8od58Ytf/PFcKvfiP5B07TN5FZ/qYa3lZS/7Ql72si+8fqyuat7+9rfxQz/8z/nwhz7M6IeAc6oEJQ4XxL+Yq/6ITFidVfhhMtZWIf86PDoy14kSAAXjhL9qmfihQjiyewHjQDQjwdhciWBEIf4qyYdq0kzMCY1mRAJuPFVdXDEi/FXVHaAI0RqlCUlAArfWq6cZGzJBEILG+R6jwRhAHH3vqOqK+XIRmo1e48eAW8vkrR4QlQDVGCZIBoAg2UVMMEufWB1pmSJeMTrFw08KL/v8mL/8Z3re9ZuQRzF3Lvbcvqx44CThbb+25RV/6og8CxaZCkHGkOBv3oi478YBfhScVhhRWBPYAFpARcJ8kfCRX+95wYOaMonCZqM07ah49v0pz7ov+6ike3xygjHmmmOsEIyBYWgBgvexViwWc9brNbOiJAo3jOgK4faepm2JbIyNI2xuURKEFU3bU85yzMR0EQXd0IIQGo/iyYskNOdEUaRFgGiU5ak7d/jrX/E3eM1r/sm96vYTHPcq3Y9D3D27y0/91Ov5iX/1Oh6/+wRWacwoOC2B/qXGaRqECYkReToLXXfmTaBvMdXKbuIvTOT40GMZEW+mYzsEfCBUlaKnClVUIObjEaUDHOL1NRsiUIr1tXABCS5doq+acyHJ6vBPREuoTq8gYT2JOUQF790puU+dQ/ABD+YqWU/QhPeT3aO+unboR48xZjJi5+pEHhp0/qr9F97/5zzP8nf+85SzbcuTd1pcD7/0myve8s47fNF/fMI3fuVD3DjKyNMojEUaRxyKWIOKDUq5wOCYAHSZSl0lgnOKEeh6x2Yzcmc1oBTMM
s2z70v5gR9/nFf/iw9Tt+EeZlnG0ckxB8sDEIIwQimiOIygF61QRhi6gaZukf+3vXONtfSs6vhvPc+7z+xzzpwz01I6VFq5VC1IRAOJIGKiiSIEMTEY8VOjATWIJiJ+UfSbFy4f1EgQEhI7ncKcuQTjdGZaiVqmN4m1FxXKrcNMKL1Np3Pmdq77fZ7lh7Wed6oGkMLsoWX9vp2Zfc7Ze5+9117v//mv/1IYz44Zj0fUZsvWilY4vXyG8dwcs1vnbGowCWdOLbOwdY40mvEwMvtAPLO8zMLCAjMj+zkmR1ROL59lYXEbZ8+fZS7PsP2y7Szt28O11157kV/5wdMIeeFSoqo89NBD7F1a4tAtn2Jl5RS1zNJJT1VBchk0QAsMtzU4CbGJMaodNFV3OLgum5Vh/U4rXgnrgAvmc80q1GwTcdJmbn00l5T8gM9kBGs8xf/qLmGIgFvLQIbJO6uppl+3GEvNyRdc6pCpIJ4LYc9Dsu7Z74c17Nqqt10FqLsm3EM86SsrZ8+wbds2HyqBZsl76Y7E9b845orLlC9/ZZ3l5Z4Hvwq33/cEr75uxLt//VrmZ2FjrbC2BpoTWzpldmxuhpQT62sTHn1qjUefmLCy1jMeJXbsGLM4O/JEtUqSyuyWzAuvGvOpO57gTz/6ZR55YkJKwtz8PFdf80JGecZ1WKUvPaVXNjfW2L51ERumseeUWj3DAmo1vXt1fR2plb4WxlvmSBaQTPb15/atQp3Y1UnOyQuskrIgmtFqY8kT7XnyxJPMzs2x7bJtvOkNb+L9H3j/FF7lwf8iiu53G33fc9+993Ljzpv4t3vuZmNjgnZmgdKirneKhUinSlda3KOtzxl8r22NjpNEbG24FDtYwQRLMxrYgZ6Lrf6quFAkvdH1Qz31YPRkzgox+SO3DAhlCNmxglK9BxV3ZCR3Wtj4b20Zv35bc3gk96wqtXizn4rLAQmtNvlXBEZVPVHN7ltq6WJJ+Y1fmueNPzHmybM9R4+d4+y5wuamcuqscOSep7j6+zLHHl7hlT+4wCt+aJEXvWCOF189i3bCmdObPPbUxAYtUsfCrDAaVWpOSFG2zWVGXeYvdx7lwL88TsoLXLHjSkajEZ0HXzSfczsMLb1y+vxpLrtsO8kPwppxzuRxe+ab37kvPSJKyjMUVc6ePs3cljHj8RwiFpJk3urqHzg2yLI52WBjY4PFuVk0Japms9EV5cz5M9z9mX/lqquuungv4uAbEUX32cDKygq3HbmNXTfcwENffshP/N0IOzFbV/FQ8BYCQ6k2iCAVOmo03uIAAA6LSURBVLu9eYOHkopYOAS1NgtFupDfJR6Og5KkupYK1SfFKqATu6mOrNB3xbb7VmwCLanS+09tEkh2Z9gFZ0aTF6wDT64p24iyrQTyBnZYa6T446QHyRYxIyBUD3Oxoq6SyAm6TnjL68a89JrEdS+ZYUtniWsnTm1w2709i3PKPZ8tnNtMfP/lws+9pmP7YofmwnhkHeTW2RGXbc+sryp7/3mZI/f1PP5ET5bOjiJzQmvP2soKs7NzticO+5CRAlWEnCtF7dIj+cWC6TiFmiwPw/IirEiK2gfIer9mRXTb9va5SBHT2bOnz00EVs6eZX48R86ZlbUVZudmff+b0teO37z+en7vPb9/kV+twTchiu6zlZMnT3Lg5pvZ+4ndPPHk41SgbELqzIPb1qbQ8gtISH7a17Xamzv5IR64juArgCR5pK+JDOIuhHZZq/5/SS10G60+wmt1pHmIVYp5IrSZj4tLI9lygtsDUgYPsC3ZVKiJnk26mRk7UPP7Xr3gJG/wZMgrzoOGCX4bdNj1htqhVBLhVS/rGM/CbFe49mpBZex+Ybji8syXjq+yfS7xxYcr9zxY2dgoNs3lnTwpUVRZ25gwHo/oROnVrija7xK/Gpmsb5K7RMqdP2fJDjzV7quF9ri1a8hMtm7drgR8RbwCyTIc5mZncdWeigypbmVSkKrkbHqx9CPGiyNuOfyPlgsRXGqi6D5XUFWOHj3K0p693HrrIc6fO89E7QBKN32PQFfsYEiE6j7d5EMLVb1Qk+xy1XWG1qEmXwOPJjTnC7Yvtb1tdnPTjYcQ9VShJa6BT1KZNYw2rKG+sp1WsLzba4dmIkiPD4dkN7LZ70oIvSi5MuQTa8UOyKiUClUqXRVIebDlaRupc3dIdSnFvt/lD9eqhyAibAhEinX5OVmS2Ob6KpMK83PzdP5cSs3Y51BxbVwv2PCqyT7tsaXkf5NkD3qyWVg5f47t2y5vBj76apNleZTRvjIej/FHC8k2jWSBVMyOtrq+xvmVc7xgx/P53Xe9m7e/4+3TfTEG34gous9l+r7n/gfuZ+fOm7jn7s+wXiZoErq+0FeQbFsriiayt6/tcEuSuRVEE+JjFG2vmfhkmwD4WG5FSdkcBbUJCt7JkTqTO3xIQ7H1Oan67/GRYOmx4pObbmtdcqpW5EmCSo/5ioXiF+LSotKS/eTqTuEEXmAt/DyJDiVU1IMQRd3/66W1Pk2A8ajOC0G87ijwyMuUbTClwhCLkMUONgtCFXNiZD9QrKkFXUqb2napRH2JKPiRJxNMnmkFvxZl+fQy2y9fJHfJMppLa/OFih2+pWIfXr0U7rrrbhYWFi7mSyz41omi+73G6uoqtx85ws4bdvGFL32ejYmSs5oUIcnURKloEVsQKT4g4YVJPfNXC4PuaAVP7JBMPXhSGUaYUe9EtbirwRPEcA+bljb4Zv/WxqeB9q+l+maNIReYIcgdZAjL0WpyQpNRLK/CbtdiK4uYuyP5wWOtxQ/3OuuGKYNla5Ts0I5qufN9EkYo0nmaHP74NdnBZBKg0N4/6hMWLcOieWH9Y8aeWw8rqrmljeGPXyh1wurKOvNzc5Cw/XJU+s2e1CUf5cWe/x4kCe997x/yK7/6tim8moJnQBTdAE6cOMHBgwdZ2rOXE489bm/6CiPpKTpGpadqs4+Jqwm2ZsbsY+oFyPMR3Pc7eO29kJiGa4VHPOhbJFuNzXbZL8kKrKJkjzpssdwkGyu2jQs+tdekiUETtYjNPBy6VZMMcutgbYIupxaOY0UxYZ1o87tW1HejtcOtFssoNlqN5WH0bvUapBUXBQQs9hKvyS6ZFMyJgucjq+cBt1X1qj0p2ZVHX8UHX6o7HDz6UQEqqdqQSEYoI+XKK67iwM3/wHg8ns4LJ3gmRNEN/i+qyleOHmXfnr3ccvhTnD5/ilyUzW6GrvZm3RKl9paB28KymiNCvdhRWzqVWcpa5kCpdviWxDYamHOhXca7PU0sajFREbUkLBvfc4kiewfepudwTTrZVuWiabCk5eSB6lWRon7w5iPLWi2LQkwlrgl0Yh16SuJ2LOvOS8U2NotSBFSFVH0LhkAp4jJN8zEzbPIorfNP7SAy00alz587x8yWGcajMRubE1bWz7O4bZsPuogNwEgL/mldfDJdWZS0kfjAX32AN7zh56f7QgmeCVF0g/8ffd/zwAP/wcd37eKuz9xFv7FJ74llmUKxStiUAVR7PzxqJ/6tVohvRBYvtgynZurH
9gnsaywKs+WJma2qeKawtaFuuPA+tFDprLhVTGYodokvAjXVIZ/WbcEkTD6oUl1KuWCpU6kXrvXFA9LV+1j3rZkWbIdv1CaN2AcRta3SEbQUn5sWzp85y9zsAt2WEaqVDCCFREdV2y9Hqaysnmc83kLXbbHBCfTCgKLnbrz4Jdewf98nGY1GU3kdBN82UXSDZ87KygpHjtzOTbt28bkvfs5ihQUf/Z1QVEjZL+pVQcx/WqntOAnpLWRdU2fSgbfNZgBrqyq9kNnqYJt+8zXr5jLzzRruuBAp7kJITYGwgieWQdssdIp1n/YzKvS+jkhAsx1eibsiEIuuxNeyg5I97yJ5RoUUqNkDharfB4XUqbsjrIZnH1QBK/LqdTsh1N4eVxKhUMy3XIWU7dkwzViRmvjQhz/ET73+9dP8kwffPlF0g+8sJ0+e5ODBQyztvolHHz9h9igPQE+1RxnZ4RJtzY9rs77OCG3bKJTUm81MWtebXFltjaYmsk+8aXMWCEgpNnbcPW0TBQyrcPz6HtQ01oT5iykJkTIE6QhC7VpiGrQ84iwtg639bNNoRZRa7EDR9n3K8EFi96CnYgMrpVrXb1cGgojnXGCSSK2YtJLMMtKNtvDTv/Br3PlPS/zAS1/Mrht3WrB88Gwjim5wcVFVjh8/zr79+zl44O9ZPrdK55fmaDLzgiucfXvNaXMKVyQ3J1imF0WSkmtzIfh6dVoGBENGRNJCad0s1kVa3oN46I80z9vTHAM24ozqUAxxV0RW99a2dfHFdWTBC6swmfTMjGwAwh5bC+K5sPEC7GCsqFphTsn2vik+Pm1XB6X2SMr/Q+ZY3Po8Pvqxj/CKl79sGn+64OIQRTeYPn3f88D9D7D08U9wx9130W+sMtGOET2bvuG4Wp4Pqc1eJNNHCzbU4b2wOYhVkGJ2NhEbaDAdI1FTJRdPZ8iJKnYglkQG7VjAdo5hkoRiuqqNP1+wvUlK1tEi7nCw8d4ha4dmrQMtdQgQp3mDKd6Rm79XRex2qP+ejLbmVU26sMeovPYnX8vffvgj0d0++4miG3x3sL6+zpFPf5qdN97IZz//IFXUJsm8YxW/hLcJ3OweXwb7GZkhz2FomFvBNKXYJAHvSlUTVStZxQ7IWjiQJN85N3xpPbLa96fmssA6WXznXLO9ra9vops98wvzdohWzYaWqR7Eg31v7f1wLw8DGW1Qb+v8VmqdsLm5jmhmz94lrntZdLfPEaLoBt+9nDp1ikOHDrJ7aQ9fe+RhpFZyGqNlYn5cAXppC3Kp1YtrwrIl2im/DzyYvcwcEoqYt1hdPvDfWcWkjibVVtd+26olzS5RuPWs7RJNbomTkga1FxTNbs4oOiSnqQypxR5kL65Lm7ZNEt741t8mnTvK+973/ggXf24RRTd49qCqHDt2nH1793H40AHOnDnrNodsWq9U+ol41GM/rIuvyUZq1e1rqPqh2tNGclVA3TbmgfBFAXqSZHM7NN234sMfyeWJ6l205ZzlWqmUIXOiOR90CMPB33ototKm47LYSnWZyRw+dCsvuPKK6T/JwcUmim7w7KbWyv333c/S0m7uuvMIKyuFmntmSqJIglRtsMAtWpQLFrDWzUr263o1v66ZFKpn4ohJAOVCuprgbgcfvmibNJIfqqlidrEKdmooLus2p4N1uhW4/MoXcubkIzbZh/DLb30bf/wn770Ez2QwJaLoBs89VldXufOOO/j4rk/w2S/8F33xoYJa2JRMR0Vs9MxWFbl0QLIOVTR5iI5pyqYCeyh5lcEHXBNk9wynXH31vL81qu8xSolq2ZiIH/SBj/OKdcOpCKPZEQduPsyOHTsu3RMXTIMousH3BqeWlzl86BC7di/x2MOP0CXoSyFtEaSY/1Z9LZD6NotKC8lRhvMzHx8u1LaxyDrpZE4LfO18y3xI4sMPnv1bBVLqeOWrf4bjX7qXc+fP8Ftvfwfv/J13XcqnJ5geUXSD712OHz/Gvv37uOXwrZx56hQ9PVVHtpanKKla7nDvmmxb8ikK2qlbvdp7SNFebY/2UKAT9OrxaDaYkUX4sde8mce+9kWuedG1fPDP/4jt27ddqqcgmD5RdIOgUWvlvvvvY++eJW4/coTJygZ9B2kjoZ0PIyfbRWcbi71DBteJ03BIJ6lQNSHZFnSOtox53vOv5sQjX0Gr8Gd/8UHe8uafvdQPOZg+UXSD4BuxtrbGnXfcyU037uI/H3yQqhtQRkA/jOjaWHJBgNLsYCqkBFKU6370dbzkh3+c8cwiX/j3g/zdxz7M/Pz8pX1gwaUiim4QfKssL5/m8OHD7N69m6999WFzKKhv4MAC3lWgy9kD20e883038CPPh9e86uWX+u4Hl5YoukHwneDYsWPs/+R+Dh08xKknT4OMeM/f7AfgwF//AXv37yHnGOENougGwUWh1sqjTy6zuLDI4lxk3QYDUXSDIAimyNctunEdFARBMEWi6AZBEEyRKLpBEARTJIpuEATBFImiGwRBMEWi6AZBEEyRKLpBEARTJIpuEATBFImiGwRBMEWi6AZBEEyRKLpBEARTJIpuEATBFImiGwRBMEWi6AZBEEyRKLpBEARTJIpuEATBFImiGwRBMEWi6AZBEEyRKLpBEARTJIpuEATBFOm+yf9/3eVqQRAEwbdOdLpBEARTJIpuEATBFImiGwRBMEWi6AZBEEyRKLpBEARTJIpuEATBFPlvNEXmQ10dbAMAAAAASUVORK5CYII=\n" + }, + "metadata": { + "needs_background": "light" + } + } + ], + "source": [ + "user_input = 'Pick the blue block and place it in the green bowl.' #@param {type:\"string\"}\n", + "\n", + "# Show camera image before pick and place.\n", + "\n", + "def run_cliport(obs, text):\n", + " before = env.get_camera_image()\n", + " prev_obs = obs['image'].copy()\n", + "\n", + " # Tokenize text and get CLIP features.\n", + " text_tokens = clip.tokenize(text).cuda()\n", + " with torch.no_grad():\n", + " text_feats = clip_model.encode_text(text_tokens).float()\n", + " text_feats /= text_feats.norm(dim=-1, keepdim=True)\n", + " text_feats = np.float32(text_feats.cpu())\n", + "\n", + " # Normalize image and add batch dimension.\n", + " img = obs['image'][None, ...] 
/ 255\n", + " img = np.concatenate((img, coords[None, ...]), axis=3)\n", + "\n", + " # Run Transporter Nets to get pick and place heatmaps.\n", + " batch = {'img': jnp.float32(img), 'text': jnp.float32(text_feats)}\n", + " pick_map, place_map = eval_step(restored_state.params, batch)\n", + " pick_map, place_map = np.float32(pick_map), np.float32(place_map)\n", + "\n", + " # Get pick position.\n", + " pick_max = np.argmax(np.float32(pick_map)).squeeze()\n", + " pick_yx = (pick_max // 224, pick_max % 224)\n", + " pick_yx = np.clip(pick_yx, 20, 204)\n", + " pick_xyz = obs['xyzmap'][pick_yx[0], pick_yx[1]]\n", + "\n", + " # Get place position.\n", + " place_max = np.argmax(np.float32(place_map)).squeeze()\n", + " place_yx = (place_max // 224, place_max % 224)\n", + " place_yx = np.clip(place_yx, 20, 204)\n", + " place_xyz = obs['xyzmap'][place_yx[0], place_yx[1]]\n", + "\n", + " # Calculate Persistence to find peaks\n", + " pick_map = pick_map.reshape(224, 224)\n", + " place_map = place_map.reshape(224, 224)\n", + " pick_p = persistence(pick_map)\n", + " place_p = persistence(place_map)\n", + "\n", + " pick_maxima = []\n", + " pick_locs = []\n", + " for i, homclass in enumerate(pick_p):\n", + " p_birth, bl, pers, p_death = homclass\n", + " pick_maxima.append(pick_map[p_birth[0], p_birth[1]])\n", + " pick_locs.append(p_birth)\n", + " place_maxima = []\n", + " place_locs = []\n", + " for i, homclass in enumerate(place_p):\n", + " p_birth, bl, pers, p_death = homclass\n", + " place_maxima.append(place_map[p_birth[0], p_birth[1]])\n", + " place_locs.append(p_birth)\n", + " pick_probs = softmax(np.asarray(pick_maxima))\n", + " place_probs = softmax(np.asarray(place_maxima))\n", + "\n", + " x_image, y_image = np.mgrid[0:prev_obs.shape[0], 0:prev_obs.shape[1]]\n", + "\n", + " # Step environment.\n", + " act = {'pick': pick_xyz, 'place': place_xyz}\n", + " obs, _, _, _ = env.step(act)\n", + "\n", + " # Show pick and place action.\n", + " plt.title(text)\n", + " plt.imshow(prev_obs)\n", + " plt.arrow(pick_yx[1], pick_yx[0], place_yx[1]-pick_yx[1], place_yx[0]-pick_yx[0], color='w', head_starts_at_zero=False, head_width=7, length_includes_head=True)\n", + " plt.show()\n", + "\n", + " # Show debug plots.\n", + " plt.subplot(1, 2, 1)\n", + " plt.title('Pick Heatmap')\n", + " plt.imshow(pick_map.reshape(224, 224))\n", + " plt.subplot(1, 2, 2)\n", + " plt.title('Place Heatmap')\n", + " plt.imshow(place_map.reshape(224, 224))\n", + " plt.show()\n", + "\n", + " # Show video of environment rollout.\n", + " debug_clip = ImageSequenceClip(env.cache_video, fps=25)\n", + " display(debug_clip.ipython_display(autoplay=1, loop=1, center=False))\n", + " env.cache_video = []\n", + "\n", + " # Show camera image after pick and place.\n", + " plt.subplot(1, 2, 1)\n", + " plt.title('Before')\n", + " plt.imshow(before)\n", + " plt.subplot(1, 2, 2)\n", + " plt.title('After')\n", + " after = env.get_camera_image()\n", + " plt.imshow(after)\n", + " plt.show()\n", + " \n", + " # Show ambiguity measure pick\n", + " fig = plt.figure()\n", + " ax = plt.axes(projection='3d')\n", + " ax.computed_zorder = False\n", + "\n", + " trans_offset = transforms.offset_copy(ax.transData, fig=fig, y=2, units='dots')\n", + " X, Y = np.mgrid[0:pick_map.shape[0], 0:pick_map.shape[1]]\n", + " Z = pick_map\n", + " ax.set_title(\"Ambiguity Measure Pick\")\n", + " ax.plot_surface(X, Y, Z, cmap=cm.viridis, linewidth=0, antialiased=False, shade=False, zorder=-1)\n", + " ax.plot_surface(x_image, y_image, np.ones(prev_obs.shape[:2]) * -250, rstride=1, 
cstride=1, facecolors=prev_obs / 255, shade=False)\n", + "\n", + " ax.set_zlim(-251, 50)\n", + " ax.view_init(30, 30)\n", + " for loc, value in zip(pick_locs, pick_probs):\n", + " if value > 0.05:\n", + " ax.plot3D([loc[0]], [loc[1]], [value], \"r.\", zorder=9)\n", + " ax.plot3D([loc[0]], [loc[1]], [-250], \"r.\", zorder=-2)\n", + " ax.text(\n", + " loc[0], loc[1], value, f\"{value:.2f}\", zorder=10,\n", + " transform=trans_offset,\n", + " horizontalalignment='center',\n", + " verticalalignment='bottom',\n", + " c=\"r\",\n", + " fontsize=\"large\",\n", + " )\n", + " ax.grid(False)\n", + " ax.set_axis_off()\n", + " ax.set_xticklabels([])\n", + " ax.set_yticklabels([])\n", + " ax.set_zticklabels([])\n", + " plt.show()\n", + "\n", + "\n", + " # return pick_xyz, place_xyz, pick_map, place_map, pick_yx, place_yx\n", + " return obs\n", + "\n", + "\n", + "# pick_xyz, place_xyz, pick_map, place_map, pick_yx, place_yx = \n", + "obs = run_cliport(obs, user_input)" + ] + }, + { + "cell_type": "markdown", + "id": "bnTeoANTW_Ja", + "metadata": { + "id": "bnTeoANTW_Ja" + }, + "source": [ + "A visual representation of the ambiguity measure for picking is show in the bottom figure.\n", + "Below the value estimates you can see the top view image.\n", + "The value heatmap shows what regions the model considers to be valuable for picking. \n", + "The red values correspond to significant local maxima ranging from 0 to 1.\n", + "A higher value corresponds to higher estimated pick success according to the model.\n", + "In case there is not one distinct maximum, the output is considered to be ambiguous.\n", + "Based on this ambiguity measure, we can deside whether or not we want the robot to execute the task." + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "provenance": [] + }, + "gpuClass": "standard", + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.10" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} \ No newline at end of file diff --git a/projects/utils/hyperparameter_tuner/hyperparameter_tuner_demo.py b/projects/python/utils/hyperparameter_tuner/hyperparameter_tuner_demo.py similarity index 100% rename from projects/utils/hyperparameter_tuner/hyperparameter_tuner_demo.py rename to projects/python/utils/hyperparameter_tuner/hyperparameter_tuner_demo.py diff --git a/projects/utils/hyperparameter_tuner/hyperparameter_tuning_tutorial.ipynb b/projects/python/utils/hyperparameter_tuner/hyperparameter_tuning_tutorial.ipynb similarity index 100% rename from projects/utils/hyperparameter_tuner/hyperparameter_tuning_tutorial.ipynb rename to projects/python/utils/hyperparameter_tuner/hyperparameter_tuning_tutorial.ipynb diff --git a/src/c_api/dependencies.ini b/src/c_api/dependencies.ini index 3f6ad4c185..632fa655c6 100644 --- a/src/c_api/dependencies.ini +++ b/src/c_api/dependencies.ini @@ -1,6 +1,5 @@ [compilation] -linux= - libboost-filesystem-dev - libopencv-dev - cmake - check +linux=libboost-filesystem-dev + libopencv-dev + cmake + check diff --git a/src/opendr/_setup.py b/src/opendr/_setup.py index a6bb1c5438..da8a5adbbb 100644 --- a/src/opendr/_setup.py +++ b/src/opendr/_setup.py @@ -107,9 +107,13 @@ def get_dependencies(current_module): parser = ConfigParser() 
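# The markdown cell above describes the ambiguity measure but leaves the final go/no-go
# check implicit. A minimal sketch of one way to wire such a check on top of the peak
# probabilities computed in run_cliport (pick_probs, place_probs) follows; the helper name
# is_ambiguous and both threshold values are illustrative assumptions, not taken from the
# notebook itself.
import numpy as np

def is_ambiguous(peak_probs, min_top_prob=0.5, min_margin=0.3):
    """Flag a heatmap as ambiguous when no single persistence peak clearly dominates."""
    probs = np.sort(np.asarray(peak_probs))[::-1]   # peak probabilities, largest first
    if len(probs) < 2:
        return False                                # a single significant peak is unambiguous
    return probs[0] < min_top_prob or (probs[0] - probs[1]) < min_margin

# Example gate, assuming pick_probs/place_probs from run_cliport above: only execute the
# pick-and-place action when both predictions are unambiguous.
# if not (is_ambiguous(pick_probs) or is_ambiguous(place_probs)):
#     obs, _, _, _ = env.step({'pick': pick_xyz, 'place': place_xyz})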
parser.read(join("src/opendr", current_module, path, 'dependencies.ini')) try: - cur_deps = parser.get("runtime", "python").split('\n') + runtime_deps = parser.get("runtime", "python").split('\n') except Exception: - cur_deps = [] + runtime_deps = [] + try: + compilation_deps = parser.get("compilation", "python").split('\n') + except Exception: + compilation_deps = [] try: opendr_deps = parser.get("runtime", "opendr").split('\n') except Exception: @@ -124,8 +128,9 @@ def get_dependencies(current_module): except Exception: pass + deps = [x for x in list(set(runtime_deps + compilation_deps)) if x != ''] # Add dependencies found (filter git-based ones and local ones) - for x in cur_deps: + for x in deps: if 'git' in x or '${OPENDR_HOME}' in x: skipped_dependencies.append(x) else: diff --git a/src/opendr/_version.py b/src/opendr/_version.py index 15ea2c3dc3..dc5083a9c9 100644 --- a/src/opendr/_version.py +++ b/src/opendr/_version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.1.1" +__version__ = "2.0.0" diff --git a/src/opendr/control/mobile_manipulation/dependencies.ini b/src/opendr/control/mobile_manipulation/dependencies.ini index 4302f72988..982226d3a4 100644 --- a/src/opendr/control/mobile_manipulation/dependencies.ini +++ b/src/opendr/control/mobile_manipulation/dependencies.ini @@ -11,8 +11,9 @@ python=vcstool # 'python' key expects a value using the Python requirements file format # https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 + protobuf<=3.20.0 tensorboard - numpy + numpy<=1.23.5 pyyaml matplotlib pyparsing @@ -21,5 +22,6 @@ python=torch==1.9.0 stable-baselines3==1.1.0 cloudpickle>=1.5.0 netifaces + defusedxml opendr=opendr-toolkit-engine diff --git a/src/opendr/control/mobile_manipulation/install_mobile_manipulation.sh b/src/opendr/control/mobile_manipulation/install_mobile_manipulation.sh index 48378d4e5f..092baf44a8 100755 --- a/src/opendr/control/mobile_manipulation/install_mobile_manipulation.sh +++ b/src/opendr/control/mobile_manipulation/install_mobile_manipulation.sh @@ -1,17 +1,17 @@ #!/bin/bash if [[ -z "$OPENDR_HOME" ]]; then - echo "OPENDR_HOME is not defined" - exit 1 + echo "OPENDR_HOME is not defined" + exit 1 fi if [[ -z "$ROS_DISTRO" ]]; then - echo "ROS_DISTRO is not defined" - exit 1 + echo "ROS_DISTRO is not defined" + exit 1 fi MODULE_PATH=${OPENDR_HOME}/src/opendr/control/mobile_manipulation -WS_PATH=${OPENDR_HOME}/projects/control/mobile_manipulation/mobile_manipulation_ws +WS_PATH=${OPENDR_HOME}/projects/python/control/mobile_manipulation/mobile_manipulation_ws ## ROS sudo apt-get update && sudo apt-get install -y \ diff --git a/src/opendr/control/single_demo_grasp/dependencies.ini b/src/opendr/control/single_demo_grasp/dependencies.ini index 9c31b905a8..68485d245e 100644 --- a/src/opendr/control/single_demo_grasp/dependencies.ini +++ b/src/opendr/control/single_demo_grasp/dependencies.ini @@ -10,6 +10,7 @@ python=torch==1.9.0 matplotlib>=2.2.2 imgaug==0.4.0 pillow>=8.3.2 + empy opendr=opendr-toolkit-engine diff --git a/src/opendr/control/single_demo_grasp/install_single_demo_grasp.sh b/src/opendr/control/single_demo_grasp/install_single_demo_grasp.sh index 4a9fceaa71..3cbcee5d67 100755 --- a/src/opendr/control/single_demo_grasp/install_single_demo_grasp.sh +++ b/src/opendr/control/single_demo_grasp/install_single_demo_grasp.sh @@ -1,20 +1,19 @@ #!/bin/bash if [[ -z "$OPENDR_HOME" ]]; then - echo "OPENDR_HOME is not 
defined" - exit 1 + echo "OPENDR_HOME is not defined" + exit 1 fi if [[ -z "$ROS_DISTRO" ]]; then - echo "ROS_DISTRO is not defined" - exit 1 + echo "ROS_DISTRO is not defined" + exit 1 fi MODULE_PATH=${OPENDR_HOME}/src/opendr/control/single_demo_grasp -WS_PATH=${OPENDR_HOME}/projects/control/single_demo_grasp/simulation_ws +WS_PATH=${OPENDR_HOME}/projects/python/control/single_demo_grasp/simulation_ws BRIDGE_PATH=${OPENDR_HOME}/projects/opendr_ws/src/ros_bridge - ## Moveit sudo apt install ros-${ROS_DISTRO}-moveit diff --git a/src/opendr/engine/data.py b/src/opendr/engine/data.py index 667027169c..d151645542 100644 --- a/src/opendr/engine/data.py +++ b/src/opendr/engine/data.py @@ -17,8 +17,6 @@ from abc import ABC, abstractmethod from opendr.engine.target import BoundingBoxList import numpy as np -import torch -from typing import Union class Data(ABC): @@ -428,7 +426,7 @@ class Video(Data): - returning a NumPy compatible representation of data (numpy()) """ - def __init__(self, data: Union[torch.Tensor, np.ndarray]=None): + def __init__(self, data=None): """Construct a new Video Args: diff --git a/src/opendr/engine/dependencies.ini b/src/opendr/engine/dependencies.ini index 06a2ea98b9..7ac496104f 100644 --- a/src/opendr/engine/dependencies.ini +++ b/src/opendr/engine/dependencies.ini @@ -4,5 +4,5 @@ python=torch==1.9.0 wheel Cython + numpy<=1.23.5 opencv-python==4.5.1.48 - diff --git a/src/opendr/engine/target.py b/src/opendr/engine/target.py index 9bc9dbacf1..652cba01a0 100644 --- a/src/opendr/engine/target.py +++ b/src/opendr/engine/target.py @@ -296,9 +296,13 @@ def data(self, data): raise ValueError("Pose expects either NumPy arrays or lists as data") def __str__(self): - """Matches kpt_names and keypoints x,y to get the best human-readable format for pose.""" + """ + Returns pose in a human-readable format, that contains the pose ID, detection confidence and + the matched kpt_names and keypoints x,y position. 
+ """ - out_string = "" + out_string = "Pose ID: " + str(self.id) + out_string += "\nDetection confidence: " + str(self.confidence) + "\nKeypoints name-position:\n" # noinspection PyUnresolvedReferences for name, kpt in zip(Pose.kpt_names, self.data.tolist()): out_string += name + ": " + str(kpt) + "\n" @@ -357,7 +361,7 @@ def mot(self, with_confidence=True, frame=-1): ], dtype=np.float32) else: result = np.array([ - self.frame, + frame, self.left, self.top, self.width, @@ -433,11 +437,13 @@ class BoundingBoxList(Target): """ def __init__( self, - boxes, + boxes=None, + image_id=-1, ): super().__init__() - self.data = boxes - self.confidence = np.mean([box.confidence for box in self.data]) + self.data = [] if boxes is None else boxes + self.image_id = image_id + self.__compute_confidence() @staticmethod def from_coco(boxes_coco, image_id=0): @@ -447,6 +453,8 @@ def from_coco(boxes_coco, image_id=0): for i in range(count): if 'segmentation' in boxes_coco[i]: segmentation = boxes_coco[i]['segmentation'] + else: + segmentation = [] if 'iscrowd' in boxes_coco[i]: iscrowd = boxes_coco[i]['iscrowd'] else: @@ -477,10 +485,17 @@ def mot(self, with_confidence=True): return result + def add_box(self, box: BoundingBox): + self.data.append(box) + self.__compute_confidence() + @property def boxes(self): return self.data + def __compute_confidence(self): + self.confidence = sum([box.confidence for box in self.data], 0) / max(1, len(self.data)) + def __getitem__(self, idx): return self.boxes[idx] @@ -574,11 +589,11 @@ class TrackingAnnotationList(Target): """ def __init__( self, - boxes, + annotations=None, ): super().__init__() - self.data = boxes - self.confidence = np.mean([box.confidence for box in self.data]) + self.data = [] if annotations is None else annotations + self.__compute_confidence() @staticmethod def from_mot(data): @@ -599,10 +614,17 @@ def mot(self, with_confidence=True): def bounding_box_list(self): return BoundingBoxList([box.bounding_box() for box in self.data]) + def add_annotation(self, annotation: TrackingAnnotation): + self.data.append(annotation) + self.__compute_confidence() + @property def boxes(self): return self.data + def __compute_confidence(self): + self.confidence = sum([box.confidence for box in self.data], 0) / max(1, len(self.data)) + def __getitem__(self, idx): return self.boxes[idx] @@ -719,12 +741,12 @@ class BoundingBox3DList(Target): """ def __init__( - self, - bounding_boxes_3d + self, + bounding_boxes_3d=None ): super().__init__() - self.data = bounding_boxes_3d - self.confidence = None if len(self.data) == 0 else np.mean([box.confidence for box in self.data]) + self.data = [] if bounding_boxes_3d is None else bounding_boxes_3d + self.__compute_confidence() @staticmethod def from_kitti(boxes_kitti): @@ -799,10 +821,17 @@ def kitti(self): return result + def add_box(self, box: BoundingBox3D): + self.data.append(box) + self.__compute_confidence() + @property def boxes(self): return self.data + def __compute_confidence(self): + self.confidence = sum([box.confidence for box in self.data], 0) / max(1, len(self.data)) + def __getitem__(self, idx): return self.boxes[idx] @@ -906,11 +935,11 @@ class TrackingAnnotation3DList(Target): """ def __init__( self, - tracking_bounding_boxes_3d + annotations_3d=None ): super().__init__() - self.data = tracking_bounding_boxes_3d - self.confidence = None if len(self.data) == 0 else np.mean([box.confidence for box in self.data]) + self.data = [] if annotations_3d is None else annotations_3d + self.__compute_confidence() 
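# The *List targets in this hunk replace np.mean over member confidences with an explicit
# running mean, sum(...) / max(1, len(...)), recomputed by add_box/add_annotation. This keeps
# an empty list valid: confidence becomes 0 instead of np.mean([]) returning NaN with a
# RuntimeWarning. A minimal sketch of the resulting behaviour, assuming the usual
# BoundingBox(name, left, top, width, height, score) constructor (its exact signature is not
# shown in this diff; only the confidence attribute matters here):
from opendr.engine.target import BoundingBox, BoundingBoxList

boxes = BoundingBoxList()        # an empty list is now allowed
print(boxes.confidence)          # 0.0 rather than NaN

boxes.add_box(BoundingBox(name=0, left=10, top=20, width=30, height=40, score=0.8))
boxes.add_box(BoundingBox(name=1, left=50, top=60, width=30, height=40, score=0.4))
print(boxes.confidence)          # running mean of member confidences: (0.8 + 0.4) / 2 = 0.6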
@staticmethod def from_kitti(boxes_kitti, ids, frames=None): @@ -1000,9 +1029,16 @@ def kitti(self, with_tracking_info=True): def boxes(self): return self.data + def add_annotation(self, annotation: TrackingAnnotation3D): + self.data.append(annotation) + self.__compute_confidence() + def bounding_box_3d_list(self): return BoundingBox3DList([box.bounding_box_3d() for box in self.data]) + def __compute_confidence(self): + self.confidence = sum([box.confidence for box in self.data], 0) / max(1, len(self.data)) + def __getitem__(self, idx): return self.boxes[idx] @@ -1068,6 +1104,14 @@ def numpy(self): # Since this class stores the data as NumPy arrays, we can directly return the data. return self.data + def opencv(self): + """ + Required to support the ros bridge for images. + :return: a NumPy-compatible representation of data + :rtype: numpy.ndarray + """ + return self.numpy() + def shape(self) -> Tuple[int, ...]: """ Returns the shape of the underlying NumPy array. diff --git a/src/opendr/perception/activity_recognition/README.md b/src/opendr/perception/activity_recognition/README.md index fdfc6a9c89..74fc966fe6 100644 --- a/src/opendr/perception/activity_recognition/README.md +++ b/src/opendr/perception/activity_recognition/README.md @@ -39,24 +39,32 @@ CoX3D constitute the family of X3D networks transformed to _Continual 3D CNNs_ f __BibTeX__: ```bibtex -@article{hedegaard2021continual, +@article{hedegaard2022continual, title={Continual 3D Convolutional Neural Networks for Real-time Processing of Videos}, - author={Lukas Hedegaard}, - year={2021} + author={Lukas Hedegaard, Alexandros Iosifidis}, + journal={European Conference on Computer Vision}, + year={2022} } ``` -## Datasets +### CoTransEnc +Continual Transformer Encoder supplies an efficient formulation of the Transformer Encoder for step-wise (temporal) inference. The tool provided here can be used on top of user-supplied features to perform time-series inference-tasks. -### UCF-101 -[UCF-101](https://www.crcv.ucf.edu/data/UCF101.php) is prepared by directly downloading and unpacking [data](http://storage.googleapis.com/thumos14_files/UCF101_videos.zip) and [annotations](https://www.crcv.ucf.edu/data/UCF101/UCF101TrainTestSplits-RecognitionTask.zip). +__BibTeX__: +```bibtex +@article{hedegaard2021continual, + title={Continual Transformers: Redundancy-Free Attention for Online Inference}, + author={Lukas Hedegaard, Arian Bakhtiarnia, Alexandros Iosifidis}, + year={2021} +} +``` -### HMDB-51 -[HMDB-51](https://serre-lab.clps.brown.edu/resource/hmdb-a-large-human-motion-database/) is prepared by directly downloading and unpacking [data](http://serre-lab.clps.brown.edu/wp-content/uploads/2013/10/hmdb51_org.rar) and [annotations](http://serre-lab.clps.brown.edu/wp-content/uploads/2013/10/test_train_splits.rar). + +## Datasets ### Kinetics-400 [Kinetics](https://deepmind.com/research/open-source/kinetics) is a large-scale dataset for Trimmed Human Activity Recognition, consisting of 10 second videos collected from YouTube, ranging over 400 classes. -Due to it's origin, a direct download of the complete dataset is not possible. +Due to its origin, a direct download of the complete dataset is not possible. Instead, a list of videos and corresponding labels can be downloaded [here](https://storage.googleapis.com/deepmind-media/Datasets/kinetics400.tar.gz), and a [YouTube Crawler](https://github.com/LukasHedegaard/youtube-dataset-downloader) can subsequently be employed to collect the videos one by one. 
Note: this process may take multiple days. diff --git a/src/opendr/perception/activity_recognition/__init__.py b/src/opendr/perception/activity_recognition/__init__.py index e3494869ff..71e35f4b54 100644 --- a/src/opendr/perception/activity_recognition/__init__.py +++ b/src/opendr/perception/activity_recognition/__init__.py @@ -1,6 +1,18 @@ from opendr.perception.activity_recognition.x3d.x3d_learner import X3DLearner from opendr.perception.activity_recognition.cox3d.cox3d_learner import CoX3DLearner +from opendr.perception.activity_recognition.continual_transformer_encoder.continual_transformer_encoder_learner import ( + CoTransEncLearner, +) -from opendr.perception.activity_recognition.datasets.kinetics import KineticsDataset, CLASSES +from opendr.perception.activity_recognition.datasets.kinetics import ( + KineticsDataset, + CLASSES, +) -__all__ = ['X3DLearner', 'CoX3DLearner', 'KineticsDataset', 'CLASSES'] +__all__ = [ + "X3DLearner", + "CoX3DLearner", + "CoTransEncLearner", + "KineticsDataset", + "CLASSES", +] diff --git a/src/opendr/perception/activity_recognition/continual_transformer_encoder/__init__.py b/src/opendr/perception/activity_recognition/continual_transformer_encoder/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/activity_recognition/continual_transformer_encoder/continual_transformer_encoder_learner.py b/src/opendr/perception/activity_recognition/continual_transformer_encoder/continual_transformer_encoder_learner.py new file mode 100644 index 0000000000..70fdbbf1bb --- /dev/null +++ b/src/opendr/perception/activity_recognition/continual_transformer_encoder/continual_transformer_encoder_learner.py @@ -0,0 +1,699 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
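# A minimal usage sketch for the CoTransEncLearner defined below, assuming per-token
# features of dimension 1024 are produced by an upstream backbone and a CUDA device is
# available. The constructor arguments mirror the signature in this module; the fit/infer
# calls are assumed to follow the standard OpenDR Learner interface and are not part of
# this file.
from opendr.perception.activity_recognition import CoTransEncLearner

learner = CoTransEncLearner(
    input_dims=1024,     # dimensionality of each pre-extracted feature token
    hidden_dims=1024,    # feed-forward projection size inside each encoder layer
    sequence_len=64,     # number of past tokens the continual encoder attends over
    num_layers=1,        # 1 or 2 transformer encoder layers are supported
    num_classes=22,
    device="cuda",
)
# learner.fit(train_dataset, val_dataset)      # assumed standard Learner workflow
# category = learner.infer(feature_vector)     # assumed step-wise, token-by-token inference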
+
+from functools import partial
+import json
+import torch
+import os
+import pickle
+from pathlib import Path
+from opendr.engine.learners import Learner
+from opendr.engine.helper.io import bump_version
+import onnxruntime as ort
+from collections import OrderedDict
+
+from opendr.engine.data import Timeseries, Vector
+from opendr.engine.datasets import Dataset
+from opendr.engine.target import Category
+
+from logging import getLogger
+from typing import Any, Union, Dict
+
+import pytorch_lightning as pl
+import continual as co
+from continual import onnx
+
+logger = getLogger(__name__)
+
+
+class CoTransEncLearner(Learner):
+    def __init__(
+        self,
+        lr=1e-2,
+        iters=10,  # Epochs
+        batch_size=64,
+        optimizer="sgd",
+        lr_schedule="",
+        network_head="classification",
+        num_layers=1,  # 1 or 2
+        input_dims=1024,
+        hidden_dims=1024,
+        sequence_len=64,
+        num_heads=8,
+        dropout=0.1,
+        num_classes=22,
+        positional_encoding_learned=False,
+        checkpoint_after_iter=0,
+        checkpoint_load_iter=0,
+        temp_path="",
+        device="cuda",
+        loss="cross_entropy",
+        weight_decay=1e-4,
+        momentum=0.9,
+        drop_last=True,
+        pin_memory=False,
+        num_workers=0,
+        seed=123,
+        *args,
+        **kwargs,
+    ):
+        """Initialise the CoTransEncLearner
+        This learner wraps the Continual Transformer Encoder with Recycling Positional Encoding
+        which optimises token-by-token predictions for temporal sequences.
+        It was proposed for Online Action Detection in
+        "L. Hedegaard, A. Bakhtiarnia, and A. Iosifidis: 'Continual Transformers: Redundancy-Free
+        Attention for Online Inference', 2022"
+        https://arxiv.org/abs/2201.06268
+
+        Args:
+            lr (float, optional): Learning rate during optimization. Defaults to 1e-2.
+            iters (int, optional): Number of epochs to train for. Defaults to 10.
+            optimizer (str, optional): Name of optimizer to use ("sgd" or "adam"). Defaults to "sgd".
+            lr_schedule (str, optional): Schedule for training the model. Only "ReduceLROnPlateau" is available currently.
+            network_head (str, optional): Head of network (only "classification" is currently available).
+                Defaults to "classification".
+            num_layers (int, optional): Number of Transformer Encoder layers (1 or 2). Defaults to 1.
+            input_dims (int, optional): Input token dimension. Defaults to 1024.
+            hidden_dims (int, optional): Hidden projection dimension. Defaults to 1024.
+            sequence_len (int, optional): Length of token sequence to consider. Defaults to 64.
+            num_heads (int, optional): Number of attention heads. Defaults to 8.
+            dropout (float, optional): Dropout probability. Defaults to 0.1.
+            num_classes (int, optional): Number of classes to predict. Defaults to 22.
+            positional_encoding_learned (bool, optional): Whether positional encoding is learned. Defaults to False.
+            checkpoint_after_iter (int, optional): Unused parameter. Defaults to 0.
+            checkpoint_load_iter (int, optional): Unused parameter. Defaults to 0.
+            temp_path (str, optional): Path in which to store temporary files. Defaults to "".
+            device (str, optional): Name of computational device ("cpu" or "cuda"). Defaults to "cuda".
+            weight_decay (float, optional): Weight decay used for optimization. Defaults to 1e-4.
+            momentum (float, optional): Momentum used for optimization. Defaults to 0.9.
+            drop_last (bool, optional): Drop last data point if a batch cannot be filled. Defaults to True.
+            pin_memory (bool, optional): Pin memory in dataloader. Defaults to False.
+            num_workers (int, optional): Number of workers in dataloader. Defaults to 0.
+            seed (int, optional): Random seed. Defaults to 123.
+ """ + # Pass the shared parameters on super's constructor so they can get initialized as class attributes + assert optimizer in {"sgd", "adam"}, "Supported optimizers are Adam and SGD." + assert network_head in { + "classification" + }, "Currently, only 'classification' head is supported." + + super(CoTransEncLearner, self).__init__( + lr=lr, + iters=iters, + batch_size=batch_size, + optimizer=optimizer, + lr_schedule=lr_schedule, + network_head=network_head, + temp_path=temp_path, + checkpoint_after_iter=checkpoint_after_iter, + checkpoint_load_iter=checkpoint_load_iter, + device=device, + ) + + assert num_layers in { + 1, + 2, + }, "Only 1 or 2 Transformer Encoder layers are supported." + self._num_layers = num_layers + self._positional_encoding_learned = positional_encoding_learned + self._input_dims = input_dims + self._hidden_dims = hidden_dims + self._sequence_len = sequence_len + self._num_heads = num_heads + self._dropout = dropout + self._num_classes = num_classes + self._weight_decay = weight_decay + self._momentum = momentum + self._drop_last = drop_last + self._pin_memory = pin_memory + self._num_workers = num_workers + self._loss = loss + self._ort_session = None + self._seed = seed + torch.manual_seed(self._seed) + + self.init_model() + + def init_model(self) -> torch.nn.Module: + """Initialise model with random parameters + + Returns: + torch.nn.Module: Continual Transformer Encoder with + Recycling Positional Encoding + """ + pos_enc = co.RecyclingPositionalEncoding( + embed_dim=self._input_dims, + num_embeds=self._sequence_len * 2 - 1, + learned=self._positional_encoding_learned, + ) + trans_enc = co.TransformerEncoder( + co.TransformerEncoderLayerFactory( + d_model=self._input_dims, + nhead=self._num_heads, + dim_feedforward=self._hidden_dims, + dropout=self._dropout, + sequence_len=self._sequence_len, + ), + num_layers=self._num_layers, + ) + lin = co.Linear(self._input_dims, self._num_classes, channel_dim=-1) + + self.model = co.Sequential( + OrderedDict( + [ + ("pos_enc", pos_enc), + ("trans_enc", trans_enc), + ( + "select", + co.Lambda( + fn=lambda x: x[:, :, -1], + forward_step_only_fn=lambda x: x, + takes_time=True, + ), + ), + ("lin", lin), + ] + ) + ) + + class AddIfTraining: + def __init__(slf, val: int): + slf.val = val + + def __add__(slf, other: int): + return other + (slf.val if self.model.training else 0) + + def __radd__(slf, other: int): + return slf.__add__(other) + + self.model[0].forward_update_index_steps = AddIfTraining(1) + self.model = self.model.to(device=self.device) + + self._plmodel = _LightningModuleWithCrossEntropy(self.model) + return self.model + + def save(self, path: Union[str, Path]): + """Save model weights and metadata to path. + + Args: + path (Union[str, Path]): Directory in which to save model weights and meta data. + + Returns: + self + """ + assert hasattr( + self, "model" + ), "Cannot save model because no model was found. Did you forget to call `__init__`?" 
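A construction sketch for the learner defined above; it uses only constructor arguments shown in the diff, with illustrative dimensions and `device="cpu"` so it runs without a GPU. As with standard multi-head attention, `input_dims` should be divisible by `num_heads`:

```python
from opendr.perception.activity_recognition import CoTransEncLearner

# Hypothetical feature dimensions: 256-dim tokens, a 32-step window, 10 classes.
learner = CoTransEncLearner(
    input_dims=256,
    hidden_dims=256,
    sequence_len=32,
    num_heads=8,      # 256 / 8 = 32 dims per attention head
    num_classes=10,
    batch_size=8,
    device="cpu",
)

# init_model() is called by the constructor, so the wrapped continual-inference
# model (RecyclingPositionalEncoding -> TransformerEncoder -> Linear) is ready.
print(learner.model)
```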
+ + root_path = Path(path) + root_path.mkdir(parents=True, exist_ok=True) + name = "cotransenc_weights" + ext = ".onnx" if self._ort_session else ".pth" + weights_path = bump_version(root_path / f"model_{name}{ext}") + meta_path = bump_version(root_path / f"{name}.json") + + logger.info(f"Saving model weights to {str(weights_path)}") + if self._ort_session: + self._save_onnx(weights_path) + else: + torch.save(self.model.state_dict(), weights_path) + + logger.info(f"Saving meta-data to {str(meta_path)}") + meta_data = { + "model_paths": weights_path.name, + "framework": "pytorch", + "format": "pth", + "has_data": False, + "inference_params": { + "network_head": self._network_head, + "num_layers": self._num_layers, + "input_dims": self._input_dims, + "hidden_dims": self._hidden_dims, + "sequence_len": self._sequence_len, + "num_heads": self._num_heads, + "dropout": self._dropout, + "num_classes": self._num_classes, + "positional_encoding_learned": self._positional_encoding_learned, + }, + "optimized": bool(self._ort_session), + "optimizer_info": { + "lr": self.lr, + "iters": self.iters, + "batch_size": self.batch_size, + "optimizer": self.optimizer, + "checkpoint_after_iter": self.checkpoint_after_iter, + "checkpoint_load_iter": self.checkpoint_load_iter, + "loss": self._loss, + "weight_decay": self._weight_decay, + "momentum": self._momentum, + "drop_last": self._drop_last, + "pin_memory": self._pin_memory, + "num_workers": self._num_workers, + "seed": self._seed, + "dropout": self._dropout, + }, + } + with open(str(meta_path), "w", encoding="utf-8") as f: + json.dump(meta_data, f, sort_keys=True, indent=4) + + return self + + def load(self, path: Union[str, Path]): + """Load model. + + Args: + path (Union[str, Path]): Path to metadata file in json format or path to model weights + + Returns: + self + """ + path = Path(path) + + # Allow direct loading of weights, omitting the metadatafile + if path.suffix in {".pyth", ".pth", ".onnx"}: + self._load_model_weights(path) + return self + if path.is_dir(): + path = path / "cotransenc_weights.json" + assert ( + path.is_file() and path.suffix == ".json" + ), "The provided metadata path should be a .json file" + + logger.debug(f"Loading CoTransEnc metadata from {str(path)}") + with open(path, "r") as f: + meta_data = json.load(f) + + inference_params = meta_data["inference_params"] + optimizer_info = meta_data["optimizer_info"] + + self.__init__( + lr=optimizer_info["lr"], + iters=optimizer_info["iters"], + batch_size=optimizer_info["batch_size"], + optimizer=optimizer_info["optimizer"], + device=getattr(self, "device", "cpu"), + network_head=inference_params["network_head"], + num_layers=inference_params["num_layers"], + input_dims=inference_params["input_dims"], + hidden_dims=inference_params["hidden_dims"], + sequence_len=inference_params["sequence_len"], + num_heads=inference_params["num_heads"], + num_classes=inference_params["num_classes"], + positional_encoding_learned=inference_params["positional_encoding_learned"], + dropout=optimizer_info["dropout"], + loss=optimizer_info["loss"], + checkpoint_after_iter=optimizer_info["checkpoint_after_iter"], + checkpoint_load_iter=optimizer_info["checkpoint_load_iter"], + weight_decay=optimizer_info["weight_decay"], + momentum=optimizer_info["momentum"], + drop_last=optimizer_info["drop_last"], + pin_memory=optimizer_info["pin_memory"], + num_workers=optimizer_info["num_workers"], + seed=optimizer_info["seed"], + ) + + weights_path = path.parent / meta_data["model_paths"] + 
self._load_model_weights(weights_path) + + return self + + def _load_model_weights(self, weights_path: Union[str, Path]): + """Load pretrained model weights + + Args: + weights_path (Union[str, Path]): Path to model weights file. + Type of file must be one of {".pyth", ".pth", ".onnx"} + """ + weights_path = Path(weights_path) + + assert weights_path.is_file() and weights_path.suffix in {".pyth", ".pth", ".onnx"}, ( + f"weights_path ({str(weights_path)}) should be a .pth or .onnx file." + "Pretrained weights can be downloaded using `self.download(...)`" + ) + if weights_path.suffix == ".onnx": + return self._load_onnx(weights_path.parent) + + logger.debug(f"Loading model weights from {str(weights_path)}") + + loaded_state_dict = torch.load(weights_path, map_location=torch.device(self.device)) + self.model.load_state_dict(loaded_state_dict, strict=False) + + return self + + @staticmethod + def download(path: Union[str, Path]): + """Download pretrained models. As this module + + Args: + path (Union[str, Path], optional): Directory in which to store model weights. Defaults to None. + model_names (Iterable[str], optional): iterable with model names to download. + The iterable may contain {"xs", "s", "m", "l"}. + Defaults to _MODEL_NAMES. + """ + raise NotImplementedError( + "No pretrained models available. Please train your own model using the `fit` function." + ) + + def reset(self): + pass + + def fit( + self, + dataset: Dataset, + val_dataset: Dataset = None, + epochs: int = None, + steps: int = None, + *args, + **kwargs, + ): + """Fit the model to a dataset + + Args: + dataset (Dataset): Training dataset + val_dataset (Dataset, optional): Validation dataset. + If none is given, validation steps are skipped. Defaults to None. + epochs (int, optional): Number of epochs. If none is supplied, self.iters will be used. Defaults to None. + steps (int, optional): Number of training steps to conduct. If none, this is determined by epochs. Defaults to None. + """ + + train_dataloader = torch.utils.data.DataLoader( + dataset, + batch_size=self.batch_size, + num_workers=self._num_workers, + shuffle=True, + pin_memory=self._pin_memory, + drop_last=self._drop_last, + ) + val_dataloader = ( + torch.utils.data.DataLoader( + val_dataset, + batch_size=self.batch_size, + num_workers=self._num_workers, + shuffle=False, + pin_memory=self._pin_memory, + drop_last=self._drop_last, + ) + if val_dataset + else None + ) + + optimisation_metric = "val/loss" if val_dataset else "train/loss" + + # Patch model optimizer + assert self.optimizer in { + "adam", + "sgd", + }, f"Invalid optimizer '{self.optimizer}'. Must be 'adam' or 'sgd'." 
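A save/load round-trip sketch for the methods above. The directory name is arbitrary; per the code, `save` writes `model_cotransenc_weights.pth` plus a `cotransenc_weights.json` metadata file, and `load` accepts the directory, the metadata file, or a raw weights file:

```python
from opendr.perception.activity_recognition import CoTransEncLearner

learner = CoTransEncLearner(
    input_dims=64, hidden_dims=64, sequence_len=16, num_classes=5, device="cpu"
)

# Persist weights and metadata to a (hypothetical) directory.
learner.save("./cotransenc_model")

# Restore into a fresh learner; load() re-initialises it from the saved
# metadata (dimensions, optimizer settings) before loading the weights.
restored = CoTransEncLearner(device="cpu")
restored.load("./cotransenc_model")
```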
+ if self.optimizer == "adam": + Optimizer = partial( + torch.optim.Adam, + lr=self.lr, + betas=(self._momentum, 0.999), + weight_decay=self._weight_decay, + ) + else: # self.optimizer == "sgd": + Optimizer = partial( + torch.optim.SGD, + lr=self.lr, + momentum=self._momentum, + weight_decay=self._weight_decay, + ) + + def configure_optimizers(): + # nonlocal Optimizer, optimisation_metric + optimizer = Optimizer(self.model.parameters()) + scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, patience=10) + return { + "optimizer": optimizer, + "lr_scheduler": scheduler, + "monitor": optimisation_metric, + } + + self._plmodel.configure_optimizers = configure_optimizers + + self.trainer = pl.Trainer( + max_epochs=epochs or self.iters, + gpus=1 if "cuda" in self.device else 0, + callbacks=[ + pl.callbacks.ModelCheckpoint( + save_top_k=1, + verbose=True, + monitor=optimisation_metric, + mode="min", + prefix="", + ) + ], + logger=_experiment_logger(), + ) + self.trainer.limit_train_batches = steps or self.trainer.limit_train_batches + self.trainer.limit_val_batches = steps or self.trainer.limit_val_batches + + self.trainer.fit(self._plmodel, train_dataloader, val_dataloader) + # self.model.to(self.device) + + def eval(self, dataset: Dataset, steps: int = None) -> Dict[str, Any]: + """Evaluate the model on the dataset + + Args: + dataset (Dataset): Dataset on which to evaluate model + steps (int, optional): Number of validation batches to evaluate. + If None, all batches are evaluated. Defaults to None. + + Returns: + Dict[str, Any]: Evaluation statistics + """ + test_dataloader = torch.utils.data.DataLoader( + dataset, + batch_size=self.batch_size, + num_workers=self._num_workers, + shuffle=False, + pin_memory=self._pin_memory, + drop_last=False, + ) + + if not hasattr(self, "trainer"): + self.trainer = pl.Trainer( + gpus=1 if "cuda" in self.device else 0, + logger=_experiment_logger(), + ) + self.trainer.limit_test_batches = steps or self.trainer.limit_test_batches + results = self.trainer.test(self._plmodel, test_dataloader) + results = { + "accuracy": results[-1]["test/acc"], + "loss": results[-1]["test/loss"], + } + return results + + def infer(self, x: Union[Timeseries, Vector, torch.Tensor]) -> Category: + """Run inference on a data point, x + + Args: + x (Union[Timeseries, Vector, torch.Tensor])): Either a single time instance (Vector) or a Timeseries. + x can also be passed as a torch.Tensor. 
+ + Returns: + Category: Network output + """ + + if isinstance(x, Vector): + x = torch.tensor(x.data) + # assert len(data) == self._input_dims + forward_mode = "step" + x = x.unsqueeze(0) # Add batch dim + elif isinstance(x, Timeseries): + x = torch.tensor(x.data).permute(1, 0) + assert x.shape == (self._input_dims, self._sequence_len) + forward_mode = "regular" + x = x.unsqueeze(0) # Add batch dim + else: + assert isinstance(x, torch.Tensor) + if len(x.shape) == 1: + assert x.shape == (self._input_dims,) + forward_mode = "step" + x = x.unsqueeze(0) # Add batch dim + elif len(x.shape) == 2: + if x.shape == (self.batch_size, self._input_dims): + forward_mode = "step" + else: + assert x.shape == (self._input_dims, self._sequence_len) + forward_mode = "regular" + x = x.unsqueeze(0) # Add batch dim + else: + assert len(x.shape) == 3 + assert x.shape == ( + self.batch_size, + self._input_dims, + self._sequence_len, + ) + forward_mode = "regular" + + x = x.to(device=self.device, dtype=torch.float) + + if self._ort_session is not None and self._ort_state is not None and forward_mode == "step": + inputs = { + "input": x.cpu().detach().numpy(), + **self._ort_state, + } + r, *next_state = self._ort_session.run(None, inputs) + r = torch.tensor(r) + self._ort_state = {k: v for k, v in zip(self._ort_state.keys(), next_state)} + else: + self.model.eval() + r = (self.model.forward if forward_mode == "regular" else self.model.forward_step)(x) + if isinstance(r, torch.Tensor): + r = torch.nn.functional.softmax(r[0], dim=-1) + result = Category(prediction=int(r.argmax(dim=0)), confidence=r) + else: + # In the "continual inference"-mode, the model needs to warm up by first seeing + # "sequence_len" time-steps. Until then, it will output co.TensorPlaceholder values + result = Category( + prediction=-1, + confidence=torch.zeros(self._num_classes, dtype=torch.float), + ) + return result + + def optimize(self, do_constant_folding=False): + """Optimize model execution. + This is accomplished by saving to the ONNX format and loading the optimized model. + + Args: + do_constant_folding (bool, optional): Whether to optimize constants. Defaults to False. + """ + if getattr(self.model, "_ort_session", None): + logger.info("Model is already optimized. Skipping redundant optimization") + return + + path = Path(self.temp_path or os.getcwd()) / "weights" + if not path.exists(): + self._save_onnx(path, do_constant_folding) + self._load_onnx(path) + + @property + def _example_input(self): + return torch.randn(1, self._input_dims, self._sequence_len).to(device=self.device) + + @_example_input.setter + def _example_input(self): + raise ValueError("_example_input is set thorugh 'sequence_len' and 'input_dims' parameters") + + def _save_onnx(self, path: Union[str, Path], do_constant_folding=False, verbose=False): + """Save model in the ONNX format + + Args: + path (Union[str, Path]): Directory in which to save ONNX model + do_constant_folding (bool, optional): Whether to optimize constants. Defaults to False. + """ + assert ( + int(torch.__version__.split(".")[1]) >= 10 + ), "ONNX optimization of the Continual Transformer Encoder requires torch >= 1.10.0." + + assert ( + int(getattr(co, "__version__", "0.0.0").split(".")[0]) >= 1 + ), "ONNX optimization of the Continual Transformer Encoder requires continual-inference >= 1.0.0." 
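An inference sketch matching the `infer` dispatch above, using plain `torch.Tensor` inputs: a 2-D `(input_dims, sequence_len)` tensor triggers a full-clip forward, while a 1-D `(input_dims,)` tensor triggers step-wise (continual) inference. The random features stand in for real, user-supplied embeddings:

```python
import torch
from opendr.perception.activity_recognition import CoTransEncLearner

learner = CoTransEncLearner(
    input_dims=64, hidden_dims=64, sequence_len=16, num_classes=5, device="cpu"
)

# Full-clip ("regular") inference on one (input_dims, sequence_len) tensor.
clip_features = torch.randn(64, 16)
category = learner.infer(clip_features)  # returns an opendr.engine.target.Category
print(category)

# Step-wise ("continual") inference, one 64-dim feature vector per time step.
# Until sequence_len steps have been seen, the model is still warming up and
# returns prediction == -1 with an all-zero confidence vector.
for t in range(20):
    step_result = learner.infer(torch.randn(64))
    print(t, step_result)
```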
+ + path = Path(path) + path.mkdir(exist_ok=True, parents=True) + + self.model.eval() + self.model.to(device="cpu") + + # Prepare state + state0 = None + with torch.no_grad(): + for i in range(self._sequence_len): + _, state0 = self.model._forward_step(self._example_input[:, :, i], state0) + state0 = co.utils.flatten(state0) + + # Export to ONNX + onnx_path = path / "cotransenc_weights.onnx" + logger.info(f"Saving model to ONNX format at {str(onnx_path)}") + onnx.export( + self.model, + (self._example_input[:, :, -1], *state0), + onnx_path, + input_names=["input"], + output_names=["output"], + do_constant_folding=do_constant_folding, + verbose=verbose, + opset_version=14, + ) + + # Save default state and name mappings for later use + state_path = path.parent / "cotransenc_state.pickle" + logger.info(f"Saving ONNX model states at {str(state_path)}") + omodel = onnx.OnnxWrapper(self.model) + state = {k: v.detach().numpy() for k, v in zip(omodel.state_input_names, state0)} + with open(state_path, "wb") as f: + pickle.dump(state, f) + + def _load_onnx(self, path: Union[str, Path]): + """Loads ONNX model into an onnxruntime inference session. + + Args: + path (Union[str, Path]): Path to ONNX model folder + """ + assert ( + int(getattr(ort, "__version__", "0.0.0").split(".")[1]) >= 11 + ), "ONNX inference of the Continual Transformer Encoder requires onnxruntime >= 1.11.0." + + onnx_path = path / "cotransenc_weights.onnx" + state_path = path.parent / "cotransenc_state.pickle" + + logger.info(f"Loading ONNX runtime inference session from {str(onnx_path)}") + self._ort_session = ort.InferenceSession(str(onnx_path)) + + logger.info(f"Loading ONNX state from {str(state_path)}") + with open(state_path, "rb") as f: + self._ort_state = pickle.load(f) + + +def _experiment_logger(): + return pl.loggers.TensorBoardLogger(save_dir=Path(os.getcwd()) / "logs", name="cotransenc") + + +def _accuracy(x, y): + return torch.sum(x.argmax(dim=1) == y) / len(y) + + +class _LightningModuleWithCrossEntropy(pl.LightningModule): + def __init__(self, module): + pl.LightningModule.__init__(self) + self.module = module + + def forward(self, *args, **kwargs): + return self.module.forward(*args, **kwargs) + + def training_step(self, batch, batch_idx): + x, y = batch + z = self.module(x) + loss = torch.nn.functional.cross_entropy(z, y) + self.log("train/loss", loss) + self.log("train/acc", _accuracy(z, y)) + return loss + + def validation_step(self, batch, batch_idx): + x, y = batch + z = self.forward(x) + loss = torch.nn.functional.cross_entropy(z, y) + self.log("val/loss", loss) + self.log("val/acc", _accuracy(z, y)) + return loss + + def test_step(self, batch, batch_idx): + x, y = batch + z = self.forward(x) + loss = torch.nn.functional.cross_entropy(z, y) + self.log("test/loss", loss) + self.log("test/acc", _accuracy(z, y)) + return loss diff --git a/src/opendr/perception/activity_recognition/cox3d/algorithm/conv.py b/src/opendr/perception/activity_recognition/cox3d/algorithm/conv.py deleted file mode 100644 index 8ddde77df0..0000000000 --- a/src/opendr/perception/activity_recognition/cox3d/algorithm/conv.py +++ /dev/null @@ -1,268 +0,0 @@ -""" -Copyright (c) Lukas Hedegaard. All Rights Reserved. -Included in the OpenDR Toolkit with permission from the author. 
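To round off the learner walk-through, a hedged sketch of ONNX-accelerated step-wise inference via `optimize`, assuming the version requirements asserted above are satisfied (torch >= 1.10, continual-inference >= 1.0, onnxruntime >= 1.11). The scratch directory is arbitrary and only holds the exported `cotransenc_weights.onnx` and its pickled initial state:

```python
import torch
from opendr.perception.activity_recognition import CoTransEncLearner

learner = CoTransEncLearner(
    input_dims=64, hidden_dims=64, sequence_len=16, num_classes=5,
    temp_path="./tmp", device="cpu",
)

# Export the continual model to ONNX and switch inference to onnxruntime.
learner.optimize()

# 1-D inputs now run through the ONNX session in step mode; the recurrent
# state is carried between calls by the learner itself.
for _ in range(16):
    result = learner.infer(torch.randn(64))
```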
-""" - - -from typing import Tuple - -import torch -import torch.nn.functional as F -from torch import Tensor -from torch.nn.modules.conv import _ConvNd, _reverse_repeat_tuple, _size_3_t, _triple - -from .utils import warn_once_if -from logging import getLogger - -from .utils import FillMode - -State = Tuple[Tensor, int] - -logger = getLogger(__name__) - - -class ConvCo3d(_ConvNd): - def __init__( - self, - in_channels: int, - out_channels: int, - kernel_size: _size_3_t, - stride: _size_3_t = 1, - padding: _size_3_t = 0, - dilation: _size_3_t = 1, - groups: int = 1, - bias: bool = True, - padding_mode: FillMode = "zeros", - temporal_fill: FillMode = "replicate", - ): - r"""Applies a recursive 3D convolution over an input signal composed of several input - planes. - - Assuming an input of shape `(B, C, T, H, W)`, it computes the convolution over one temporal instant `t` at a time - where `t` ∈ `range(T)`, and keeps an internal state. Two forward modes are supported here. - - `forward3d` operates identically to `Conv3d.forward` - - `forward` takes an input of shape `(B, C, H, W)`, and computes a single-frame output (B, C', H', W') based on - its internal state. On the first execution, the state is initialised with either ``'zeros'`` - (corresponding to a zero padding of kernel_size[0]-1) or with a `'replicate'`` of the first frame - depending on the choice of `temporal_fill`. `forward` also supports a functional-style exercution, - by passing a `prev_state` explicitely as parameters, and by optionally returning the updated - `next_state` via the `return_next_state` parameter. - NB: The output when recurrently applying forward will be delayed by the `kernel_size[0] - 1`. - - Args: - in_channels (int): Number of channels in the input image - out_channels (int): Number of channels produced by the convolution - kernel_size (int or tuple): Size of the convolving kernel - stride (int or tuple, optional): Stride of the convolution. - NB: stride > 1 over the first channel is notsupported. Default: 1 - padding (int or tuple, optional): Zero-padding added to all three sides of the input. - NB: padding over the first channel is not supported. Default: 0 - dilation (int or tuple, optional): Spacing between kernel elements. - NB: dilation > 1 over the first channel is not supported. Default: 1 - groups (int, optional): Number of blocked connections from input channels to output channels. Default: 1 - bias (bool, optional): If ``True``, adds a learnable bias to the output. Default: ``True`` - temporal_fill (string, optional): ``'zeros'`` or ``'replicate'`` (= "boring video"). - `temporal_fill` determines how state is initialised and which padding is applied during `forward3d` - along the temporal dimension. Default: ``'replicate'`` - - Attributes: - weight (Tensor): the learnable weights of the module of shape - :math:`(\text{out\_channels}, \frac{\text{in\_channels}}{\text{groups}},` - :math:`\text{kernel\_size[0]}, \text{kernel\_size[1]}, \text{kernel\_size[2]})`. - The values of these weights are sampled from - :math:`\mathcal{U}(-\sqrt{k}, \sqrt{k})` where - :math:`k = \frac{groups}{C_\text{in} * \prod_{i=0}^{2}\text{kernel\_size}[i]}` - bias (Tensor): the learnable bias of the module of shape (out_channels). 
If :attr:`bias` is ``True``, - then the values of these weights are - sampled from :math:`\mathcal{U}(-\sqrt{k}, \sqrt{k})` where - :math:`k = \frac{groups}{C_\text{in} * \prod_{i=0}^{2}\text{kernel\_size}[i]}` - state (List[Tensor]): a running buffer of partial computations from previous frames which are used for - the calculation of subsequent outputs. - - """ - kernel_size = _triple(kernel_size) - - padding = _triple(padding) - warn_once_if( - cond=padding[0] != 0, - msg=( - "Padding along the temporal dimension only affects the computation in `forward3d`. " - "In `forward` it is omitted." - ) - ) - - stride = _triple(stride) - assert stride[0] == 1, "Temporal stride > 1 is not supported currently." - - dilation = _triple(dilation) - assert dilation[0] == 1, "Temporal dilation > 1 is not supported currently." - - super(ConvCo3d, self).__init__( - in_channels, - out_channels, - kernel_size, - stride, - padding, - dilation, - transposed=False, - output_padding=_triple(0), - groups=groups, - bias=bias, - padding_mode=padding_mode, - ) - self._reversed_padding_repeated_twice = _reverse_repeat_tuple( - (self.kernel_size[0] - 1, *self.padding[1:]), - 2 - # (0, *self.padding[1:]), 2 - ) - - assert temporal_fill in {"zeros", "replicate"} - self.make_padding = {"zeros": torch.zeros_like, "replicate": torch.clone}[ - temporal_fill - ] - # init_state is called in `_forward` - - def init_state(self, first_output: Tensor,) -> State: - padding = self.make_padding(first_output) - state_buffer = padding.repeat(self.kernel_size[0] - 1, 1, 1, 1, 1, 1) - state_index = 0 - if not hasattr(self, "state_buffer"): - self.register_buffer("state_buffer", state_buffer, persistent=False) - return state_buffer, state_index - - def clean_state(self): - self.state_buffer = None - self.state_index = None - - def get_state(self): - if ( - hasattr(self, "state_buffer") and - self.state_buffer is not None and - hasattr(self, "state_index") and - self.state_buffer is not None - ): - return (self.state_buffer, self.state_index) - else: - return None - - @staticmethod - def from_3d( - module: torch.nn.Conv3d, temporal_fill: FillMode = "replicate" - ) -> "ConvCo3d": - stride = (1, *module.stride[1:]) - dilation = (1, *module.dilation[1:]) - for shape, name in zip([stride, dilation], ["stride", "dilation"]): - prev_shape = getattr(module, name) - if shape != prev_shape: - logger.warning( - f"Using {name} = {shape} for RConv3D (converted from {prev_shape})" - ) - - rmodule = ConvCo3d( - in_channels=module.in_channels, - out_channels=module.out_channels, - kernel_size=module.kernel_size, - stride=stride, - padding=module.padding, - dilation=dilation, - groups=module.groups, - bias=module.bias is not None, - padding_mode=module.padding_mode, - temporal_fill=temporal_fill, - ) - with torch.no_grad(): - rmodule.weight.copy_(module.weight) - if module.bias is not None: - rmodule.bias.copy_(module.bias) - return rmodule - - def forward(self, input: Tensor, update_state=True) -> Tensor: - output, (new_buffer, new_index) = self._forward(input, self.get_state()) - if update_state: - self.state_buffer = new_buffer - self.state_index = new_index - return output - - def _forward(self, input: Tensor, prev_state: State) -> Tuple[Tensor, State]: - assert len(input.shape) == 4, "Only a single frame should be passed at a time." 
- - # B, C, H, W -> B, C, 1, H, W - x = input.unsqueeze(2) - - if self.padding_mode == "zeros": - x = F.conv3d( - input=x, - weight=self.weight, - bias=None, - stride=self.stride, - padding=(self.kernel_size[0] - 1, *self.padding[1:]), - dilation=self.dilation, - groups=self.groups, - ) - else: - x = F.conv3d( - input=F.pad( - x, self._reversed_padding_repeated_twice, mode=self.padding_mode - ), - weight=self.weight, - bias=None, - stride=self.stride, - padding=(self.kernel_size[0] - 1, 0, 0), - dilation=self.dilation, - groups=self.groups, - ) - - x_out, x_rest = x[:, :, 0].clone(), x[:, :, 1:] - - # Prepare previous state - buffer, index = prev_state or self.init_state(x_rest) - - tot = len(buffer) - for i in range(tot): - x_out += buffer[(i + index) % tot, :, :, tot - i - 1] - - if self.bias is not None: - x_out += self.bias[None, :, None, None] - - # Update next state - next_buffer = buffer.clone() if self.training else buffer.detach() - next_buffer[index] = x_rest - next_index = (index + 1) % tot - return x_out, (next_buffer, next_index) - - def forward3d(self, input: Tensor): - assert ( - len(input.shape) == 5 - ), "A tensor of size B,C,T,H,W should be passed as input." - T = input.shape[2] - - pad_start = [self.make_padding(input[:, :, 0]) for _ in range(self.padding[0])] - inputs = [input[:, :, t] for t in range(T)] - pad_end = [self.make_padding(input[:, :, -1]) for _ in range(self.padding[0])] - - # Recurrently pass through, updating state - outs = [] - for t, i in enumerate([*pad_start, *inputs]): - o, (self.state_buffer, self.state_index) = self._forward( - i, self.get_state() - ) - if self.kernel_size[0] - 1 <= t: - outs.append(o) - - # Don't save state for the end-padding - tmp_buffer, tmp_index = self.get_state() - for t, i in enumerate(pad_end): - o, (tmp_buffer, tmp_index) = self._forward(i, (tmp_buffer, tmp_index)) - outs.append(o) - - if len(outs) > 0: - outs = torch.stack(outs, dim=2) - else: - outs = torch.tensor([]) - return outs - - @property - def delay(self): - return self.kernel_size[0] - 1 diff --git a/src/opendr/perception/activity_recognition/cox3d/algorithm/cox3d.py b/src/opendr/perception/activity_recognition/cox3d/algorithm/cox3d.py deleted file mode 100644 index 781e580fd4..0000000000 --- a/src/opendr/perception/activity_recognition/cox3d/algorithm/cox3d.py +++ /dev/null @@ -1,1118 +0,0 @@ -""" -Copyright (c) Lukas Hedegaard. All Rights Reserved. -Included in the OpenDR Toolit with permission from the author. -""" - -import math -import torch - -from .conv import ConvCo3d -from .delay import Delay -from .pooling import AdaptiveAvgPoolCo3d, AvgPoolCo3d -from .se import ReSe -from .utils import FillMode, unsqueezed, warn_once_if -from opendr.perception.activity_recognition.x3d.algorithm.operators import Swish -from opendr.perception.activity_recognition.x3d.algorithm.x3d import ( - _accuracy, _round_width, _round_repeats, init_weights -) -import torch.nn.functional as F -import pytorch_lightning as pl - - -class CoX3DTransform(torch.nn.Module): - """ - Recursive X3D transformation: 1x1x1, Tx3x3 (channelwise, num_groups=dim_in), 1x1x1, - augmented with (optional) SE (squeeze-excitation) on the 3x3x3 output. 
- T is the temporal kernel size (defaulting to 3) - """ - - def __init__( - self, - dim_in: int, - dim_out: int, - temp_kernel_size: int, - stride: int, - dim_inner: int, - num_groups: int, - stride_1x1=False, - inplace_relu=True, - eps=1e-5, - bn_mmt=0.1, - dilation=1, - norm_module=torch.nn.BatchNorm3d, - se_ratio=0.0625, - swish_inner=True, - block_idx=0, - temporal_window_size: int = 4, - temporal_fill: FillMode = "replicate", - ): - """ - Args: - dim_in (int): the channel dimensions of the input. - dim_out (int): the channel dimension of the output. - temp_kernel_size (int): the temporal kernel sizes of the middle - convolution in the bottleneck. - stride (int): the stride of the bottleneck. - dim_inner (int): the inner dimension of the block. - num_groups (int): number of groups for the convolution. num_groups=1 - is for standard ResNet like networks, and num_groups>1 is for - ResNeXt like networks. - stride_1x1 (bool): if True, apply stride to 1x1 conv, otherwise - apply stride to the 3x3 conv. - inplace_relu (bool): if True, calculate the relu on the original - input without allocating new memory. - eps (float): epsilon for batch norm. - bn_mmt (float): momentum for batch norm. Noted that BN momentum in - PyTorch = 1 - BN momentum in Caffe2. - dilation (int): size of dilation. - norm_module (torch.nn.Module): torch.nn.Module for the normalization layer. The - default is torch.nn.BatchNorm3d. - se_ratio (float): if > 0, apply SE to the Tx3x3 conv, with the SE - channel dimensionality being se_ratio times the Tx3x3 conv dim. - swish_inner (bool): if True, apply swish to the Tx3x3 conv, otherwise - apply ReLU to the Tx3x3 conv. - """ - super(CoX3DTransform, self).__init__() - self.temp_kernel_size = temp_kernel_size - self._inplace_relu = inplace_relu - self._eps = eps - self._bn_mmt = bn_mmt - self._se_ratio = se_ratio - self._swish_inner = swish_inner - self._stride_1x1 = stride_1x1 - self._block_idx = block_idx - self._construct( - dim_in, - dim_out, - stride, - dim_inner, - num_groups, - dilation, - norm_module, - temporal_window_size, - temporal_fill, - ) - - def _construct( - self, - dim_in, - dim_out, - stride, - dim_inner, - num_groups, - dilation, - norm_module, - temporal_window_size, - temporal_fill="replicate", - ): - (str1x1, str3x3) = (stride, 1) if self._stride_1x1 else (1, stride) - - # 1x1x1, BN, ReLU. - self.a = unsqueezed( - torch.nn.Conv3d( - dim_in, - dim_inner, - kernel_size=(1, 1, 1), - stride=(1, str1x1, str1x1), - padding=(0, 0, 0), - bias=False, - ) - ) - self.a_bn = unsqueezed( - norm_module(num_features=dim_inner, eps=self._eps, momentum=self._bn_mmt) - ) - - self.a_relu = torch.nn.ReLU(inplace=self._inplace_relu) - - # Tx3x3, BN, ReLU. - self.b = ConvCo3d( - dim_inner, - dim_inner, - kernel_size=(self.temp_kernel_size, 3, 3), - stride=(1, str3x3, str3x3), - padding=(int(self.temp_kernel_size // 2), dilation, dilation), - groups=num_groups, - bias=False, - dilation=(1, dilation, dilation), - temporal_fill=temporal_fill, - ) - - self.b_bn = unsqueezed( - norm_module(num_features=dim_inner, eps=self._eps, momentum=self._bn_mmt) - ) - - # Apply SE attention or not - use_se = True if (self._block_idx + 1) % 2 else False - if self._se_ratio > 0.0 and use_se: - self.se = ReSe( - temporal_window_size, - dim_in=dim_inner, - ratio=self._se_ratio, - temporal_fill=temporal_fill, - ) - - if self._swish_inner: - self.b_relu = Swish() - else: - self.b_relu = torch.nn.ReLU(inplace=self._inplace_relu) - - # 1x1x1, BN. 
- self.c = unsqueezed( - torch.nn.Conv3d( - dim_inner, - dim_out, - kernel_size=(1, 1, 1), - stride=(1, 1, 1), - padding=(0, 0, 0), - bias=False, - ) - ) - self.c_bn = unsqueezed( - norm_module(num_features=dim_out, eps=self._eps, momentum=self._bn_mmt) - ) - self.c_bn.transform_final_bn = True - - def forward(self, x): - for block in self.children(): - x = block(x) - return x - - def forward3d(self, x: torch.Tensor) -> torch.Tensor: - for block in self.children(): - if hasattr(block, "forward3d"): - x = block.forward3d(x) - else: - x = block.forward(x) - return x - - -class ReResBlock(torch.nn.Module): - """ - Residual block. - """ - - def __init__( - self, - dim_in, - dim_out, - temp_kernel_size, - stride, - trans_func, - dim_inner, - num_groups=1, - stride_1x1=False, - inplace_relu=True, - eps=1e-5, - bn_mmt=0.1, - dilation=1, - norm_module=torch.nn.BatchNorm3d, - block_idx=0, - drop_connect_rate=0.0, - temporal_window_size: int = 4, - temporal_fill: FillMode = "replicate", - ): - """ - ResBlock class constructs redisual blocks. More details can be found in: - Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun. - "Deep residual learning for image recognition." - https://arxiv.org/abs/1512.03385 - Args: - dim_in (int): the channel dimensions of the input. - dim_out (int): the channel dimension of the output. - temp_kernel_size (int): the temporal kernel sizes of the middle - convolution in the bottleneck. - stride (int): the stride of the bottleneck. - trans_func (string): transform function to be used to construct the - bottleneck. - dim_inner (int): the inner dimension of the block. - num_groups (int): number of groups for the convolution. num_groups=1 - is for standard ResNet like networks, and num_groups>1 is for - ResNeXt like networks. - stride_1x1 (bool): if True, apply stride to 1x1 conv, otherwise - apply stride to the 3x3 conv. - inplace_relu (bool): calculate the relu on the original input - without allocating new memory. - eps (float): epsilon for batch norm. - bn_mmt (float): momentum for batch norm. Noted that BN momentum in - PyTorch = 1 - BN momentum in Caffe2. - dilation (int): size of dilation. - norm_module (torch.nn.Module): torch.nn.Module for the normalization layer. The - default is torch.nn.BatchNorm3d. - drop_connect_rate (float): basic rate at which blocks are dropped, - linearly increases from input to output blocks. - """ - super(ReResBlock, self).__init__() - self._inplace_relu = inplace_relu - self._eps = eps - self._bn_mmt = bn_mmt - self._drop_connect_rate = drop_connect_rate - # Use skip connection with projection if dim or res change. 
- if (dim_in != dim_out) or (stride != 1): - self.branch1 = unsqueezed( - torch.nn.Conv3d( - dim_in, - dim_out, - kernel_size=1, - stride=(1, stride, stride), - padding=0, - bias=False, - dilation=1, - ) - ) - self.branch1_bn = unsqueezed( - norm_module(num_features=dim_out, eps=self._eps, momentum=self._bn_mmt) - ) - self.branch2 = trans_func( - dim_in, - dim_out, - temp_kernel_size, - stride, - dim_inner, - num_groups, - stride_1x1=stride_1x1, - inplace_relu=inplace_relu, - dilation=dilation, - norm_module=norm_module, - block_idx=block_idx, - temporal_window_size=temporal_window_size, - temporal_fill=temporal_fill, - ) - self.relu = torch.nn.ReLU(self._inplace_relu) - # temporal_fill="replicate" works much better than "zeros" here - self.delay = Delay(temp_kernel_size - 1, temporal_fill) - - def _drop_connect(self, x, drop_ratio): - """Apply dropconnect to x""" - keep_ratio = 1.0 - drop_ratio - mask = torch.empty([x.shape[0], 1, 1, 1], dtype=x.dtype, device=x.device) - mask.bernoulli_(keep_ratio) - x.div_(keep_ratio) - x.mul_(mask) - return x - - def forward(self, x): - delayed_x = self.delay(x) - f_x = self.branch2(x) - if self.training and self._drop_connect_rate > 0.0: - f_x = self._drop_connect(f_x, self._drop_connect_rate) - if hasattr(self, "branch1"): - output = self.branch1_bn(self.branch1(delayed_x)) + f_x - else: - output = delayed_x + f_x - output = self.relu(output) - return output - - def forward3d(self, x): - f_x = self.branch2.forward3d(x) - if self.training and self._drop_connect_rate > 0.0: - f_x = self._drop_connect(f_x, self._drop_connect_rate) - if hasattr(self, "branch1"): - x = self.branch1_bn.forward3d(self.branch1.forward3d(x)) + f_x - else: - x = x + f_x - x = self.relu(x) - return x - - -class ReResStage(torch.nn.Module): - """ - Stage of 3D ResNet. It expects to have one or more tensors as input for - single pathway (C2D, I3D, Slow), and multi-pathway (SlowFast) cases. - More details can be found here: - - Christoph Feichtenhofer, Haoqi Fan, Jitendra Malik, and Kaiming He. - "SlowFast networks for video recognition." - https://arxiv.org/pdf/1812.03982.pdf - """ - - def __init__( - self, - dim_in, - dim_out, - stride, - temp_kernel_sizes, - num_blocks, - dim_inner, - num_groups, - num_block_temp_kernel, - nonlocal_inds, - nonlocal_group, - nonlocal_pool, - dilation, - instantiation="softmax", - trans_func_name="x3d_transform", - stride_1x1=False, - inplace_relu=True, - norm_module=torch.nn.BatchNorm3d, - drop_connect_rate=0.0, - temporal_window_size: int = 4, - temporal_fill: FillMode = "replicate", - ): - """ - The `__init__` method of any subclass should also contain these arguments. - ResStage builds p streams, where p can be greater or equal to one. - Args: - dim_in (list): list of p the channel dimensions of the input. - Different channel dimensions control the input dimension of - different pathways. - dim_out (list): list of p the channel dimensions of the output. - Different channel dimensions control the input dimension of - different pathways. - temp_kernel_sizes (list): list of the p temporal kernel sizes of the - convolution in the bottleneck. Different temp_kernel_sizes - control different pathway. - stride (list): list of the p strides of the bottleneck. Different - stride control different pathway. - num_blocks (list): list of p numbers of blocks for each of the - pathway. - dim_inner (list): list of the p inner channel dimensions of the - input. Different channel dimensions control the input dimension - of different pathways. 
- num_groups (list): list of number of p groups for the convolution. - num_groups=1 is for standard ResNet like networks, and - num_groups>1 is for ResNeXt like networks. - num_block_temp_kernel (list): extent the temp_kernel_sizes to - num_block_temp_kernel blocks, then fill temporal kernel size - of 1 for the rest of the layers. - nonlocal_inds (list): If the tuple is empty, no nonlocal layer will - be added. If the tuple is not empty, add nonlocal layers after - the index-th block. - dilation (list): size of dilation for each pathway. - nonlocal_group (list): list of number of p nonlocal groups. Each - number controls how to fold temporal dimension to batch - dimension before applying nonlocal transformation. - https://github.com/facebookresearch/video-nonlocal-net. - instantiation (string): different instantiation for nonlocal layer. - Supports two different instantiation method: - "dot_product": normalizing correlation matrix with L2. - "softmax": normalizing correlation matrix with Softmax. - trans_func_name (string): name of the the transformation function apply - on the network. - norm_module (nn.Module): nn.Module for the normalization layer. The - default is nn.BatchNorm3d. - drop_connect_rate (float): basic rate at which blocks are dropped, - linearly increases from input to output blocks. - """ - super(ReResStage, self).__init__() - assert trans_func_name == "x3d_transform" - assert nonlocal_inds == [[]], "Nonlocal network not supported currently." - assert all( - ( - num_block_temp_kernel[i] <= num_blocks[i] - for i in range(len(temp_kernel_sizes)) - ) - ) - self.num_blocks = num_blocks - self.nonlocal_group = nonlocal_group - self._drop_connect_rate = drop_connect_rate - self.temp_kernel_sizes = [ - (temp_kernel_sizes[i] * num_blocks[i])[: num_block_temp_kernel[i]] + - [1] * (num_blocks[i] - num_block_temp_kernel[i]) - for i in range(len(temp_kernel_sizes)) - ] - assert ( - len( - { - len(dim_in), - len(dim_out), - len(temp_kernel_sizes), - len(stride), - len(num_blocks), - len(dim_inner), - len(num_groups), - len(num_block_temp_kernel), - len(nonlocal_inds), - len(nonlocal_group), - } - ) == 1 - ) - self.num_pathways = len(self.num_blocks) - self._construct( - dim_in, - dim_out, - stride, - dim_inner, - num_groups, - CoX3DTransform, - stride_1x1, - inplace_relu, - nonlocal_inds, - nonlocal_pool, - instantiation, - dilation, - norm_module, - temporal_window_size, - temporal_fill, - ) - - def _construct( - self, - dim_in, - dim_out, - stride, - dim_inner, - num_groups, - trans_func, - stride_1x1, - inplace_relu, - nonlocal_inds, - nonlocal_pool, - instantiation, - dilation, - norm_module, - temporal_window_size, - temporal_fill, - ): - for pathway in range(self.num_pathways): - for i in range(self.num_blocks[pathway]): - # Construct the block. 
- res_block = ReResBlock( - dim_in[pathway] if i == 0 else dim_out[pathway], - dim_out[pathway], - self.temp_kernel_sizes[pathway][i], - stride[pathway] if i == 0 else 1, - trans_func, - dim_inner[pathway], - num_groups[pathway], - stride_1x1=stride_1x1, - inplace_relu=inplace_relu, - dilation=dilation[pathway], - norm_module=norm_module, - block_idx=i, - drop_connect_rate=self._drop_connect_rate, - temporal_window_size=temporal_window_size, - temporal_fill=temporal_fill, - ) - self.add_module("pathway{}_res{}".format(pathway, i), res_block) - - def forward(self, inputs): - output = [] - for pathway in range(self.num_pathways): - x = inputs[pathway] - for i in range(self.num_blocks[pathway]): - m = getattr(self, "pathway{}_res{}".format(pathway, i)) - x = m(x) - output.append(x) - - return output - - def forward3d(self, inputs): - output = [] - for pathway in range(self.num_pathways): - x = inputs[pathway] - for i in range(self.num_blocks[pathway]): - m = getattr(self, "pathway{}_res{}".format(pathway, i)) - x = m.forward3d(x) - output.append(x) - - return output - - -class CoX3DHead(torch.nn.Module): - """ - X3D head. - This layer performs a fully-connected projection during training, when the - input size is 1x1x1. It performs a convolutional projection during testing - when the input size is larger than 1x1x1. If the inputs are from multiple - different pathways, the inputs will be concatenated after pooling. - """ - - def __init__( - self, - dim_in, - dim_inner, - dim_out, - num_classes, - pool_size, - dropout_rate=0.0, - act_func="softmax", - inplace_relu=True, - eps=1e-5, - bn_mmt=0.1, - norm_module=torch.nn.BatchNorm3d, - bn_lin5_on=False, - temporal_window_size: int = 4, - temporal_fill: FillMode = "replicate", - ): - """ - The `__init__` method of any subclass should also contain these - arguments. - X3DHead takes a 5-dim feature tensor (BxCxTxHxW) as input. - - Args: - dim_in (float): the channel dimension C of the input. - num_classes (int): the channel dimensions of the output. - pool_size (float): a single entry list of kernel size for - spatiotemporal pooling for the TxHxW dimensions. - dropout_rate (float): dropout rate. If equal to 0.0, perform no - dropout. - act_func (string): activation function to use. 'softmax': applies - softmax on the output. 'sigmoid': applies sigmoid on the output. - inplace_relu (bool): if True, calculate the relu on the original - input without allocating new memory. - eps (float): epsilon for batch norm. - bn_mmt (float): momentum for batch norm. Noted that BN momentum in - PyTorch = 1 - BN momentum in Caffe2. - norm_module (torch.nn.Module): torch.nn.Module for the normalization layer. The - default is torch.nn.BatchNorm3d. - bn_lin5_on (bool): if True, perform normalization on the features - before the classifier. 
- """ - super(CoX3DHead, self).__init__() - self.pool_size = pool_size - self.dropout_rate = dropout_rate - self.num_classes = num_classes - self.act_func = act_func - self.eps = eps - self.bn_mmt = bn_mmt - self.inplace_relu = inplace_relu - self.bn_lin5_on = bn_lin5_on - self._construct_head( - dim_in, - dim_inner, - dim_out, - norm_module, - temporal_window_size, - temporal_fill, - ) - - def _construct_head( - self, - dim_in, - dim_inner, - dim_out, - norm_module, - temporal_window_size, - temporal_fill, - ): - - self.conv_5 = unsqueezed( - torch.nn.Conv3d( - dim_in, - dim_inner, - kernel_size=(1, 1, 1), - stride=(1, 1, 1), - padding=(0, 0, 0), - bias=False, - ) - ) - self.conv_5_bn = unsqueezed( - norm_module(num_features=dim_inner, eps=self.eps, momentum=self.bn_mmt) - ) - self.conv_5_relu = torch.nn.ReLU(self.inplace_relu) - - if self.pool_size is None: - self.avg_pool = AdaptiveAvgPoolCo3d(1, temporal_fill, (1, 1)) - else: - self.avg_pool = AvgPoolCo3d( - self.pool_size[0], temporal_fill, 1, self.pool_size[1:], stride=1 - ) - - self.lin_5 = unsqueezed( - torch.nn.Conv3d( - dim_inner, - dim_out, - kernel_size=(1, 1, 1), - stride=(1, 1, 1), - padding=(0, 0, 0), - bias=False, - ) - ) - if self.bn_lin5_on: - self.lin_5_bn = unsqueezed( - norm_module(num_features=dim_out, eps=self.eps, momentum=self.bn_mmt) - ) - self.lin_5_relu = torch.nn.ReLU(self.inplace_relu) - - if self.dropout_rate > 0.0: - self.dropout = torch.nn.Dropout(self.dropout_rate) - # Perform FC in a fully convolutional manner. The FC layer will be - # initialized with a different std comparing to convolutional layers. - self.projection = torch.nn.Linear(dim_out, self.num_classes, bias=True) - - # Softmax for evaluation and testing. - if self.act_func == "softmax": - self.act = unsqueezed(torch.nn.Softmax(dim=4)) - elif self.act_func == "sigmoid": - self.act = torch.nn.Sigmoid() - else: - raise NotImplementedError( - "{} is not supported as an activation" "function.".format(self.act_func) - ) - - def forward(self, inputs): - # In its current design the X3D head is only useable for a single - # pathway input. - assert len(inputs) == 1, "Input tensor does not contain 1 pathway" - x = self.conv_5(inputs[0]) - x = self.conv_5_bn(x) - x = self.conv_5_relu(x) - x = self.avg_pool(x) - - x = self.lin_5(x) - if self.bn_lin5_on: - x = self.lin_5_bn(x) - x = self.lin_5_relu(x) - - # (N, C, T, H, W) -> (N, T, H, W, C). - # x = x.permute((0, 2, 3, 4, 1)) - # Compatible with recursive conversion: - x = x.unsqueeze(-1).transpose(-1, 1).squeeze(1) - - # Perform dropout. - if hasattr(self, "dropout"): - x = self.dropout(x) - x = self.projection(x) - - # Performs fully convolutional inference. - if not self.training: - x = self.act(x) - x = x.mean([1, 2]) - - x = x.view(x.shape[0], -1) - return x - - def forward3d(self, inputs): - # In its current design the X3D head is only usable for a single - # pathway input. - assert len(inputs) == 1, "Input tensor does not contain 1 pathway" - x = self.conv_5.forward3d(inputs[0]) - x = self.conv_5_bn.forward3d(x) - x = self.conv_5_relu(x) - x = self.avg_pool.forward3d(x) - - x = self.lin_5.forward3d(x) - if self.bn_lin5_on: - x = self.lin_5_bn.forward3d(x) - x = self.lin_5_relu(x) - - # (N, C, T, H, W) -> (N, T, H, W, C). - # x = x.permute((0, 2, 3, 4, 1)) - # Compatible with recursive conversion: - x = x.unsqueeze(-1).transpose(-1, 1).squeeze(1) - - # Perform dropout. - if hasattr(self, "dropout"): - x = self.dropout(x) - x = self.projection(x) - - # Performs fully convolutional inference. 
- if not self.training: - x = self.act.forward3d(x) - x = x.mean([1, 2, 3]) - - x = x.view(x.shape[0], -1) - return x - - -class CoX3DStem(torch.nn.Module): - """ - X3D's 3D stem module. - Performs a spatial followed by a depthwise temporal Convolution, BN, and Relu following by a - spatiotemporal pooling. - """ - - def __init__( - self, - dim_in, - dim_out, - kernel, - stride, - padding, - inplace_relu=True, - eps=1e-5, - bn_mmt=0.1, - norm_module=torch.nn.BatchNorm3d, - temporal_window_size: int = 4, - temporal_fill: FillMode = "replicate", - ): - """ - The `__init__` method of any subclass should also contain these arguments. - - Args: - dim_in (int): the channel dimension of the input. Normally 3 is used - for rgb input, and 2 or 3 is used for optical flow input. - dim_out (int): the output dimension of the convolution in the stem - layer. - kernel (list): the kernel size of the convolution in the stem layer. - temporal kernel size, height kernel size, width kernel size in - order. - stride (list): the stride size of the convolution in the stem layer. - temporal kernel stride, height kernel size, width kernel size in - order. - padding (int): the padding size of the convolution in the stem - layer, temporal padding size, height padding size, width - padding size in order. - inplace_relu (bool): calculate the relu on the original input - without allocating new memory. - eps (float): epsilon for batch norm. - bn_mmt (float): momentum for batch norm. Noted that BN momentum in - PyTorch = 1 - BN momentum in Caffe2. - norm_module (torch.nn.Module): torch.nn.Module for the normalization layer. The - default is torch.nn.BatchNorm3d. - """ - super(CoX3DStem, self).__init__() - self.kernel = kernel - self.stride = stride - self.padding = padding - self.inplace_relu = inplace_relu - self.eps = eps - self.bn_mmt = bn_mmt - # Construct the stem layer. - self._construct_stem(dim_in, dim_out, norm_module, temporal_fill) - - def _construct_stem(self, dim_in, dim_out, norm_module, temporal_fill): - self.conv_xy = unsqueezed( - torch.nn.Conv3d( - dim_in, - dim_out, - kernel_size=(1, self.kernel[1], self.kernel[2]), - stride=(1, self.stride[1], self.stride[2]), - padding=(0, self.padding[1], self.padding[2]), - bias=False, - ) - ) - self.conv = ConvCo3d( - dim_out, - dim_out, - kernel_size=(self.kernel[0], 1, 1), - stride=(self.stride[0], 1, 1), - padding=(self.padding[0], 0, 0), - bias=False, - groups=dim_out, - temporal_fill=temporal_fill, - ) - self.bn = unsqueezed( - norm_module(num_features=dim_out, eps=self.eps, momentum=self.bn_mmt) - ) - self.relu = torch.nn.ReLU(self.inplace_relu) - - def forward(self, x): - x = self.conv_xy(x) - x = self.conv(x) - x = self.bn(x) - x = self.relu(x) - return x - - def forward3d(self, x): - x = self.conv_xy.forward3d(x) - x = self.conv.forward3d(x) - x = self.bn.forward3d(x) - x = self.relu(x) - return x - - -class ReVideoModelStem(torch.nn.Module): - """ - Video 3D stem module. Provides stem operations of Conv, BN, ReLU, MaxPool - on input data tensor for one or multiple pathways. - """ - - def __init__( - self, - dim_in, - dim_out, - kernel, - stride, - padding, - inplace_relu=True, - eps=1e-5, - bn_mmt=0.1, - norm_module=torch.nn.BatchNorm3d, - stem_func_name="x3d_stem", - temporal_window_size: int = 4, - temporal_fill: FillMode = "replicate", - ): - """ - The `__init__` method of any subclass should also contain these - arguments. List size of 1 for single pathway models (C2D, I3D, Slow - and etc), list size of 2 for two pathway models (SlowFast). 
- - Args: - dim_in (list): the list of channel dimensions of the inputs. - dim_out (list): the output dimension of the convolution in the stem - layer. - kernel (list): the kernels' size of the convolutions in the stem - layers. Temporal kernel size, height kernel size, width kernel - size in order. - stride (list): the stride sizes of the convolutions in the stem - layer. Temporal kernel stride, height kernel size, width kernel - size in order. - padding (list): the paddings' sizes of the convolutions in the stem - layer. Temporal padding size, height padding size, width padding - size in order. - inplace_relu (bool): calculate the relu on the original input - without allocating new memory. - eps (float): epsilon for batch norm. - bn_mmt (float): momentum for batch norm. Noted that BN momentum in - PyTorch = 1 - BN momentum in Caffe2. - norm_module (torch.nn.Module): torch.nn.Module for the normalization layer. The - default is torch.nn.BatchNorm3d. - stem_func_name (string): name of the the stem function applied on - input to the network. - """ - super(ReVideoModelStem, self).__init__() - - assert ( - len({len(dim_in), len(dim_out), len(kernel), len(stride), len(padding)}) == 1 - ), "Input pathway dimensions are not consistent." - self.num_pathways = len(dim_in) - self.kernel = kernel - self.stride = stride - self.padding = padding - self.inplace_relu = inplace_relu - self.eps = eps - self.bn_mmt = bn_mmt - - # Construct the stem layer. - assert ( - stem_func_name == "x3d_stem" - ), "Currently, only 'x3d_stem' stem func is implemented." - for pathway in range(len(dim_in)): - stem = CoX3DStem( - dim_in[pathway], - dim_out[pathway], - self.kernel[pathway], - self.stride[pathway], - self.padding[pathway], - self.inplace_relu, - self.eps, - self.bn_mmt, - norm_module, - temporal_window_size=temporal_window_size, - temporal_fill=temporal_fill, - ) - self.add_module("pathway{}_stem".format(pathway), stem) - - def forward(self, x): - assert ( - len(x) == self.num_pathways - ), "Input tensor does not contain {} pathway".format(self.num_pathways) - for pathway in range(len(x)): - m = getattr(self, "pathway{}_stem".format(pathway)) - x[pathway] = m(x[pathway]) - return x - - def forward3d(self, x): - assert ( - len(x) == self.num_pathways - ), "Input tensor does not contain {} pathway".format(self.num_pathways) - for pathway in range(len(x)): - m = getattr(self, "pathway{}_stem".format(pathway)) - x[pathway] = m.forward3d(x[pathway]) - return x - - -class CoX3D(pl.LightningModule): - """ - Recurrent X3D model, - adapted from https://github.com/facebookresearch/SlowFast - - Christoph Feichtenhofer. - "X3D: Expanding Architectures for Efficient Video Recognition." 
- https://arxiv.org/abs/2004.04730 - """ - - def __init__( - self, - dim_in: int, - image_size: int, - frames_per_clip: int, - num_classes: int, - conv1_dim: int, - conv5_dim: int, - num_groups: int, - width_per_group: int, - width_factor: float, - depth_factor: float, - bottleneck_factor: float, - use_channelwise_3x3x3: bool, - dropout_rate: float, - head_activation: str, - head_batchnorm: bool, - fc_std_init: float, - final_batchnorm_zero_init: bool, - temporal_fill: FillMode = "replicate", - loss_name="cross_entropy", - ): - super().__init__() - self.temporal_window_size = frames_per_clip - self.loss_name = loss_name - - exp_stage = 2.0 - self.norm_module = torch.nn.BatchNorm3d # Will be unsqueezed - self.dim_conv1 = conv1_dim - self.dim_res2 = ( - _round_width(self.dim_conv1, exp_stage, divisor=8) - if False # hparams.X3D.SCALE_RES2 - else self.dim_conv1 - ) - self.dim_res3 = _round_width(self.dim_res2, exp_stage, divisor=8) - self.dim_res4 = _round_width(self.dim_res3, exp_stage, divisor=8) - self.dim_res5 = _round_width(self.dim_res4, exp_stage, divisor=8) - - self.block_basis = [ - # blocks, c, stride - [1, self.dim_res2, 2], - [2, self.dim_res3, 2], - [5, self.dim_res4, 2], - [3, self.dim_res5, 2], - ] - - num_groups = num_groups - width_per_group = width_per_group - dim_inner = num_groups * width_per_group - - w_mul = width_factor - d_mul = depth_factor - dim_res1 = _round_width(self.dim_conv1, w_mul) - - # Basis of temporal kernel sizes for each of the stage. - temp_kernel = [ - [[5]], # conv1 temporal kernels. - [[3]], # res2 temporal kernels. - [[3]], # res3 temporal kernels. - [[3]], # res4 temporal kernels. - [[3]], # res5 temporal kernels. - ] - - self.s1 = ReVideoModelStem( - dim_in=[dim_in], - dim_out=[dim_res1], - kernel=[temp_kernel[0][0] + [3, 3]], - stride=[[1, 2, 2]], - # padding=[[0, 1, 1]], - padding=[[temp_kernel[0][0][0] // 2, 1, 1]], - norm_module=self.norm_module, - stem_func_name="x3d_stem", - temporal_window_size=frames_per_clip, - temporal_fill=temporal_fill, - ) - - dim_in = dim_res1 - dim_out = dim_in - for stage, block in enumerate(self.block_basis): - dim_out = _round_width(block[1], w_mul) - dim_inner = int(bottleneck_factor * dim_out) - - n_rep = _round_repeats(block[0], d_mul) - prefix = "s{}".format(stage + 2) # start w res2 to follow convention - - s = ReResStage( - dim_in=[dim_in], - dim_out=[dim_out], - dim_inner=[dim_inner], - temp_kernel_sizes=temp_kernel[1], - stride=[block[2]], - num_blocks=[n_rep], - num_groups=[dim_inner] if use_channelwise_3x3x3 else [num_groups], - num_block_temp_kernel=[n_rep], - nonlocal_inds=[[]], - nonlocal_group=[1], - nonlocal_pool=[[1, 2, 2], [1, 2, 2]], - instantiation="dot_product", - trans_func_name="x3d_transform", - stride_1x1=False, - norm_module=self.norm_module, - dilation=[1], - drop_connect_rate=0.0, - temporal_window_size=frames_per_clip, - temporal_fill=temporal_fill, - ) - dim_in = dim_out - self.add_module(prefix, s) - - spat_sz = int(math.ceil(image_size / 32.0)) - self.head = CoX3DHead( - dim_in=dim_out, - dim_inner=dim_inner, - dim_out=conv5_dim, - num_classes=num_classes, - pool_size=(frames_per_clip, spat_sz, spat_sz), - dropout_rate=dropout_rate, - act_func=head_activation, - bn_lin5_on=bool(head_batchnorm), - temporal_window_size=4, - temporal_fill=temporal_fill, - ) - init_weights(self, fc_std_init, bool(final_batchnorm_zero_init)) - - def receptive_field(self): - if not hasattr(self, "_receptive_field"): - self._receptive_field = sum( - [m.kernel_size[0] - 1 for m in self.modules() if 
"ConvCo3d" in str(type(m))] - ) + self.temporal_window_size - return self._receptive_field - - def clean_model_state(self): - for m in self.modules(): - if hasattr(m, "clean_state"): - m.clean_state() - - def forward(self, x: torch.Tensor): - """Pass Image through the network - - Args: - x (torch.Tensor): Image batch of shape (B, 3, H, W) - - Returns: - torch.Tensor: Class prediction - """ - if getattr(self, "_current_input_shape", None) != x.shape: - self.clean_model_state() - self._current_input_shape = x.shape - - x = [x] - for module in self.children(): - x = module(x) - return x - - def forward_video(self, x: torch.Tensor) -> torch.Tensor: - """Pass Video through the network. Only the prediction for the last frame is returned. - - Args: - x (torch.Tensor): Video batch of shape (B, 3, T, H, W) - - Returns: - torch.Tensor: Class prediction - """ - T = x.shape[2] - - warn_once_if( - cond=self.receptive_field() > T, - msg=( - "Input clip has length smaller than the model receptive field. " - "This may yield worse predictions than expected because the network " - "is operating within its transient response." - ) - ) - - for i in range(T - 1): - self.forward(x[:, :, i]) - - result = self.forward(x[:, :, -1]) - return result - - def training_step(self, batch, batch_idx): - x, y = batch - x = self.forward_video(x) - loss = getattr(F, self.loss_name, F.cross_entropy)(x, y) - self.log('train/loss', loss) - self.log('train/acc', _accuracy(x, y)) - return loss - - def validation_step(self, batch, batch_idx): - x, y = batch - x = self.forward_video(x) - loss = getattr(F, self.loss_name, F.cross_entropy)(x, y) - self.log('val/loss', loss) - self.log('val/acc', _accuracy(x, y)) - return loss - - def test_step(self, batch, batch_idx): - x, y = batch - x = self.forward_video(x) - loss = getattr(F, self.loss_name, F.cross_entropy)(x, y) - self.log('test/loss', loss) - self.log('test/acc', _accuracy(x, y)) - return loss diff --git a/src/opendr/perception/activity_recognition/cox3d/algorithm/delay.py b/src/opendr/perception/activity_recognition/cox3d/algorithm/delay.py deleted file mode 100644 index 56709ffe27..0000000000 --- a/src/opendr/perception/activity_recognition/cox3d/algorithm/delay.py +++ /dev/null @@ -1,83 +0,0 @@ -""" -Copyright (c) Lukas Hedegaard. All Rights Reserved. -Included in the OpenDR Toolit with permission from the author. 
-""" - -import torch -from torch import Tensor -from typing import Tuple -from logging import getLogger -from .utils import FillMode - -State = Tuple[Tensor, int] - -logger = getLogger(__name__) - - -class Delay(torch.nn.Module): - def __init__( - self, window_size: int, temporal_fill: FillMode = "replicate", - ): - assert window_size > 0 - assert temporal_fill in {"zeros", "replicate"} - self.window_size = window_size - self.make_padding = {"zeros": torch.zeros_like, "replicate": torch.clone}[ - temporal_fill - ] - - super(Delay, self).__init__() - # state is initialised in self.forward - - def init_state(self, first_output: Tensor,) -> State: - padding = self.make_padding(first_output) - state_buffer = torch.stack([padding for _ in range(self.window_size)], dim=0) - state_index = 0 - if not hasattr(self, "state_buffer"): - self.register_buffer("state_buffer", state_buffer, persistent=False) - return state_buffer, state_index - - def clean_state(self): - self.state_buffer = None - self.state_index = None - - def get_state(self): - if ( - hasattr(self, "state_buffer") and - self.state_buffer is not None and - hasattr(self, "state_index") and - self.state_buffer is not None - ): - return (self.state_buffer, self.state_index) - else: - return None - - def forward3d(self, input: Tensor) -> Tensor: - # Pass into delay line, but discard output - self.forward(input) - - # No delay during forward3d - return input - - def forward(self, input: Tensor) -> Tensor: - output, (self.state_buffer, self.state_index) = self._forward( - input, self.get_state() - ) - return output - - def _forward(self, input: Tensor, prev_state: State) -> Tuple[Tensor, State]: - assert len(input.shape) == 4, "Only a single frame should be passed at a time." - - if prev_state is None: - buffer, index = self.init_state(input) - else: - buffer, index = prev_state - - # Get output - output = buffer[index] - - # Update state - new_buffer = buffer.clone() if self.training else buffer.detach() - new_index = (index + 1) % self.window_size - new_buffer[(index - 1) % self.window_size] = input - - return output, (new_buffer, new_index) diff --git a/src/opendr/perception/activity_recognition/cox3d/algorithm/pooling.py b/src/opendr/perception/activity_recognition/cox3d/algorithm/pooling.py deleted file mode 100644 index 0a3e1af84a..0000000000 --- a/src/opendr/perception/activity_recognition/cox3d/algorithm/pooling.py +++ /dev/null @@ -1,277 +0,0 @@ -""" -Copyright (c) Lukas Hedegaard. All Rights Reserved. -Included in the OpenDR Toolit with permission from the author. 
-""" - -from typing import Tuple, Union - -import torch -from torch import Tensor -from torch.nn.modules.pooling import ( - AdaptiveAvgPool1d, - AdaptiveAvgPool2d, - AdaptiveAvgPool3d, - AdaptiveMaxPool1d, - AdaptiveMaxPool2d, - AdaptiveMaxPool3d, - AvgPool2d, - AvgPool3d, - MaxPool2d, - MaxPool3d, - _triple, -) - -from logging import getLogger - -from .utils import FillMode - -State = Tuple[Tensor, int] -Pool2D = Union[AvgPool2d, MaxPool2d, AdaptiveAvgPool2d, AdaptiveMaxPool2d] - - -logger = getLogger(__name__) - -__all__ = [ - "AvgPoolCo3d", - "MaxPoolCo3d", - "AdaptiveAvgPoolCo3d", - "AdaptiveMaxPoolCo3d", - "convert_avgpool3d", - "convert_maxpool3d", - "convert_adaptiveavgpool3d", - "convert_adaptivemaxpool3d", -] - - -def RecursivelyWindowPooled(cls: Pool2D) -> torch.nn.Module: # noqa: C901 - """Wraps a pooling module to create a recursive version which pools across execusions - - Args: - cls (Pool2D): A 2D pooling Module - """ - assert cls in {AdaptiveAvgPool2d, MaxPool2d, AvgPool2d, AdaptiveMaxPool2d} - - class RePooled(cls): - def __init__( - self, - window_size: int, - temporal_fill: FillMode = "replicate", - temporal_dilation: int = 1, - *args, - **kwargs, - ): - assert window_size > 0 - assert temporal_fill in {"zeros", "replicate"} - self.window_size = window_size - self.temporal_dilation = temporal_dilation - self.make_padding = {"zeros": torch.zeros_like, "replicate": torch.clone}[ - temporal_fill - ] - super(RePooled, self).__init__(*args, **kwargs) - - self.temporal_pool = ( - AdaptiveAvgPool1d - if "avg" in str(cls.__name__).lower() - else AdaptiveMaxPool1d - )(1) - - if self.temporal_dilation > 1: - self.frame_index_selection = torch.tensor( - range(0, self.window_size, self.temporal_dilation) - ) - - # state is initialised in self.forward - - def init_state(self, first_output: Tensor,) -> State: - padding = self.make_padding(first_output) - state_buffer = torch.stack( - [padding for _ in range(self.window_size)], dim=0 - ) - state_index = 0 - if not hasattr(self, "state_buffer"): - self.register_buffer("state_buffer", state_buffer, persistent=False) - return state_buffer, state_index - - def clean_state(self): - self.state_buffer = None - self.state_index = None - - def get_state(self): - if ( - hasattr(self, "state_buffer") and - self.state_buffer is not None and - hasattr(self, "state_index") and - self.state_buffer is not None - ): - return (self.state_buffer, self.state_index) - else: - return None - - def forward(self, input: Tensor) -> Tensor: - output, (self.state_buffer, self.state_index) = self._forward( - input, self.get_state() - ) - return output - - def _forward(self, input: Tensor, prev_state: State,) -> Tuple[Tensor, State]: - assert ( - len(input.shape) == 4 - ), "Only a single frame should be passed at a time." 
- - pooled_frame = super(RePooled, self).forward(input) - - if prev_state is None: - buffer, index = self.init_state(pooled_frame) - else: - buffer, index = prev_state - - buffer[index] = pooled_frame - - if self.temporal_dilation == 1: - frame_selection = buffer - else: - frame_selection = buffer.index_select( - dim=0, index=self.frame_index_selection - ) - - # Pool along temporal dimension - T, B, C, H, W = frame_selection.shape - x = frame_selection.permute(1, 3, 4, 2, 0) # B, H, W, C, T - x = x.reshape(B * H * W, C, T) - x = self.temporal_pool(x) - x = x.reshape(B, H, W, C) - x = x.permute(0, 3, 1, 2) # B, C, H, W - pooled_window = x - - new_index = (index + 1) % self.window_size - new_buffer = buffer.clone() if self.training else buffer.detach() - - return pooled_window, (new_buffer, new_index) - - def forward3d(self, input: Tensor): - """ If input.shape[2] == self.window_size, a global pooling along temporal dimension is performed - Otherwise, the pooling is performed per frame - """ - assert ( - len(input.shape) == 5 - ), "A tensor of size B,C,T,H,W should be passed as input." - - outs = [] - for t in range(input.shape[2]): - o = self.forward(input[:, :, t]) - if self.window_size - 1 <= t: - outs.append(o) - - if len(outs) == 0: - return torch.tensor([]) - - if input.shape[2] == self.window_size: - # In order to be compatible with downstream forward3d, select only last frame - # This corrsponds to the regular global pool - return outs[-1].unsqueeze(2) - - else: - return torch.stack(outs, dim=2) - - RePooled.__doc__ = f""" - Recursive {cls.__name__} - - Pooling results are stored between `forward` exercutions and used to pool subsequent - inputs along the temporal dimension with a spacified `window_size`. - Example: For `window_size = 3`, the two previous results are stored and used for pooling. - `temporal_fill` determines whether to initialize the state with a ``'replicate'`` of the - output of the first execution or with with ``'zeros'``. 
- - Parent doc: - {cls.__doc__} - """ - - return RePooled - - -AvgPoolCo3d = RecursivelyWindowPooled(AvgPool2d) -MaxPoolCo3d = RecursivelyWindowPooled(MaxPool2d) -AdaptiveAvgPoolCo3d = RecursivelyWindowPooled(AdaptiveAvgPool2d) -AdaptiveMaxPoolCo3d = RecursivelyWindowPooled(AdaptiveMaxPool2d) - - -def convert_avgpool3d( - instance: AvgPool3d, - window_size: int = None, # Not used: only there to satisfy interface - temporal_fill: FillMode = "replicate", -): - kernel_size = _triple(instance.kernel_size) - padding = _triple(instance.padding) - stride = _triple(instance.stride) - assert padding[0] == 0, "Cannot convert AvgPool3d with padding[0] != 0" - assert stride[0] == 1, "Cannot convert AvgPool3d with stride[0] != 1" - return AvgPoolCo3d( - window_size=kernel_size[0], - temporal_fill=temporal_fill, - kernel_size=kernel_size[1:], - stride=stride[1:], - padding=padding[1:], - ceil_mode=instance.ceil_mode, - count_include_pad=instance.count_include_pad, - divisor_override=instance.divisor_override, - ) - - -def convert_maxpool3d( - instance: MaxPool3d, - window_size: int = None, # Not used: only there to satisfy interface - temporal_fill: FillMode = "replicate", -): - kernel_size = _triple(instance.kernel_size) - padding = _triple(instance.padding) - stride = _triple(instance.stride) - dilation = _triple(instance.dilation) - assert padding[0] == 0, "Cannot convert MaxPool3d with padding[0] != 0" - assert stride[0] == 1, "Cannot convert MaxPool3d with stride[0] != 1" - assert dilation[0] == 1, "Cannot convert MaxPool3d with dilation[0] != 1" - assert ( - instance.return_indices is False - ), "return_indices currently not supported for MaxPool3d" - return MaxPoolCo3d( - window_size=kernel_size[0], - temporal_fill=temporal_fill, - kernel_size=kernel_size[1:], - stride=stride[1:], - padding=padding[1:], - dilation=dilation[1:], - return_indices=instance.return_indices, - ceil_mode=instance.ceil_mode, - ) - - -def convert_adaptiveavgpool3d( - instance: AdaptiveAvgPool3d, - window_size: int, - temporal_fill: FillMode = "replicate", -): - assert ( - instance.output_size[0] == 1 - ), "Cannot convert AdaptiveAvgPool3d without output_size[0] != 1" - return AdaptiveAvgPoolCo3d( - window_size=window_size, - temporal_fill=temporal_fill, - output_size=instance.output_size[1:], - ) - - -def convert_adaptivemaxpool3d( - instance: AdaptiveMaxPool3d, - window_size: int, - temporal_fill: FillMode = "replicate", -): - assert ( - instance.output_size[0] == 1 - ), "Cannot convert AdaptiveMaxPool3d without output_size[0] != 1" - assert ( - instance.return_indices is False - ), "return_indices currently not supported for AdaptiveMaxPool3d" - return AdaptiveAvgPoolCo3d( - window_size=window_size, - temporal_fill=temporal_fill, - output_size=instance.output_size, - ) diff --git a/src/opendr/perception/activity_recognition/cox3d/algorithm/res.py b/src/opendr/perception/activity_recognition/cox3d/algorithm/res.py new file mode 100644 index 0000000000..fdcbde7aa5 --- /dev/null +++ b/src/opendr/perception/activity_recognition/cox3d/algorithm/res.py @@ -0,0 +1,268 @@ +from collections import OrderedDict +from typing import Callable + +import continual as co +import torch +from continual import PaddingMode +from torch import nn + + +def CoResBlock( + dim_in, + dim_out, + temp_kernel_size, + stride, + trans_func, + dim_inner, + num_groups=1, + stride_1x1=False, + inplace_relu=True, + eps=1e-5, + bn_mmt=0.1, + dilation=1, + norm_module=torch.nn.BatchNorm3d, + block_idx=0, + drop_connect_rate=0.0, + temporal_window_size: int = 
4, + temporal_fill: PaddingMode = "zeros", + se_scope="frame", # "clip" or "frame" +): + """ + ResBlock class constructs redisual blocks. More details can be found in: + Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun. + "Deep residual learning for image recognition." + https://arxiv.org/abs/1512.03385 + Args: + dim_in (int): the channel dimensions of the input. + dim_out (int): the channel dimension of the output. + temp_kernel_size (int): the temporal kernel sizes of the middle + convolution in the bottleneck. + stride (int): the stride of the bottleneck. + trans_func (string): transform function to be used to construct the + bottleneck. + dim_inner (int): the inner dimension of the block. + num_groups (int): number of groups for the convolution. num_groups=1 + is for standard ResNet like networks, and num_groups>1 is for + ResNeXt like networks. + stride_1x1 (bool): if True, apply stride to 1x1 conv, otherwise + apply stride to the 3x3 conv. + inplace_relu (bool): calculate the relu on the original input + without allocating new memory. + eps (float): epsilon for batch norm. + bn_mmt (float): momentum for batch norm. Noted that BN momentum in + PyTorch = 1 - BN momentum in Caffe2. + dilation (int): size of dilation. + norm_module (torch.nn.Module): torch.nn.Module for the normalization layer. The + default is torch.nn.BatchNorm3d. + drop_connect_rate (float): basic rate at which blocks are dropped, + linearly increases from input to output blocks. + """ + branch2 = trans_func( + dim_in, + dim_out, + temp_kernel_size, + stride, + dim_inner, + num_groups, + stride_1x1=stride_1x1, + inplace_relu=inplace_relu, + dilation=dilation, + norm_module=norm_module, + block_idx=block_idx, + temporal_window_size=temporal_window_size, + temporal_fill=temporal_fill, + se_scope=se_scope, + ) + + def _is_training(module: nn.Module) -> bool: + return module.training + + def _drop_connect(x, drop_ratio): + """Apply dropconnect to x""" + keep_ratio = 1.0 - drop_ratio + mask = torch.empty([x.shape[0], 1, 1, 1], dtype=x.dtype, device=x.device) + mask.bernoulli_(keep_ratio) + x.div_(keep_ratio) + x.mul_(mask) + return x + + if drop_connect_rate > 0: + drop = [("drop", co.Conditional(_is_training, co.Lambda(_drop_connect)))] + else: + drop = [] + + main_stream = co.Sequential( + OrderedDict( + [ + ("branch2", branch2), + *drop, + ] + ) + ) + + if (dim_in == dim_out) and (stride == 1): + residual_stream = co.Delay(main_stream.delay) + else: + residual_stream = co.Sequential( + OrderedDict( + [ + ( + "branch1", + co.Conv3d( + dim_in, + dim_out, + kernel_size=1, + stride=(1, stride, stride), + padding=0, + bias=False, + dilation=1, + ), + ), + ( + "branch1_bn", + norm_module(num_features=dim_out, eps=eps, momentum=bn_mmt), + ), + ] + ) + ) + + return co.Sequential( + co.BroadcastReduce(residual_stream, main_stream, reduce="sum"), + nn.ReLU(), + ) + + +def CoResStage( + dim_in: int, + dim_out: int, + stride: int, + temp_kernel_sizes: int, + num_blocks: int, + dim_inner: int, + num_groups: int, + num_block_temp_kernel: int, + dilation: int, + trans_func: Callable, + stride_1x1=False, + inplace_relu=True, + norm_module=torch.nn.BatchNorm3d, + drop_connect_rate=0.0, + temporal_window_size: int = 4, + temporal_fill: PaddingMode = "zeros", + se_scope="frame", + *args, + **kwargs, +): + """ + Create a Continual Residual X3D Stage. + + Note: Compared to the original implementation of X3D, we discard the + obsolete handling of the multiple pathways and the non-local mechanism. 
+ + Args: + dim_in (int): channel dimensions of the input. + dim_out (int): channel dimensions of the output. + temp_kernel_sizes (int): temporal kernel sizes of the + convolution in the bottleneck. + stride (int): stride of the bottleneck. + num_blocks (int): numbers of blocks. + dim_inner (int): inner channel dimensions of the input. + num_groups (int): number of groups for the convolution. + num_groups=1 is for standard ResNet like networks, and + num_groups>1 is for ResNeXt like networks. + num_block_temp_kernel (int): extent the temp_kernel_sizes to + num_block_temp_kernel blocks, then fill temporal kernel size + of 1 for the rest of the layers. + dilation (int): size of dilation. + trans_func (Callable): transformation function to apply on the network. + norm_module (nn.Module): nn.Module for the normalization layer. The + default is nn.BatchNorm3d. + drop_connect_rate (float): basic rate at which blocks are dropped, + linearly increases from input to output blocks. + """ + + assert num_block_temp_kernel <= num_blocks + + temp_kernel_sizes = (temp_kernel_sizes * num_blocks)[:num_block_temp_kernel] + ( + [1] * (num_blocks - num_block_temp_kernel) + ) + + return co.Sequential( + OrderedDict( + [ + ( + f"pathway0_res{i}", + CoResBlock( + dim_in=dim_in if i == 0 else dim_out, + dim_out=dim_out, + temp_kernel_size=temp_kernel_sizes[i], + stride=stride if i == 0 else 1, + trans_func=trans_func, + dim_inner=dim_inner, + num_groups=num_groups, + stride_1x1=stride_1x1, + inplace_relu=inplace_relu, + dilation=dilation, + norm_module=norm_module, + block_idx=i, + drop_connect_rate=drop_connect_rate, + temporal_window_size=temporal_window_size, + temporal_fill=temporal_fill, + se_scope=se_scope, + ), + ) + for i in range(num_blocks) + ] + ) + ) + + +def c2_msra_fill(module: torch.nn.Module) -> None: + """ + Initialize `module.weight` using the "MSRAFill" implemented in Caffe2. + Also initializes `module.bias` to 0. + + Args: + module (torch.nn.Module): module to initialize. + """ + # pyre-ignore + torch.nn.init.kaiming_normal_(module.weight, mode="fan_out", nonlinearity="relu") + if module.bias is not None: # pyre-ignore + torch.nn.init.constant_(module.bias, 0) + + +def init_weights(model, fc_init_std=0.01, zero_init_final_bn=True): + """ + Performs ResNet style weight initialization. + Args: + fc_init_std (float): the expected standard deviation for fc layer. + zero_init_final_bn (bool): if True, zero initialize the final bn for + every bottleneck. + """ + for m in model.modules(): + if isinstance(m, torch.nn.Conv3d) or isinstance(m, co.Conv3d): + """ + Follow the initialization method proposed in: + {He, Kaiming, et al. + "Delving deep into rectifiers: Surpassing human-level + performance on imagenet classification." 
+ arXiv preprint arXiv:1502.01852 (2015)} + """ + c2_msra_fill(m) + elif isinstance(m, torch.nn.BatchNorm3d) or isinstance(m, torch.nn.BatchNorm2d): + if ( + hasattr(m, "transform_final_bn") and + m.transform_final_bn and + zero_init_final_bn + ): + batchnorm_weight = 0.0 + else: + batchnorm_weight = 1.0 + if m.weight is not None: + m.weight.data.fill_(batchnorm_weight) + if m.bias is not None: + m.bias.data.zero_() + if isinstance(m, torch.nn.Linear): + m.weight.data.normal_(mean=0.0, std=fc_init_std) + if m.bias is not None: + m.bias.data.zero_() diff --git a/src/opendr/perception/activity_recognition/cox3d/algorithm/se.py b/src/opendr/perception/activity_recognition/cox3d/algorithm/se.py index 7337251e15..c5d03f71a2 100644 --- a/src/opendr/perception/activity_recognition/cox3d/algorithm/se.py +++ b/src/opendr/perception/activity_recognition/cox3d/algorithm/se.py @@ -1,39 +1,34 @@ -""" -Copyright (c) Lukas Hedegaard. All Rights Reserved. -Included in the OpenDR Toolit with permission from the author. -""" +from collections import OrderedDict +import continual as co import torch -from torch import Tensor -from torch.nn import AdaptiveAvgPool3d, Conv3d, ReLU, Sigmoid +from torch import Tensor, nn from torch.nn.modules.pooling import AdaptiveAvgPool2d - from opendr.perception.activity_recognition.x3d.algorithm.operators import Swish -from .pooling import AdaptiveAvgPoolCo3d -from .utils import unsqueezed -class SE(torch.nn.Module): - """Squeeze-and-Excitation (SE) block w/ Swish: AvgPool, FC, Swish, FC, Sigmoid.""" +def _round_width(width, multiplier, min_width=8, divisor=8): + """ + Round width of filters based on width multiplier + Args: + width (int): the channel dimensions of the input. + multiplier (float): the multiplication factor. + min_width (int): the minimum width after multiplication. + divisor (int): the new width should be dividable by divisor. + """ + if not multiplier: + return width - def _round_width(self, width, multiplier, min_width=8, divisor=8): - """ - Round width of filters based on width multiplier - Args: - width (int): the channel dimensions of the input. - multiplier (float): the multiplication factor. - min_width (int): the minimum width after multiplication. - divisor (int): the new width should be dividable by divisor. - """ - if not multiplier: - return width + width *= multiplier + min_width = min_width or divisor + width_out = max(min_width, int(width + divisor / 2) // divisor * divisor) + if width_out < 0.9 * width: + width_out += divisor + return int(width_out) - width *= multiplier - min_width = min_width or divisor - width_out = max(min_width, int(width + divisor / 2) // divisor * divisor) - if width_out < 0.9 * width: - width_out += divisor - return int(width_out) + +class SE(torch.nn.Module): + """Squeeze-and-Excitation (SE) block w/ Swish: AvgPool, FC, Swish, FC, Sigmoid.""" def __init__(self, dim_in, ratio, relu_act=True): """ @@ -44,13 +39,15 @@ def __init__(self, dim_in, ratio, relu_act=True): of Swish (default). 
""" super(SE, self).__init__() - self.avg_pool = AdaptiveAvgPool3d((1, 1, 1)) - dim_fc = self._round_width(dim_in, ratio) - self.fc1 = Conv3d(dim_in, dim_fc, 1, bias=True) - self.fc1_act = ReLU() if relu_act else Swish() - self.fc2 = Conv3d(dim_fc, dim_in, 1, bias=True) - - self.fc2_sig = Sigmoid() + self.avg_pool = nn.AdaptiveAvgPool3d((1, 1, 1)) + dim_fc = _round_width(dim_in, ratio) + self.fc1 = nn.Conv3d(dim_in, dim_fc, 1, bias=True) + self.fc1_act = nn.ReLU() if relu_act else Swish() + self.fc2 = nn.Conv3d(dim_fc, dim_in, 1, bias=True) + self.fc2_sig = nn.Sigmoid() + self.dim_in = dim_in + self.ratio = ratio + self.relu_act = relu_act def forward(self, x): x_in = x @@ -59,27 +56,57 @@ def forward(self, x): return x_in * x -class ReSe(torch.nn.Module): - """Recursive Squeeze-and-Excitation (SE) block w/ Swish: AvgPool, FC, Swish, FC, Sigmoid.""" - - def _round_width(self, width, multiplier, min_width=8, divisor=8): - """ - Round width of filters based on width multiplier - Args: - width (int): the channel dimensions of the input. - multiplier (float): the multiplication factor. - min_width (int): the minimum width after multiplication. - divisor (int): the new width should be dividable by divisor. - """ - if not multiplier: - return width - - width *= multiplier - min_width = min_width or divisor - width_out = max(min_width, int(width + divisor / 2) // divisor * divisor) - if width_out < 0.9 * width: - width_out += divisor - return int(width_out) +def CoSe( + window_size: int, + dim_in: int, + ratio: float, + relu_act: bool = True, + scope="frame", + temporal_fill="zeros", +): + dim_fc = _round_width(dim_in, ratio) + return co.Residual( + co.Sequential( + OrderedDict( + [ + ( + "avg_pool", + co.AdaptiveAvgPool3d( + output_size=(1, 1, 1), + kernel_size={ + "clip": window_size, + "frame": 1, + }[scope], + temporal_fill=temporal_fill, + ), + ), + ( + "fc1", + co.Conv3d(dim_in, dim_fc, 1, bias=True), + ), + ( + "fc1_act", + nn.ReLU() + if relu_act + else Swish(), # nn.SELU is the same as Swish + ), + ( + "fc2", + co.Conv3d(dim_fc, dim_in, 1, bias=True), + ), + ( + "fc2_sig", + nn.Sigmoid(), + ), + ] + ) + ), + reduce="mul", + ) + + +class CoSeAlt(co.CoModule, nn.Module): + """Continual Squeeze-and-Excitation (SE) block w/ Swish: AvgPool, FC, Swish, FC, Sigmoid.""" def __init__( self, @@ -88,7 +115,7 @@ def __init__( ratio: float, relu_act: bool = True, scope="frame", - temporal_fill="replicate", + temporal_fill="zeros", ): """ Args: @@ -98,19 +125,27 @@ def __init__( relu_act (bool): whether to use ReLU activation instead of Swish (default). 
""" - super(ReSe, self).__init__() + super(CoSeAlt, self).__init__() self.avg_pool = { - "clip": lambda: AdaptiveAvgPoolCo3d( - window_size, output_size=(1, 1), temporal_fill=temporal_fill + "clip": lambda: co.AdaptiveAvgPool3d( + output_size=(1, 1, 1), + kernel_size=window_size, + temporal_fill=temporal_fill, ), - "frame": lambda: unsqueezed(AdaptiveAvgPool2d(output_size=(1, 1))), + "frame": lambda: co.forward_stepping(AdaptiveAvgPool2d(output_size=(1, 1))), }[scope]() - dim_fc = self._round_width(dim_in, ratio) - self.fc1 = unsqueezed(Conv3d(dim_in, dim_fc, 1, bias=True)) - self.fc1_act = ReLU() if relu_act else Swish() - self.fc2 = unsqueezed(Conv3d(dim_fc, dim_in, 1, bias=True)) - - self.fc2_sig = Sigmoid() + dim_fc = _round_width(dim_in, ratio) + self.fc1 = co.forward_stepping(nn.Conv3d(dim_in, dim_fc, 1, bias=True)) + self.fc1_act = nn.ReLU() if relu_act else Swish() + self.fc2 = co.forward_stepping(nn.Conv3d(dim_fc, dim_in, 1, bias=True)) + self.fc2_sig = nn.Sigmoid() + + self.window_size = window_size + self.dim_in = dim_in + self.ratio = ratio + self.relu_act = relu_act + self.scope = scope + self.temporal_fill = temporal_fill def forward(self, x: Tensor) -> Tensor: x_in = x @@ -118,11 +153,45 @@ def forward(self, x: Tensor) -> Tensor: x = module(x) return x_in * x - def forward3d(self, x: Tensor) -> Tensor: + def forward_step(self, x: Tensor) -> Tensor: x_in = x for module in self.children(): - if hasattr(module, "forward3d"): - x = module.forward3d(x) + if hasattr(module, "forward_step"): + x = module.forward_step(x) else: x = module(x) + if not isinstance(x, Tensor): + return None return x_in * x + + def forward_steps(self, x: Tensor) -> Tensor: + x_in = x + for module in self.children(): + if hasattr(module, "forward_steps"): + x = module.forward_steps(x) + else: + x = module(x) + if not isinstance(x, Tensor): + return None + return x_in * x + + @property + def delay(self): + return self.avg_pool.delay + + def clean_state(self): + self.avg_pool.clean_state() + + def build_from( + module: SE, window_size: int, scope="frame", temporal_fill="zeros" + ) -> "CoSeAlt": + mod = CoSeAlt( + window_size, + module.dim_in, + module.ratio, + module.relu_act, + scope, + temporal_fill, + ) + mod.load_state_dict(module.state_dict()) + return mod diff --git a/src/opendr/perception/activity_recognition/cox3d/algorithm/utils.py b/src/opendr/perception/activity_recognition/cox3d/algorithm/utils.py deleted file mode 100644 index 551723055e..0000000000 --- a/src/opendr/perception/activity_recognition/cox3d/algorithm/utils.py +++ /dev/null @@ -1,55 +0,0 @@ -""" -Copyright (c) Lukas Hedegaard. All Rights Reserved. -Included in the OpenDR Toolit with permission from the author. 
-""" - -from enum import Enum -from functools import wraps -from typing import Callable - -from torch import Tensor -from torch.nn import Module -from logging import getLogger - -logger = getLogger(__name__) - - -class FillMode(Enum): - REPLICATE = "replicate" - ZEROS = "zeros" - - -def unsqueezed(instance: Module, dim: int = 2): - def decorator(func: Callable[[Tensor], Tensor]): - @wraps(func) - def call(x: Tensor) -> Tensor: - x = x.unsqueeze(dim) - x = func(x) - x = x.squeeze(dim) - return x - - return call - - instance.forward3d = instance.forward - instance.forward = decorator(instance.forward) - - return instance - - -def once(fn: Callable): - called = 0 - - @wraps(fn) - def wrapped(*args, **kwargs): - nonlocal called - if not called: - called = 1 - return fn(*args, **kwargs) - - return wrapped - - -@once -def warn_once_if(cond: bool, msg: str): - if cond: - logger.warning(msg) diff --git a/src/opendr/perception/activity_recognition/cox3d/algorithm/x3d.py b/src/opendr/perception/activity_recognition/cox3d/algorithm/x3d.py new file mode 100644 index 0000000000..5b43fe29a3 --- /dev/null +++ b/src/opendr/perception/activity_recognition/cox3d/algorithm/x3d.py @@ -0,0 +1,467 @@ +import math +from collections import OrderedDict + +import continual as co +import torch +from continual import PaddingMode +from torch import nn + +from .res import CoResStage, init_weights + +from opendr.perception.activity_recognition.x3d.algorithm.operators import Swish +from .se import CoSe + + +def CoX3DTransform( + dim_in: int, + dim_out: int, + temp_kernel_size: int, + stride: int, + dim_inner: int, + num_groups: int, + stride_1x1=False, + inplace_relu=True, + eps=1e-5, + bn_mmt=0.1, + dilation=1, + norm_module=torch.nn.BatchNorm3d, + se_ratio=0.0625, + swish_inner=True, + block_idx=0, + temporal_window_size: int = 4, + temporal_fill: PaddingMode = "zeros", + se_scope="frame", # "frame" or "clip" +): + """ + Args: + dim_in (int): the channel dimensions of the input. + dim_out (int): the channel dimension of the output. + temp_kernel_size (int): the temporal kernel sizes of the middle + convolution in the bottleneck. + stride (int): the stride of the bottleneck. + dim_inner (int): the inner dimension of the block. + num_groups (int): number of groups for the convolution. num_groups=1 + is for standard ResNet like networks, and num_groups>1 is for + ResNeXt like networks. + stride_1x1 (bool): if True, apply stride to 1x1 conv, otherwise + apply stride to the 3x3 conv. + inplace_relu (bool): if True, calculate the relu on the original + input without allocating new memory. + eps (float): epsilon for batch norm. + bn_mmt (float): momentum for batch norm. Noted that BN momentum in + PyTorch = 1 - BN momentum in Caffe2. + dilation (int): size of dilation. + norm_module (torch.nn.Module): torch.nn.Module for the normalization layer. The + default is torch.nn.BatchNorm3d. + se_ratio (float): if > 0, apply SE to the Tx3x3 conv, with the SE + channel dimensionality being se_ratio times the Tx3x3 conv dim. + swish_inner (bool): if True, apply swish to the Tx3x3 conv, otherwise + apply ReLU to the Tx3x3 conv. + """ + (str1x1, str3x3) = (stride, 1) if stride_1x1 else (1, stride) + + a = co.Conv3d( + dim_in, + dim_inner, + kernel_size=(1, 1, 1), + stride=(1, str1x1, str1x1), + padding=(0, 0, 0), + bias=False, + ) + + a_bn = co.forward_stepping( + norm_module(num_features=dim_inner, eps=eps, momentum=bn_mmt) + ) + + a_relu = torch.nn.ReLU(inplace=inplace_relu) + + # Tx3x3, BN, ReLU. 
+ b = co.Conv3d( + dim_inner, + dim_inner, + kernel_size=(temp_kernel_size, 3, 3), + stride=(1, str3x3, str3x3), + padding=(int(temp_kernel_size // 2), dilation, dilation), + groups=num_groups, + bias=False, + dilation=(1, dilation, dilation), + temporal_fill=temporal_fill, + ) + + b_bn = co.forward_stepping( + norm_module(num_features=dim_inner, eps=eps, momentum=bn_mmt) + ) + + # Apply SE attention or not + use_se = True if (block_idx + 1) % 2 else False + if se_ratio > 0.0 and use_se: + se = CoSe( + temporal_window_size, + dim_in=dim_inner, + ratio=se_ratio, + temporal_fill=temporal_fill, + scope=se_scope, + ) + + b_relu = co.forward_stepping( + Swish() # nn.SELU is the same as Swish + if swish_inner + else nn.ReLU(inplace=inplace_relu) + ) + + # 1x1x1, BN. + c = co.Conv3d( + dim_inner, + dim_out, + kernel_size=(1, 1, 1), + stride=(1, 1, 1), + padding=(0, 0, 0), + bias=False, + ) + c_bn = co.forward_stepping( + norm_module(num_features=dim_out, eps=eps, momentum=bn_mmt) + ) + c_bn.transform_final_bn = True + + return co.Sequential( + OrderedDict( + [ + ("a", a), + ("a_bn", a_bn), + ("a_relu", a_relu), + ("b", b), + ("b_bn", b_bn), + *([("se", se)] if use_se else []), + ("b_relu", b_relu), + ("c", c), + ("c_bn", c_bn), + ] + ) + ) + + +def CoX3DHead( + dim_in: int, + dim_inner: int, + dim_out: int, + num_classes: int, + pool_size: int, + dropout_rate=0.0, + act_func="softmax", + inplace_relu=True, + eps=1e-5, + bn_mmt=0.1, + norm_module=torch.nn.BatchNorm3d, + bn_lin5_on=False, + temporal_window_size: int = 4, + temporal_fill: PaddingMode = "zeros", + no_pool=False, +): + """ + Continual X3D head. + This layer performs a fully-connected projection during training, when the + input size is 1x1x1. It performs a convolutional projection during testing + when the input size is larger than 1x1x1. If the inputs are from multiple + different pathways, the inputs will be concatenated after pooling. + """ + modules = [] + modules.append( + ( + "conv_5", + co.Conv3d( + dim_in, + dim_inner, + kernel_size=(1, 1, 1), + stride=(1, 1, 1), + padding=(0, 0, 0), + bias=False, + ), + ) + ) + modules.append( + ("conv_5_bn", norm_module(num_features=dim_inner, eps=eps, momentum=bn_mmt)) + ) + modules.append(("conv_5_relu", torch.nn.ReLU(inplace_relu))) + + if no_pool: + return co.Sequential(OrderedDict(modules)) + + avg_pool = co.Sequential( + co.Lambda(lambda x: x.mean(dim=(-1, -2))), + co.AvgPool1d(temporal_window_size, stride=1, temporal_fill=temporal_fill), + ) + + modules.append(("avg_pool", avg_pool)) + modules.append( + ( + "lin_5", + co.Conv1d( + dim_inner, + dim_out, + kernel_size=(1,), + stride=(1,), + padding=(0,), + bias=False, + ), + ) + ) + if bn_lin5_on: + modules.append( + ("lin_5_bn", torch.nn.BatchNorm1d(num_features=dim_out, eps=eps, momentum=bn_mmt)) + ) + + modules.append(("lin_5_relu", torch.nn.ReLU(inplace_relu))) + + if dropout_rate > 0.0: + modules.append(("dropout", torch.nn.Dropout(dropout_rate))) + + # Perform FC in a fully convolutional manner. The FC layer will be + # initialized with a different std comparing to convolutional layers. 
+ modules.append( + ("projection", co.Linear(dim_out, num_classes, bias=True, channel_dim=1)) + ) + + def view(x): + return x.view(x.shape[0], -1) + + modules.append(("view", co.Lambda(view, forward_only_fn=view))) + + return co.Sequential(OrderedDict(modules)) + + +def CoX3DStem( + dim_in: int, + dim_out: int, + kernel: int, + stride: int, + padding: int, + inplace_relu=True, + eps=1e-5, + bn_mmt=0.1, + norm_module=torch.nn.BatchNorm3d, + temporal_fill: PaddingMode = "zeros", + *args, + **kwargs, +): + """ + X3D's 3D stem module. + Performs a spatial followed by a depthwise temporal Convolution, BN, and Relu followed by a + spatiotemporal pooling. + + Args: + dim_in (int): the channel dimension of the input. Normally 3 is used + for rgb input, and 2 or 3 is used for optical flow input. + dim_out (int): the output dimension of the convolution in the stem + layer. + kernel (list): the kernel size of the convolution in the stem layer. + temporal kernel size, height kernel size, width kernel size in + order. + stride (list): the stride size of the convolution in the stem layer. + temporal kernel stride, height kernel size, width kernel size in + order. + padding (int): the padding size of the convolution in the stem + layer, temporal padding size, height padding size, width + padding size in order. + inplace_relu (bool): calculate the relu on the original input + without allocating new memory. + eps (float): epsilon for batch norm. + bn_mmt (float): momentum for batch norm. Noted that BN momentum in + PyTorch = 1 - BN momentum in Caffe2. + norm_module (torch.nn.Module): torch.nn.Module for the normalization layer. The + default is torch.nn.BatchNorm3d. + """ + conv_xy = co.Conv3d( + dim_in, + dim_out, + kernel_size=(1, kernel[1], kernel[2]), + stride=(1, stride[1], stride[2]), + padding=(0, padding[1], padding[2]), + bias=False, + ) + + conv = co.Conv3d( + dim_out, + dim_out, + kernel_size=(kernel[0], 1, 1), + stride=(stride[0], 1, 1), + padding=(padding[0], 0, 0), + bias=False, + groups=dim_out, + temporal_fill=temporal_fill, + ) + + bn = norm_module(num_features=dim_out, eps=eps, momentum=bn_mmt) + + relu = torch.nn.ReLU(inplace_relu) + + # Wrap in sequential to match weight specification + return co.Sequential( + OrderedDict( + [ + ( + "pathway0_stem", + co.Sequential( + OrderedDict( + [ + ("conv_xy", conv_xy), + ("conv", conv), + ("bn", bn), + ("relu", relu), + ] + ) + ), + ) + ] + ) + ) + + +def CoX3D( + dim_in: int, + image_size: int, + temporal_window_size: int, + num_classes: int, + x3d_conv1_dim: int, + x3d_conv5_dim: int, + x3d_num_groups: int, + x3d_width_per_group: int, + x3d_width_factor: float, + x3d_depth_factor: float, + x3d_bottleneck_factor: float, + x3d_use_channelwise_3x3x3: bool, + x3d_dropout_rate: float, + x3d_head_activation: str, + x3d_head_batchnorm: bool, + x3d_fc_std_init: float, + x3d_final_batchnorm_zero_init: bool, + temporal_fill: PaddingMode = "zeros", + se_scope="frame", + headless=False, +) -> co.Sequential: + """ + Continual X3D model, + adapted from https://github.com/facebookresearch/SlowFast + + Christoph Feichtenhofer. + "X3D: Expanding Architectures for Efficient Video Recognition." 
+ https://arxiv.org/abs/2004.04730 + """ + norm_module = torch.nn.BatchNorm3d + exp_stage = 2.0 + dim_conv1 = x3d_conv1_dim + + num_groups = x3d_num_groups + width_per_group = x3d_width_per_group + dim_inner = num_groups * width_per_group + + w_mul = x3d_width_factor + d_mul = x3d_depth_factor + + dim_res1 = _round_width(dim_conv1, w_mul) + dim_res2 = dim_conv1 + dim_res3 = _round_width(dim_res2, exp_stage, divisor=8) + dim_res4 = _round_width(dim_res3, exp_stage, divisor=8) + dim_res5 = _round_width(dim_res4, exp_stage, divisor=8) + + block_basis = [ + # blocks, c, stride + [1, dim_res2, 2], + [2, dim_res3, 2], + [5, dim_res4, 2], + [3, dim_res5, 2], + ] + + # Basis of temporal kernel sizes for each of the stage. + temp_kernel = [ + [5], # conv1 temporal kernels. + [3], # res2 temporal kernels. + [3], # res3 temporal kernels. + [3], # res4 temporal kernels. + [3], # res5 temporal kernels. + ] + + modules = [] + + s1 = CoX3DStem( + dim_in=dim_in, + dim_out=dim_res1, + kernel=temp_kernel[0] + [3, 3], + stride=[1, 2, 2], + padding=[temp_kernel[0][0] // 2, 1, 1], + norm_module=norm_module, + stem_func_name="x3d_stem", + temporal_window_size=temporal_window_size, + temporal_fill=temporal_fill, + ) + modules.append(("s1", s1)) + + # blob_in = s1 + dim_in = dim_res1 + dim_out = dim_in + for stage, block in enumerate(block_basis): + dim_out = _round_width(block[1], w_mul) + dim_inner = int(x3d_bottleneck_factor * dim_out) + + n_rep = _round_repeats(block[0], d_mul) + prefix = "s{}".format(stage + 2) # start w res2 to follow convention + + s = CoResStage( + dim_in=dim_in, + dim_out=dim_out, + dim_inner=dim_inner, + temp_kernel_sizes=temp_kernel[1], + stride=block[2], + num_blocks=n_rep, + num_groups=dim_inner if x3d_use_channelwise_3x3x3 else num_groups, + num_block_temp_kernel=n_rep, + trans_func=CoX3DTransform, + stride_1x1=False, + norm_module=norm_module, + dilation=1, + drop_connect_rate=0.0, + temporal_window_size=temporal_window_size, + temporal_fill=temporal_fill, + se_scope=se_scope, + ) + dim_in = dim_out + modules.append((prefix, s)) + + spat_sz = int(math.ceil(image_size / 32.0)) + head = CoX3DHead( + dim_in=dim_out, + dim_inner=dim_inner, + dim_out=x3d_conv5_dim, + num_classes=num_classes, + pool_size=(temporal_window_size, spat_sz, spat_sz), + dropout_rate=x3d_dropout_rate, + act_func=x3d_head_activation, + bn_lin5_on=bool(x3d_head_batchnorm), + temporal_window_size=temporal_window_size, + temporal_fill=temporal_fill, + no_pool=headless, + ) + modules.append(("head", head)) + seq = co.Sequential(OrderedDict(modules)) + init_weights(seq, x3d_fc_std_init, bool(x3d_final_batchnorm_zero_init)) + return seq + + +def _round_width(width, multiplier, min_depth=8, divisor=8): + """Round width of filters based on width multiplier.""" + if not multiplier: + return width + + width *= multiplier + min_depth = min_depth or divisor + new_filters = max(min_depth, int(width + divisor / 2) // divisor * divisor) + if new_filters < 0.9 * width: + new_filters += divisor + return int(new_filters) + + +def _round_repeats(repeats, multiplier): + """Round number of layers based on depth multiplier.""" + multiplier = multiplier + if not multiplier: + return repeats + return int(math.ceil(multiplier * repeats)) diff --git a/src/opendr/perception/activity_recognition/cox3d/cox3d_learner.py b/src/opendr/perception/activity_recognition/cox3d/cox3d_learner.py index 244ac8b278..9f9fb3f877 100644 --- a/src/opendr/perception/activity_recognition/cox3d/cox3d_learner.py +++ 
b/src/opendr/perception/activity_recognition/cox3d/cox3d_learner.py @@ -13,14 +13,20 @@ # limitations under the License. import torch - +import torch.nn.functional as F +import continual as co +import pickle +import os from opendr.engine import data from opendr.engine.target import Category -from opendr.perception.activity_recognition.cox3d.algorithm.cox3d import CoX3D +from opendr.perception.activity_recognition.cox3d.algorithm.x3d import CoX3D +from opendr.perception.activity_recognition.utils.lightning import _LightningModuleWithCrossEntropy from opendr.perception.activity_recognition.x3d.x3d_learner import X3DLearner - +from pathlib import Path from logging import getLogger from typing import Union, List +import onnxruntime as ort + logger = getLogger(__name__) @@ -47,7 +53,7 @@ def __init__( num_workers=0, seed=123, num_classes=400, - temporal_window_size: int=None, + temporal_window_size: int = None, *args, **kwargs, ): @@ -65,22 +71,42 @@ def __init__( checkpoint_load_iter (int, optional): Unused parameter. Defaults to 0. temp_path (str, optional): Path in which to store temporary files. Defaults to "". device (str, optional): Name of computational device ("cpu" or "cuda"). Defaults to "cuda". - weight_decay ([type], optional): Weight decay used for optimization. Defaults to 1e-5. + weight_decay (float, optional): Weight decay used for optimization. Defaults to 1e-5. momentum (float, optional): Momentum used for optimization. Defaults to 0.9. drop_last (bool, optional): Drop last data point if a batch cannot be filled. Defaults to True. pin_memory (bool, optional): Pin memory in dataloader. Defaults to False. + loss (str): Name of loss in torch.nn.functional to use. Defaults to "cross_entropy". num_workers (int, optional): Number of workers in dataloader. Defaults to 0. seed (int, optional): Random seed. Defaults to 123. num_classes (int, optional): Number of classes to predict among. Defaults to 400. temporal_window_size (int, optional): Size of the final global average pooling. - If None, size will be automically chosen according to the backbone. Defaults to None. + If None, size will be automatically chosen according to the backbone. Defaults to None. """ super().__init__( - lr, iters, batch_size, optimizer, lr_schedule, backbone, network_head, checkpoint_after_iter, - checkpoint_load_iter, temp_path, device, loss, weight_decay, momentum, drop_last, pin_memory, - num_workers, seed, num_classes, *args, **kwargs, + lr, + iters, + batch_size, + optimizer, + lr_schedule, + backbone, + network_head, + checkpoint_after_iter, + checkpoint_load_iter, + temp_path, + device, + loss, + weight_decay, + momentum, + drop_last, + pin_memory, + num_workers, + seed, + num_classes, + *args, + **kwargs, ) self.temporal_window_size = temporal_window_size + self._ort_state = None def init_model(self) -> CoX3D: """Initialise model with random parameters @@ -88,30 +114,34 @@ def init_model(self) -> CoX3D: Returns: CoX3D: model """ - assert hasattr( - self, "model_hparams" - ), "`self.model_hparams` not found. Did you forget to call `_load_hparams`?" + assert hasattr(self, "model_hparams"), "`self.model_hparams` not found. Did you forget to call `_load_hparams`?" 
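For orientation, the constructor documented above is exercised roughly as follows; this is a hedged sketch based on the signatures in this diff (random weights, the placeholder input, and the explicit init_model() call are illustrative rather than the only supported flow):

    # Hypothetical per-frame inference with the continual X3D learner.
    import torch
    from opendr.perception.activity_recognition.cox3d.cox3d_learner import CoX3DLearner

    learner = CoX3DLearner(device="cpu", backbone="s", num_classes=400)
    learner.init_model()                    # random weights; pretrained ones can be loaded separately

    size = learner.model_hparams["image_size"]
    frame = torch.randn(1, 3, size, size)   # one (B, 3, H, W) frame per call
    prediction = learner.infer(frame)       # None until the continual network has warmed up
    if prediction is not None:
        print(prediction[0])                # a Category with the predicted class and confidences

Each call to infer() advances the model's internal state by one frame, which is what makes per-frame video inference cheap compared to re-running a clip-based X3D on a sliding window.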
self.model = CoX3D( dim_in=3, image_size=self.model_hparams["image_size"], - frames_per_clip=getattr(self, "temporal_window_size", None) or self.model_hparams["frames_per_clip"], + temporal_window_size=getattr(self, "temporal_window_size", None) or self.model_hparams["frames_per_clip"], num_classes=self.num_classes, - conv1_dim=self.model_hparams["conv1_dim"], - conv5_dim=self.model_hparams["conv5_dim"], - num_groups=self.model_hparams["num_groups"], - width_per_group=self.model_hparams["width_per_group"], - width_factor=self.model_hparams["width_factor"], - depth_factor=self.model_hparams["depth_factor"], - bottleneck_factor=self.model_hparams["bottleneck_factor"], - use_channelwise_3x3x3=self.model_hparams["use_channelwise_3x3x3"], - dropout_rate=self.model_hparams["dropout_rate"], - head_activation=self.model_hparams["head_activation"], - head_batchnorm=self.model_hparams["head_batchnorm"], - fc_std_init=self.model_hparams["fc_std_init"], - final_batchnorm_zero_init=self.model_hparams["final_batchnorm_zero_init"], + x3d_conv1_dim=self.model_hparams["conv1_dim"], + x3d_conv5_dim=self.model_hparams["conv5_dim"], + x3d_num_groups=self.model_hparams["num_groups"], + x3d_width_per_group=self.model_hparams["width_per_group"], + x3d_width_factor=self.model_hparams["width_factor"], + x3d_depth_factor=self.model_hparams["depth_factor"], + x3d_bottleneck_factor=self.model_hparams["bottleneck_factor"], + x3d_use_channelwise_3x3x3=self.model_hparams["use_channelwise_3x3x3"], + x3d_dropout_rate=self.model_hparams["dropout_rate"], + x3d_head_activation=self.model_hparams["head_activation"], + x3d_head_batchnorm=self.model_hparams["head_batchnorm"], + x3d_fc_std_init=self.model_hparams["fc_std_init"], + x3d_final_batchnorm_zero_init=self.model_hparams["final_batchnorm_zero_init"], ).to(device=self.device) + self._plmodel = _LightningModuleWithCrossEntropy(self.model) return self.model + def _map_state_dict(self, sd): + if len(sd["head.lin_5.weight"]) > 3: + sd["head.lin_5.weight"] = sd["head.lin_5.weight"].squeeze(-1).squeeze(-1) + return sd + @property def _example_input(self): C = 3 # RGB @@ -136,7 +166,122 @@ def infer(self, batch: Union[data.Image, List[data.Image], torch.Tensor]) -> Lis batch = batch.to(device=self.device, dtype=torch.float) - self.model.eval() - results = self.model.forward(batch) - results = [Category(prediction=int(r.argmax(dim=0)), confidence=r) for r in results] + if self._ort_session is not None and self._ort_state is not None: + inputs = { + "input": batch.cpu().detach().numpy(), + **self._ort_state, + } + results, *next_state = self._ort_session.run(None, inputs) + results = torch.tensor(results) + self._ort_state = {k: v for k, v in zip(self._ort_state.keys(), next_state)} + else: + self.model.eval() + results = self.model.forward_step(batch) + if results is not None: + results = [Category(prediction=int(r.argmax(dim=0)), confidence=F.softmax(r, dim=-1)) for r in results] return results + + def optimize(self, do_constant_folding=False): + """Optimize model execution. + This is accomplished by saving to the ONNX format and loading the optimized model. + + Args: + do_constant_folding (bool, optional): Whether to optimize constants. Defaults to False. + """ + + if getattr(self.model, "_ort_session", None): + logger.info("Model is already optimized. 
Skipping redundant optimization") + return + + path = Path(self.temp_path or os.getcwd()) / "weights" / f"cox3d_{self.backbone}.onnx" + if not path.exists(): + self._save_onnx(path, do_constant_folding) + self._load_onnx(path) + + def _save_onnx(self, path: Union[str, Path], do_constant_folding=False, verbose=False): + """Save model in the ONNX format + + Args: + path (Union[str, Path]): Directory in which to save ONNX model + do_constant_folding (bool, optional): Whether to optimize constants. Defaults to False. + """ + path.parent.mkdir(exist_ok=True, parents=True) + + model = self.model.to(device="cpu") + model.eval() + + # Prepare state + state0 = None + sample = self._example_input.repeat(self.batch_size, 1, 1, 1) + with torch.no_grad(): + for _ in range(model.receptive_field): + _, state0 = model._forward_step(sample, state0) + _, state0 = model._forward_step(sample, state0) + state0 = co.utils.flatten(state0) + + # Export to ONNX + logger.info(f"Saving model to ONNX format at {str(path)}") + co.onnx.export( + model, + (sample, *state0), + path, + input_names=["input"], + output_names=["output"], + do_constant_folding=do_constant_folding, + verbose=verbose, + opset_version=11, + ) + + # Save default state and name mappings for later use + state_path = path.parent / f"cox3d_{self.backbone}_state.pickle" + logger.info(f"Saving ONNX model states at {str(state_path)}") + omodel = co.onnx.OnnxWrapper(self.model) + state = {k: v.detach().numpy() for k, v in zip(omodel.state_input_names, state0)} + with open(state_path, "wb") as f: + pickle.dump(state, f) + + def _load_onnx(self, path: Union[str, Path]): + """Loads ONNX model into an onnxruntime inference session. + + Args: + path (Union[str, Path]): Path to ONNX model + """ + onnx_path = path + state_path = path.parent / f"cox3d_{self.backbone}_state.pickle" + + logger.info(f"Loading ONNX runtime inference session from {str(onnx_path)}") + self._ort_session = ort.InferenceSession(str(onnx_path)) + + logger.info(f"Loading ONNX state from {str(state_path)}") + with open(state_path, "rb") as f: + self._ort_state = pickle.load(f) + + def _load_model_weights(self, weights_path: Union[str, Path]): + """Load pretrained model weights + + Args: + weights_path (Union[str, Path]): Path to model weights file. + Type of file must be one of {".pyth", ".pth", ".onnx"} + """ + weights_path = Path(weights_path) + + assert weights_path.is_file() and weights_path.suffix in {".pyth", ".pth", ".onnx"}, ( + f"weights_path ({str(weights_path)}) should be a .pth or .onnx file." 
+ "Pretrained weights can be downloaded using `self.download(...)`" + ) + if weights_path.suffix == ".onnx": + return self._load_onnx(weights_path) + + logger.debug(f"Loading model weights from {str(weights_path)}") + + # Check for configuration mismatches, loading only matching weights + loaded_state_dict = torch.load(weights_path, map_location=torch.device(self.device)) + if "model_state" in loaded_state_dict: # As found in the official pretrained X3D models + loaded_state_dict = loaded_state_dict["model_state"] + + loaded_state_dict = self._map_state_dict(loaded_state_dict) + + self.model.load_state_dict(loaded_state_dict, strict=False, flatten=True) + self.model.to(self.device) + + return self diff --git a/src/opendr/perception/activity_recognition/datasets/__init__.py b/src/opendr/perception/activity_recognition/datasets/__init__.py index e69de29bb2..50e8154ad0 100644 --- a/src/opendr/perception/activity_recognition/datasets/__init__.py +++ b/src/opendr/perception/activity_recognition/datasets/__init__.py @@ -0,0 +1,13 @@ +from opendr.perception.activity_recognition.datasets.dummy_timeseries_dataset import ( + DummyTimeseriesDataset, +) +from opendr.perception.activity_recognition.datasets.kinetics import ( + KineticsDataset, + CLASSES as KINETICS_CLASSES, +) + +__all__ = [ + "KineticsDataset", + "KINETICS_CLASSES", + "DummyTimeseriesDataset", +] diff --git a/src/opendr/perception/activity_recognition/datasets/dummy_timeseries_dataset.py b/src/opendr/perception/activity_recognition/datasets/dummy_timeseries_dataset.py new file mode 100644 index 0000000000..8d371573b6 --- /dev/null +++ b/src/opendr/perception/activity_recognition/datasets/dummy_timeseries_dataset.py @@ -0,0 +1,60 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import torch +from opendr.engine.datasets import DatasetIterator + + +class DummyTimeseriesDataset(DatasetIterator, torch.utils.data.Dataset): + """ + Dumme dataset for time-series forecasting + + Input data: Sinusoidal data of different wavelength + Targets: Last sum of cosines quantized into four buckets + (positive and falling, positive and rising, negative and falling, negative and rising) + """ + + def __init__(self, num_sines=4, num_datapoints=64, base_offset=0, sequence_len=64): + DatasetIterator.__init__(self) + torch.utils.data.Dataset.__init__(self) + + time_steps = torch.stack( + [ + torch.stack( + [ + torch.arange( + 0 + offset + base_offset, + i + offset + base_offset, + i / sequence_len, + ) + for i in range(1, num_sines + 1) + ], + dim=1, + ) + for offset in range(num_datapoints) + ] + ).permute(0, 2, 1) + self._input_data = torch.sin(time_steps) + assert self._input_data.shape == (num_datapoints, num_sines, sequence_len) + + cosines = torch.cos(time_steps[:, :-2]).sum(dim=-1) + positive = cosines[:, -1] > 0 + upwards = cosines[:, -1] > cosines[:, -2] + self._output_data = positive + 2 * upwards + + def __getitem__(self, idx): + return self._input_data[idx], self._output_data[idx] + + def __len__(self): + return len(self._output_data) diff --git a/src/opendr/perception/activity_recognition/datasets/kinetics.py b/src/opendr/perception/activity_recognition/datasets/kinetics.py index 3eab1ad19f..021ae087ce 100644 --- a/src/opendr/perception/activity_recognition/datasets/kinetics.py +++ b/src/opendr/perception/activity_recognition/datasets/kinetics.py @@ -74,6 +74,7 @@ def __init__( video_transform=None, use_caching=False, decoder_backend="pyav", + spatial_pixels=224 ): """ Kinetics dataset @@ -121,7 +122,7 @@ def __init__( if video_transform: self.video_transform = video_transform else: - train_transform, eval_transform = standard_video_transforms() + train_transform, eval_transform = standard_video_transforms(spatial_pixels=spatial_pixels) self.video_transform = ( train_transform if self.split == "train" else eval_transform ) diff --git a/src/opendr/perception/activity_recognition/dependencies.ini b/src/opendr/perception/activity_recognition/dependencies.ini index ea5aaa01db..0290f78b95 100644 --- a/src/opendr/perception/activity_recognition/dependencies.ini +++ b/src/opendr/perception/activity_recognition/dependencies.ini @@ -1,15 +1,16 @@ [runtime] # 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format -python=torch==1.9.0 - torchvision==0.10.0 +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +python=torch>=1.9.0 + torchvision>=0.10.0 tqdm onnx==1.8.0 - onnxruntime==1.3.0 + onnxruntime>=1.3.0 pytorch_lightning==1.2.3 av==8.0.1 joblib>=1.0.1 pyyaml>=5.3 pandas>=1.2 + continual-inference>=1.0.2 opendr=opendr-toolkit-engine diff --git a/src/opendr/perception/activity_recognition/utils/__init__.py b/src/opendr/perception/activity_recognition/utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/activity_recognition/utils/lightning.py b/src/opendr/perception/activity_recognition/utils/lightning.py new file mode 100644 index 0000000000..b467410919 --- /dev/null +++ b/src/opendr/perception/activity_recognition/utils/lightning.py @@ -0,0 +1,53 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import torch +import pytorch_lightning as pl + + +class _LightningModuleWithCrossEntropy(pl.LightningModule): + def __init__(self, module): + pl.LightningModule.__init__(self) + self.module = module + + def forward(self, *args, **kwargs): + return self.module.forward(*args, **kwargs) + + def training_step(self, batch, batch_idx): + x, y = batch + z = self.module(x) + loss = torch.nn.functional.cross_entropy(z, y) + self.log("train/loss", loss) + self.log("train/acc", _accuracy(z, y)) + return loss + + def validation_step(self, batch, batch_idx): + x, y = batch + z = self.forward(x) + loss = torch.nn.functional.cross_entropy(z, y) + self.log("val/loss", loss) + self.log("val/acc", _accuracy(z, y)) + return loss + + def test_step(self, batch, batch_idx): + x, y = batch + z = self.forward(x) + loss = torch.nn.functional.cross_entropy(z, y) + self.log("test/loss", loss) + self.log("test/acc", _accuracy(z, y)) + return loss + + +def _accuracy(x, y): + return torch.sum(x.argmax(dim=1) == y) / len(y) diff --git a/src/opendr/perception/activity_recognition/x3d/algorithm/operators.py b/src/opendr/perception/activity_recognition/x3d/algorithm/operators.py index 7ae58a207d..6adb537a61 100644 --- a/src/opendr/perception/activity_recognition/x3d/algorithm/operators.py +++ b/src/opendr/perception/activity_recognition/x3d/algorithm/operators.py @@ -14,23 +14,6 @@ def __init__(self): def forward(self, x): return x * torch.sigmoid(x) - # return SwishEfficient.apply(x) - - -# class SwishEfficient(torch.autograd.Function): -# """Swish activation function: x * sigmoid(x).""" - -# @staticmethod -# def forward(ctx, x): -# result = x * torch.sigmoid(x) -# ctx.save_for_backward(x) -# return result - -# @staticmethod -# def backward(ctx, grad_output): -# x = ctx.saved_variables[0] -# sigmoid_x = torch.sigmoid(x) -# return grad_output * (sigmoid_x * (1 + x * (1 - sigmoid_x))) class SE(nn.Module): diff --git a/src/opendr/perception/activity_recognition/x3d/x3d_learner.py b/src/opendr/perception/activity_recognition/x3d/x3d_learner.py index f842b43b4d..c6951465d3 100644 --- a/src/opendr/perception/activity_recognition/x3d/x3d_learner.py +++ b/src/opendr/perception/activity_recognition/x3d/x3d_learner.py @@ -22,7 +22,6 @@ from opendr.engine.helper.io import bump_version from torch import onnx import onnxruntime as ort - from opendr.engine.data import Video from opendr.engine.datasets import Dataset from opendr.engine.target import Category @@ -87,12 +86,8 @@ def __init__( seed (int, optional): Random seed. Defaults to 123. num_classes (int, optional): Number of classes to predict among. Defaults to 400. """ - assert ( - backbone in _MODEL_NAMES - ), f"Invalid model selected. Choose one of {_MODEL_NAMES}." - assert network_head in { - "classification" - }, "Currently, only 'classification' head is supported." + assert backbone in _MODEL_NAMES, f"Invalid model selected. Choose one of {_MODEL_NAMES}." + assert network_head in {"classification"}, "Currently, only 'classification' head is supported." assert optimizer in {"sgd", "adam"}, "Supported optimizers are Adam and SGD." 
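To make the new DummyTimeseriesDataset concrete: with the constructor defaults it yields 64 samples, each a stack of four sinusoids over 64 time steps, labelled with one of four classes derived from the summed cosines. A quick check, whose printed shapes follow directly from those defaults:

    # Quick shape check for the dummy time-series dataset introduced above.
    from opendr.perception.activity_recognition.datasets import DummyTimeseriesDataset

    ds = DummyTimeseriesDataset(num_sines=4, num_datapoints=64, sequence_len=64)
    x, y = ds[0]
    print(len(ds))    # 64 samples
    print(x.shape)    # torch.Size([4, 64]): one sinusoid per channel
    print(int(y))     # a label in {0, 1, 2, 3}, as described in the class docstring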
@@ -120,7 +115,7 @@ def __init__( self.seed = seed self.num_classes = num_classes self.loss = loss - self.ort_session = None + self._ort_session = None torch.manual_seed(self.seed) self._load_model_hparams(self.backbone) @@ -131,15 +126,13 @@ def _load_model_hparams(self, model_name: str = None) -> Dict[str, Any]: Args: model_name (str, optional): Name of the model (one of {"xs", "s", "m", "l"}). - If none, `self.backbon`e is used. Defaults to None. + If none, `self.backbone` is used. Defaults to None. Returns: Dict[str, Any]: Dictionary with model hyperparameters """ model_name = model_name or self.backbone - assert ( - model_name in _MODEL_NAMES - ), f"Invalid model selected. Choose one of {_MODEL_NAMES}." + assert model_name in _MODEL_NAMES, f"Invalid model selected. Choose one of {_MODEL_NAMES}." path = Path(__file__).parent / "hparams" / f"{model_name}.yaml" with open(path, "r") as f: self.model_hparams = yaml.load(f, Loader=yaml.FullLoader) @@ -154,9 +147,7 @@ def _load_model_weights(self, weights_path: Union[str, Path]): """ weights_path = Path(weights_path) - assert ( - weights_path.is_file() and weights_path.suffix in {".pyth", ".pth", ".onnx"} - ), ( + assert weights_path.is_file() and weights_path.suffix in {".pyth", ".pth", ".onnx"}, ( f"weights_path ({str(weights_path)}) should be a .pth or .onnx file." "Pretrained weights can be downloaded using `self.download(...)`" ) @@ -166,22 +157,10 @@ def _load_model_weights(self, weights_path: Union[str, Path]): logger.debug(f"Loading model weights from {str(weights_path)}") # Check for configuration mismatches, loading only matching weights - new_model_state = self.model.state_dict() loaded_state_dict = torch.load(weights_path, map_location=torch.device(self.device)) if "model_state" in loaded_state_dict: # As found in the official pretrained X3D models loaded_state_dict = loaded_state_dict["model_state"] - - def size_ok(k): - return new_model_state[k].size() == loaded_state_dict[k].size() - - to_load = { - k: v for k, v in loaded_state_dict.items() if size_ok(k) - } - self.model.load_state_dict(to_load, strict=False) - - names_not_loaded = set(new_model_state.keys()) - set(to_load.keys()) - if len(names_not_loaded) > 0: - logger.warning(f"Some model weight could not be loaded: {names_not_loaded}") + self.model.load_state_dict(loaded_state_dict, strict=False) self.model.to(self.device) return self @@ -192,9 +171,7 @@ def init_model(self) -> X3D: Returns: X3D: model """ - assert hasattr( - self, "model_hparams" - ), "`self.model_hparams` not found. Did you forget to call `_load_hparams`?" + assert hasattr(self, "model_hparams"), "`self.model_hparams` not found. Did you forget to call `_load_hparams`?" self.model = X3D( dim_in=3, image_size=self.model_hparams["image_size"], @@ -225,19 +202,17 @@ def save(self, path: Union[str, Path]): Returns: self """ - assert hasattr( - self, "model" - ), "Cannot save model because no model was found. Did you forget to call `__init__`?" + assert hasattr(self, "model"), "Cannot save model because no model was found. Did you forget to call `__init__`?" 
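The weight-loading change above drops the per-tensor size filtering and relies on `strict=False` instead. As a standalone sketch (editor's illustration, not toolkit code), this is the checkpoint convention `_load_model_weights` handles: the official pretrained X3D files nest the weights under a `model_state` key.

```python
import torch


def load_x3d_state_dict(model: torch.nn.Module, weights_path: str, device: str = "cpu") -> torch.nn.Module:
    """Load a .pyth/.pth checkpoint, unwrapping the 'model_state' key used by the
    official pretrained X3D files, and tolerate missing/extra keys."""
    state = torch.load(weights_path, map_location=torch.device(device))
    if "model_state" in state:  # format of the official pretrained X3D models
        state = state["model_state"]
    model.load_state_dict(state, strict=False)
    return model
```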
root_path = Path(path) root_path.mkdir(parents=True, exist_ok=True) name = f"x3d_{self.backbone}" - ext = ".onnx" if self.ort_session else ".pth" + ext = ".onnx" if self._ort_session else ".pth" weights_path = bump_version(root_path / f"model_{name}{ext}") meta_path = bump_version(root_path / f"{name}.json") logger.info(f"Saving model weights to {str(weights_path)}") - if self.ort_session: + if self._ort_session: self._save_onnx(weights_path) else: torch.save(self.model.state_dict(), weights_path) @@ -253,7 +228,7 @@ def save(self, path: Union[str, Path]): "network_head": self.network_head, "threshold": self.threshold, }, - "optimized": bool(self.ort_session), + "optimized": bool(self._ort_session), "optimizer_info": { "lr": self.lr, "iters": self.iters, @@ -292,9 +267,7 @@ def load(self, path: Union[str, Path]): return self if path.is_dir(): path = path / f"x3d_{self.backbone}.json" - assert ( - path.is_file() and path.suffix == ".json" - ), "The provided metadata path should be a .json file" + assert path.is_file() and path.suffix == ".json", "The provided metadata path should be a .json file" logger.debug(f"Loading X3DLearner metadata from {str(path)}") with open(path, "r") as f: @@ -329,9 +302,7 @@ def load(self, path: Union[str, Path]): return self @staticmethod - def download( - path: Union[str, Path], model_names: Iterable[str] = _MODEL_NAMES - ): + def download(path: Union[str, Path], model_names: Iterable[str] = _MODEL_NAMES): """Download pretrained X3D models Args: @@ -353,14 +324,20 @@ def download( url=f"https://dl.fbaipublicfiles.com/pyslowfast/x3d_models/x3d_{m}.pyth", filename=str(filename), ) - assert ( - filename.is_file() - ), f"Something wen't wrong when downloading {str(filename)}" + assert filename.is_file(), f"Something wen't wrong when downloading {str(filename)}" def reset(self): pass - def fit(self, dataset: Dataset, val_dataset: Dataset=None, epochs: int=None, steps: int=None, *args, **kwargs): + def fit( + self, + dataset: Dataset, + val_dataset: Dataset = None, + epochs: int = None, + steps: int = None, + *args, + **kwargs, + ): """Fit the model to a dataset Args: @@ -379,19 +356,26 @@ def fit(self, dataset: Dataset, val_dataset: Dataset=None, epochs: int=None, ste pin_memory=self.pin_memory, drop_last=self.drop_last, ) - val_dataloader = torch.utils.data.DataLoader( - val_dataset, - batch_size=self.batch_size, - num_workers=self.num_workers, - shuffle=False, - pin_memory=self.pin_memory, - drop_last=self.drop_last, - ) if val_dataset else None + val_dataloader = ( + torch.utils.data.DataLoader( + val_dataset, + batch_size=self.batch_size, + num_workers=self.num_workers, + shuffle=False, + pin_memory=self.pin_memory, + drop_last=self.drop_last, + ) + if val_dataset + else None + ) optimisation_metric = "val/loss" if val_dataset else "train/loss" # Patch model optimizer - assert self.optimizer in {"adam", "sgd"}, f"Invalid optimizer '{self.optimizer}'. Must be 'adam' or 'sgd'." + assert self.optimizer in { + "adam", + "sgd", + }, f"Invalid optimizer '{self.optimizer}'. Must be 'adam' or 'sgd'." 
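As a usage sketch of the learner API touched in these hunks: the save/load/download behaviour is taken from the code above, while the top-level import path and the constructor keywords are assumptions and may differ from the actual signature.

```python
from opendr.perception.activity_recognition import X3DLearner  # assumed top-level export

learner = X3DLearner(backbone="s", device="cpu")  # constructor keywords assumed

# Fetch the official pretrained weights referenced in download() above
X3DLearner.download(path="./pretrained", model_names={"s"})

# save() writes model_x3d_s.pth (or .onnx once optimized) plus x3d_s.json metadata;
# load() accepts either the directory or the .json file
learner.save("./checkpoints/x3d_s")
learner.load("./checkpoints/x3d_s")

# learner.fit(dataset=train_set, val_dataset=val_set, epochs=1)
# learner.optimize()            # exports to ONNX; infer() then runs via _ort_session
# predictions = learner.infer(video_batch)
```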
if self.optimizer == "adam": Optimizer = partial( torch.optim.Adam, @@ -411,9 +395,14 @@ def configure_optimizers(): # nonlocal Optimizer, optimisation_metric optimizer = Optimizer(self.model.parameters()) scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, patience=10) - return {"optimizer": optimizer, "lr_scheduler": scheduler, "monitor": optimisation_metric} + return { + "optimizer": optimizer, + "lr_scheduler": scheduler, + "monitor": optimisation_metric, + } - self.model.configure_optimizers = configure_optimizers + model = getattr(self, "_plmodel", self.model) + model.configure_optimizers = configure_optimizers self.trainer = pl.Trainer( max_epochs=epochs or self.iters, @@ -432,10 +421,10 @@ def configure_optimizers(): self.trainer.limit_train_batches = steps or self.trainer.limit_train_batches self.trainer.limit_val_batches = steps or self.trainer.limit_val_batches - self.trainer.fit(self.model, train_dataloader, val_dataloader) + self.trainer.fit(model, train_dataloader, val_dataloader) self.model.to(self.device) - def eval(self, dataset: Dataset, steps: int=None) -> Dict[str, Any]: + def eval(self, dataset: Dataset, steps: int = None) -> Dict[str, Any]: """Evaluate the model on the dataset Args: @@ -461,7 +450,8 @@ def eval(self, dataset: Dataset, steps: int=None) -> Dict[str, Any]: logger=_experiment_logger(), ) self.trainer.limit_test_batches = steps or self.trainer.limit_test_batches - results = self.trainer.test(self.model, test_dataloader) + model = getattr(self, "_plmodel", self.model) + results = self.trainer.test(model, test_dataloader) results = { "accuracy": results[-1]["test/acc"], "loss": results[-1]["test/loss"], @@ -485,21 +475,23 @@ def infer(self, batch: Union[Video, List[Video], torch.Tensor]) -> List[Category batch = torch.stack([torch.tensor(v.data) for v in batch]) batch = batch.to(device=self.device, dtype=torch.float) - - self.model.eval() - results = self.model.forward(batch) + if self._ort_session is not None: + results = torch.tensor(self._ort_session.run(None, {"video": batch.cpu().numpy()})[0]) + else: + self.model.eval() + results = self.model.forward(batch) results = [Category(prediction=int(r.argmax(dim=0)), confidence=r) for r in results] return results def optimize(self, do_constant_folding=False): """Optimize model execution. - This is acoomplished by saving to the ONNX format and loading the optimized model. + This is accomplished by saving to the ONNX format and loading the optimized model. Args: do_constant_folding (bool, optional): Whether to optimize constants. Defaults to False. """ - if getattr(self.model, "rpn_ort_session", None): + if getattr(self.model, "_ort_session", None): logger.info("Model is already optimized. 
Skipping redundant optimization") return @@ -538,6 +530,7 @@ def _save_onnx(self, path: Union[str, Path], do_constant_folding=False, verbose= path, input_names=["video"], output_names=["classes"], + dynamic_axes={"video": {0: "batch_size"}, "classes": {0: "batch_size"}}, do_constant_folding=do_constant_folding, verbose=verbose, opset_version=11, @@ -550,7 +543,7 @@ def _load_onnx(self, path: Union[str, Path]): path (Union[str, Path]): Path to ONNX model """ logger.info(f"Loading ONNX runtime inference session from {str(path)}") - self.ort_session = ort.InferenceSession(str(path)) + self._ort_session = ort.InferenceSession(str(path)) def _experiment_logger(): diff --git a/src/opendr/perception/compressive_learning/dependencies.ini b/src/opendr/perception/compressive_learning/dependencies.ini index c1958768b4..dab32bd556 100644 --- a/src/opendr/perception/compressive_learning/dependencies.ini +++ b/src/opendr/perception/compressive_learning/dependencies.ini @@ -1,8 +1,9 @@ [runtime] # 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 torchvision==0.10.0 + protobuf<=3.20.0 tensorboard>=2.4.1 tqdm diff --git a/src/opendr/perception/face_recognition/dependencies.ini b/src/opendr/perception/face_recognition/dependencies.ini index 5f2b1c66a1..17bdb8d3b8 100644 --- a/src/opendr/perception/face_recognition/dependencies.ini +++ b/src/opendr/perception/face_recognition/dependencies.ini @@ -1,9 +1,10 @@ [runtime] # 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 torchvision==0.10.0 bcolz>=1.2.1 + protobuf<=3.20.0 onnx==1.8.0 onnxruntime==1.3.0 tensorboard>=2.4.1 diff --git a/src/opendr/perception/facial_expression_recognition/__init__.py b/src/opendr/perception/facial_expression_recognition/__init__.py index b257c97833..cad70122f1 100644 --- a/src/opendr/perception/facial_expression_recognition/__init__.py +++ b/src/opendr/perception/facial_expression_recognition/__init__.py @@ -1,16 +1,22 @@ -from opendr.perception.facial_expression_recognition.\ +from opendr.perception.facial_expression_recognition. \ landmark_based_facial_expression_recognition.progressive_spatio_temporal_bln_learner \ import ProgressiveSpatioTemporalBLNLearner -from opendr.perception.facial_expression_recognition.\ +from opendr.perception.facial_expression_recognition. \ landmark_based_facial_expression_recognition.algorithm.datasets.CASIA_CK_data_gen \ import CK_CLASSES, CASIA_CLASSES -from opendr.perception.facial_expression_recognition.\ +from opendr.perception.facial_expression_recognition. \ landmark_based_facial_expression_recognition.algorithm.datasets.landmark_extractor import landmark_extractor -from opendr.perception.facial_expression_recognition.\ +from opendr.perception.facial_expression_recognition. \ landmark_based_facial_expression_recognition.algorithm.datasets.gen_facial_muscles_data import gen_muscle_data -from opendr.perception.facial_expression_recognition.\ +from opendr.perception.facial_expression_recognition. 
\ landmark_based_facial_expression_recognition.algorithm.datasets.AFEW_data_gen import data_normalization +from opendr.perception.facial_expression_recognition.image_based_facial_emotion_estimation.facial_emotion_learner \ + import FacialEmotionLearner +from opendr.perception.facial_expression_recognition.image_based_facial_emotion_estimation.algorithm.utils \ + import datasets +from opendr.perception.facial_expression_recognition.image_based_facial_emotion_estimation.algorithm.utils \ + import image_processing __all__ = ['ProgressiveSpatioTemporalBLNLearner', 'CK_CLASSES', 'CASIA_CLASSES', 'landmark_extractor', - 'gen_muscle_data', 'data_normalization'] + 'gen_muscle_data', 'data_normalization', 'FacialEmotionLearner', 'image_processing', 'datasets'] diff --git a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/README.md b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/README.md new file mode 100644 index 0000000000..831877c501 --- /dev/null +++ b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/README.md @@ -0,0 +1,8 @@ +# Image-based Facial Expression Recognition + +This module provides the implementation of the efficient ensemble-based convolutional neural networks [[1]](https://ojs.aaai.org/index.php/AAAI/article/view/6037) for image-based facial expression recognition. +## References + +[1] +[Siqueira, Henrique, Sven Magg, and Stefan Wermter. "Efficient facial feature learning with wide ensemble-based convolutional neural networks." Proceedings of the AAAI conference on artificial intelligence. Vol. 34. No. 04. 2020.]( +https://ojs.aaai.org/index.php/AAAI/article/view/6037) \ No newline at end of file diff --git a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/__init__.py b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/__init__.py new file mode 100644 index 0000000000..58d74437a0 --- /dev/null +++ b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/__init__.py @@ -0,0 +1,13 @@ +from opendr.perception.facial_expression_recognition.\ + image_based_facial_emotion_estimation.facial_emotion_learner \ + import FacialEmotionLearner +from opendr.perception.facial_expression_recognition.\ + image_based_facial_emotion_estimation.algorithm.model.esr_9 import ESR +from opendr.perception.facial_expression_recognition.\ + image_based_facial_emotion_estimation.algorithm.utils import datasets +from opendr.perception.facial_expression_recognition.\ + image_based_facial_emotion_estimation.algorithm.utils import image_processing +from opendr.perception.facial_expression_recognition.\ + image_based_facial_emotion_estimation.algorithm.utils import plotting + +__all__ = ['FacialEmotionLearner', 'ESR', 'datasets', 'image_processing', 'plotting'] diff --git a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/__init__.py b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/model/__init__.py b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/model/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/model/cbam.py b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/model/cbam.py new file mode 100644 index 0000000000..b05666b809 --- /dev/null +++ b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/model/cbam.py @@ -0,0 +1,113 @@ +""" +Implementation of CBAM attention method (Sanghyun Woo, et al., 2018). +Code taken from: https://github.com/Jongchan/attention-module +""" + +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class BasicConv(nn.Module): + def __init__(self, in_planes, out_planes, kernel_size, stride=1, padding=0, dilation=1, groups=1, + relu=True, bn=True, bias=False): + super(BasicConv, self).__init__() + self.out_channels = out_planes + self.conv = nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=padding, + dilation=dilation, groups=groups, bias=bias) + self.bn = nn.BatchNorm2d(out_planes, eps=1e-5, momentum=0.01, affine=True) if bn else None + self.relu = nn.ReLU() if relu else None + + def forward(self, x): + x = self.conv(x) + if self.bn is not None: + x = self.bn(x) + if self.relu is not None: + x = self.relu(x) + return x + + +class ChannelPool(nn.Module): + def forward(self, x): + return torch.cat((torch.max(x, 1)[0].unsqueeze(1), torch.mean(x, 1).unsqueeze(1)), dim=1) + # output shape is Nx2xHxW + + +class Flatten(nn.Module): + def forward(self, x): + return x.view(x.size(0), -1) + + +def logsumexp_2d(tensor): + tensor_flatten = tensor.view(tensor.size(0), tensor.size(1), -1) + s, _ = torch.max(tensor_flatten, dim=2, keepdim=True) + outputs = s + (tensor_flatten - s).exp().sum(dim=2, keepdim=True).log() + return outputs + + +class ChannelGate(nn.Module): + def __init__(self, gate_channels, reduction_ratio=16, pool_types=['avg', 'max']): + super(ChannelGate, self).__init__() + self.gate_channels = gate_channels + self.mlp = nn.Sequential( + Flatten(), + nn.Linear(gate_channels, gate_channels // reduction_ratio), + nn.ReLU(), + nn.Linear(gate_channels // reduction_ratio, gate_channels) + ) + self.pool_types = pool_types + + def forward(self, x): + channel_att_sum = None + for pool_type in self.pool_types: + if pool_type == 'avg': + avg_pool = F.avg_pool2d(x, (x.size(2), x.size(3)), stride=(x.size(2), x.size(3))) + channel_att_raw = self.mlp(avg_pool) + elif pool_type == 'max': + max_pool = F.max_pool2d(x, (x.size(2), x.size(3)), stride=(x.size(2), x.size(3))) + channel_att_raw = self.mlp(max_pool) + elif pool_type == 'lp': + lp_pool = F.lp_pool2d(x, 2, (x.size(2), x.size(3)), stride=(x.size(2), x.size(3))) + channel_att_raw = self.mlp(lp_pool) + elif pool_type == 'lse': + # LSE pool only + lse_pool = logsumexp_2d(x) + channel_att_raw = self.mlp(lse_pool) + + if channel_att_sum is None: + channel_att_sum = channel_att_raw + else: + channel_att_sum = channel_att_sum + channel_att_raw + + scale = torch.sigmoid(channel_att_sum).unsqueeze(2).unsqueeze(3).expand_as(x) + return x * scale, torch.sigmoid(channel_att_sum) + + +class SpatialGate(nn.Module): + def __init__(self): + super(SpatialGate, self).__init__() + kernel_size = 7 + self.compress = ChannelPool() + self.spatial = BasicConv(2, 1, kernel_size, stride=1, padding=(kernel_size-1) // 2, relu=False) + # added by me + self.avg_pool2d = nn.AdaptiveAvgPool2d(1) + self.tanh = torch.nn.Tanh() + + def forward(self, x): # x shape is NxCxHxW + x_compress = 
self.compress(x) # Shape: Nx2xHxW + x_out = self.spatial(x_compress) # Shape: Nx1xHxW + scale = torch.sigmoid(x_out).expand_as(x) + + return x * scale, torch.sigmoid(x_out) # Shape: NxCxHxW + + +class CBAM(nn.Module): + def __init__(self, gate_channels, reduction_ratio=16, pool_types=['avg', 'max']): + super(CBAM, self).__init__() + self.ChannelGate = ChannelGate(gate_channels, reduction_ratio, pool_types) + self.SpatialGate = SpatialGate() + + def forward(self, x): + x_out, attn_ch = self.ChannelGate(x) + x_out, attn_sp = self.SpatialGate(x_out) + return x_out, attn_ch, attn_sp # Shape: NxCxHxW diff --git a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/model/diversified_esr.py b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/model/diversified_esr.py new file mode 100644 index 0000000000..597548e143 --- /dev/null +++ b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/model/diversified_esr.py @@ -0,0 +1,220 @@ +""" +Implementation of Diversified ESR (Heidari, et al., 2022) trained on AffectNet (Mollahosseini et al., 2017) for facial +expresison recognition. + +Code is adapted based on: +https://github.com/siqueira-hc/Efficient-Facial-Feature-Learning-with-Wide-Ensemble-based-Convolutional-Neural-Networks + +""" + +# Standard libraries +import torch.nn.functional as F +import torch.nn as nn +import torch +import copy +from .cbam import CBAM + + +class Base(nn.Module): + """ + The base of the network (Ensembles with Shared Representations, ESRs) is responsible for learning low- and + mid-level representations from the input data that are shared with an ensemble of convolutional branches + on top of the architecture. + """ + + def __init__(self): + super(Base, self).__init__() + + # Convolutional layers + self.conv1 = nn.Conv2d(3, 64, 5, 1) + self.conv2 = nn.Conv2d(64, 128, 3, 1) + self.conv3 = nn.Conv2d(128, 128, 3, 1) + self.conv4 = nn.Conv2d(128, 128, 3, 1) + + # Batch-normalization layers + self.bn1 = nn.BatchNorm2d(64) + self.bn2 = nn.BatchNorm2d(128) + self.bn3 = nn.BatchNorm2d(128) + self.bn4 = nn.BatchNorm2d(128) + + # Attention layers + self.cbam1 = CBAM(gate_channels=64, reduction_ratio=16, pool_types=['avg', 'max']) + self.cbam2 = CBAM(gate_channels=128, reduction_ratio=16, pool_types=['avg', 'max']) + self.cbam3 = CBAM(gate_channels=128, reduction_ratio=16, pool_types=['avg', 'max']) + self.cbam4 = CBAM(gate_channels=128, reduction_ratio=16, pool_types=['avg', 'max']) + + # Max-pooling layer + self.pool = nn.MaxPool2d(2, 2) + + def forward(self, x): + # Convolutional, batch-normalization and pooling layers for representation learning + x_shared_representations = F.relu(self.bn1(self.conv1(x))) + x_shared_representations, _, _ = self.cbam1(x_shared_representations) + + x_shared_representations = self.pool(F.relu(self.bn2(self.conv2(x_shared_representations)))) + x_shared_representations, _, _ = self.cbam2(x_shared_representations) + + x_shared_representations = F.relu(self.bn3(self.conv3(x_shared_representations))) + x_shared_representations, _, _ = self.cbam3(x_shared_representations) + + x_shared_representations = self.pool(F.relu(self.bn4(self.conv4(x_shared_representations)))) + x_shared_representations, _, _ = self.cbam4(x_shared_representations) + + return x_shared_representations + + +class ConvolutionalBranch(nn.Module): + """ + Convolutional branches that compose the ensemble in ESRs. 
Each branch was trained on a sub-training + set from the AffectNet dataset to learn complementary representations from the data (Siqueira et al., 2020). + + Note that, the second last layer provides eight discrete emotion labels whereas the last layer provides + continuous values of arousal and valence levels. + """ + + def __init__(self): + super(ConvolutionalBranch, self).__init__() + + # Convolutional layers + self.conv1 = nn.Conv2d(128, 128, 3, 1) + self.conv2 = nn.Conv2d(128, 256, 3, 1) + self.conv3 = nn.Conv2d(256, 256, 3, 1) + self.conv4 = nn.Conv2d(256, 512, 3, 1, 1) + + # Batch-normalization layers + self.bn1 = nn.BatchNorm2d(128) + self.bn2 = nn.BatchNorm2d(256) + self.bn3 = nn.BatchNorm2d(256) + self.bn4 = nn.BatchNorm2d(512) + + self.cbam1 = CBAM(gate_channels=128, reduction_ratio=16, pool_types=['avg', 'max']) + self.cbam2 = CBAM(gate_channels=256, reduction_ratio=16, pool_types=['avg', 'max']) + self.cbam3 = CBAM(gate_channels=256, reduction_ratio=16, pool_types=['avg', 'max']) + self.cbam4 = CBAM(gate_channels=512, reduction_ratio=16, pool_types=['avg', 'max']) + + # Second last, fully-connected layer related to discrete emotion labels + self.fc = nn.Linear(512, 8) + + # Last, fully-connected layer related to continuous affect levels (arousal and valence) + self.fc_dimensional = nn.Linear(8, 2) + + # Max-pooling layer + self.pool = nn.MaxPool2d(2, 2) + + # Global average pooling layer + self.global_pool = nn.AdaptiveAvgPool2d(1) + + def forward(self, x_shared_representations): + # Convolutional, batch-normalization and pooling layers + x_conv_branch = F.relu(self.bn1(self.conv1(x_shared_representations))) + x_conv_branch, _, _ = self.cbam1(x_conv_branch) + + x_conv_branch = self.pool(F.relu(self.bn2(self.conv2(x_conv_branch)))) + x_conv_branch, _, _ = self.cbam2(x_conv_branch) + + x_conv_branch = F.relu(self.bn3(self.conv3(x_conv_branch))) + x_conv_branch, _, _ = self.cbam3(x_conv_branch) + + x_conv_branch = F.relu(self.bn4(self.conv4(x_conv_branch))) + x_conv_branch, attn_ch, attn_sp = self.cbam4(x_conv_branch) # attn_mat of size 32x1x6x6 + + # Prepare features for Classification & Regression + x_conv_branch = self.global_pool(x_conv_branch) # N x 512 x 1 x 1 + x_conv_branch = x_conv_branch.view(-1, 512) # N x 512 + + # Fully connected layer for expression recognition + discrete_emotion = self.fc(x_conv_branch) + + # Fully connected layer for affect perception + x_conv_branch = F.relu(discrete_emotion) + continuous_affect = self.fc_dimensional(x_conv_branch) + + return discrete_emotion, continuous_affect, attn_ch, attn_sp + + +class DiversifiedESR(nn.Module): + """ + The unified ensemble architecture composed of two building blocks the Base and ConvolutionalBranch + """ + + def __init__(self, device, ensemble_size=9): + """ + Loads DiversifiedESR. + + :param device: Device to load ESR: GPU or CPU. 
+ :param ensemble_size: Number of branches + + """ + + super(DiversifiedESR, self).__init__() + + # Base of ESR-9 as described in the docstring (see mark 1) + self.device = device + self.ensemble_size = ensemble_size + + self.base = Base() + self.base.to(self.device) + + self.convolutional_branches = [] + for i in range(ensemble_size): + self.add_branch() + + self.convolutional_branches = nn.Sequential(*self.convolutional_branches) + self.to(device) + + def get_ensemble_size(self): + return len(self.convolutional_branches) + + def add_branch(self): + self.convolutional_branches.append(ConvolutionalBranch()) + self.convolutional_branches[-1].to(self.device) + + def to_state_dict(self): + state_dicts = [copy.deepcopy(self.base.state_dict())] + for b in self.convolutional_branches: + state_dicts.append(copy.deepcopy(b.state_dict())) + + return state_dicts + + def to_device(self, device_to_process="cpu"): + self.to(device_to_process) + self.base.to(device_to_process) + + for b_td in self.convolutional_branches: + b_td.to(device_to_process) + + def reload(self, best_configuration): + self.base.load_state_dict(best_configuration[0]) + + for i in range(self.get_ensemble_size()): + self.convolutional_branches[i].load_state_dict(best_configuration[i + 1]) + + def forward(self, x): + """ + Forward method of ESR. + + :param x: (ndarray) Input data. + :return: A list of emotions and affect values from each convolutional branch in the ensemble. + """ + + # List of emotions and affect values from the ensemble + emotions = [] + affect_values = [] + attn_heads_sp = [] + attn_heads_ch = [] + + # Get shared representations + x_shared_representations = self.base(x) + + # Add to the lists of predictions outputs from each convolutional branch in the ensemble + for branch in self.convolutional_branches: + output_emotion, output_affect, attn_ch, attn_sp = branch(x_shared_representations) + emotions.append(output_emotion) + affect_values.append(output_affect) + attn_heads_sp.append(attn_sp[:, 0, :, :]) + attn_heads_ch.append(attn_ch) + attn_heads_sp = torch.stack(attn_heads_sp) + attn_heads_ch = torch.stack(attn_heads_ch) + attn_heads = [attn_heads_sp, attn_heads_ch] + + return emotions, affect_values, attn_heads diff --git a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/model/esr_9.py b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/model/esr_9.py new file mode 100644 index 0000000000..b9058b2972 --- /dev/null +++ b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/model/esr_9.py @@ -0,0 +1,202 @@ + +""" +Implementation of ESR-9 (Siqueira et al., 2020) trained on AffectNet (Mollahosseini et al., 2017) for emotion +and affect perception. + +Modified based on: +https://github.com/siqueira-hc/Efficient-Facial-Feature-Learning-with-Wide-Ensemble-based-Convolutional-Neural-Networks + +Reference: + Siqueira, H., Magg, S. and Wermter, S., 2020. Efficient Facial Feature Learning with Wide Ensemble-based + Convolutional Neural Networks. Proceedings of the Thirty-Fourth AAAI Conference on Artificial Intelligence + (AAAI-20), pages 1–1, New York, USA. + + Mollahosseini, A., Hasani, B. and Mahoor, M.H., 2017. AffectNet: A database for facial expression, valence, + and arousal computing in the wild. IEEE Transactions on Affective Computing, 10(1), pp.18-31. 
+""" + +import torch.nn.functional as F +import torch.nn as nn +import copy + + +class Base(nn.Module): + """ + The base of the network (Ensembles with Shared Representations, ESRs) is responsible for learning low- and + mid-level representations from the input data that are shared with an ensemble of convolutional branches + on top of the architecture. + + In our paper (Siqueira et al., 2020), it is called shared layers or shared representations. + """ + + def __init__(self): + super(Base, self).__init__() + + # Convolutional layers + self.conv1 = nn.Conv2d(3, 64, 5, 1) + self.conv2 = nn.Conv2d(64, 128, 3, 1) + self.conv3 = nn.Conv2d(128, 128, 3, 1) + self.conv4 = nn.Conv2d(128, 128, 3, 1) + + # Batch-normalization layers + self.bn1 = nn.BatchNorm2d(64) + self.bn2 = nn.BatchNorm2d(128) + self.bn3 = nn.BatchNorm2d(128) + self.bn4 = nn.BatchNorm2d(128) + + # Max-pooling layer + self.pool = nn.MaxPool2d(2, 2) + + def forward(self, x): + # Convolutional, batch-normalization and pooling layers for representation learning + x_shared_representations = F.relu(self.bn1(self.conv1(x))) + x_shared_representations = self.pool(F.relu(self.bn2(self.conv2(x_shared_representations)))) + x_shared_representations = F.relu(self.bn3(self.conv3(x_shared_representations))) + x_shared_representations = self.pool(F.relu(self.bn4(self.conv4(x_shared_representations)))) + + return x_shared_representations + + +class ConvolutionalBranch(nn.Module): + """ + Convolutional branches that compose the ensemble in ESRs. Each branch was trained on a sub-training + set from the AffectNet dataset to learn complementary representations from the data (Siqueira et al., 2020). + + Note that, the second last layer provides eight discrete emotion labels whereas the last layer provides + continuous values of arousal and valence levels. 
+ """ + + def __init__(self): + super(ConvolutionalBranch, self).__init__() + + # Convolutional layers + self.conv1 = nn.Conv2d(128, 128, 3, 1) + self.conv2 = nn.Conv2d(128, 256, 3, 1) + self.conv3 = nn.Conv2d(256, 256, 3, 1) + self.conv4 = nn.Conv2d(256, 512, 3, 1, 1) + + # Batch-normalization layers + self.bn1 = nn.BatchNorm2d(128) + self.bn2 = nn.BatchNorm2d(256) + self.bn3 = nn.BatchNorm2d(256) + self.bn4 = nn.BatchNorm2d(512) + + # Second last, fully-connected layer related to discrete emotion labels + self.fc = nn.Linear(512, 8) + + # Last, fully-connected layer related to continuous affect levels (arousal and valence) + self.fc_dimensional = nn.Linear(8, 2) + + # Pooling layers + # Max-pooling layer + self.pool = nn.MaxPool2d(2, 2) + + # Global average pooling layer + self.global_pool = nn.AdaptiveAvgPool2d(1) + + def forward(self, x_shared_representations): + # Convolutional, batch-normalization and pooling layers + x_conv_branch = F.relu(self.bn1(self.conv1(x_shared_representations))) + x_conv_branch = self.pool(F.relu(self.bn2(self.conv2(x_conv_branch)))) + x_conv_branch = F.relu(self.bn3(self.conv3(x_conv_branch))) + x_conv_branch = self.global_pool(F.relu(self.bn4(self.conv4(x_conv_branch)))) + x_conv_branch = x_conv_branch.view(-1, 512) + + # Fully connected layer for emotion perception + discrete_emotion = self.fc(x_conv_branch) + + # Application of the ReLU function to neurons related to discrete emotion labels + x_conv_branch = F.relu(discrete_emotion) + + # Fully connected layer for affect perception + continuous_affect = self.fc_dimensional(x_conv_branch) + + # Returns activations of the discrete emotion output layer and arousal and valence levels + return discrete_emotion, continuous_affect + + +class ESR(nn.Module): + """ + ESR is the unified ensemble architecture composed of two building blocks the Base and ConvolutionalBranch + classes as described below by Siqueira et al. (2020): + + 'An ESR consists of two building blocks. (1) The base (class Base) of the network is an array of convolutional + layers for low- and middle-level feature learning. (2) These informative features are then shared with + independent convolutional branches (class ConvolutionalBranch) that constitute the ensemble.' + """ + + def __init__(self, device, ensemble_size=9): + """ + Loads ESR-9. + :param device: Device to load ESR-9: GPU or CPU. 
+ :param ensemble_size: Number of branches + """ + + super(ESR, self).__init__() + + self.device = device + self.ensemble_size = ensemble_size + + self.base = Base() + self.base.to(device) + + # Load 9 convolutional branches that composes ESR-9 as described in the docstring (see mark 2) + self.convolutional_branches = [] + + for i in range(ensemble_size): + self.add_branch() + + self.convolutional_branches = nn.Sequential(*self.convolutional_branches) + self.to(device) + + # Evaluation mode on + self.eval() + + def get_ensemble_size(self): + return len(self.convolutional_branches) + + def add_branch(self): + self.convolutional_branches.append(ConvolutionalBranch()) + self.convolutional_branches[-1].to(self.device) + + def to_state_dict(self): + state_dicts = [copy.deepcopy(self.base.state_dict())] + for b in self.convolutional_branches: + state_dicts.append(copy.deepcopy(b.state_dict())) + + return state_dicts + + def to_device(self, device_to_process="cpu"): + self.to(device_to_process) + self.base.to(device_to_process) + + for b_td in self.convolutional_branches: + b_td.to(device_to_process) + + def reload(self, best_configuration): + self.base.load_state_dict(best_configuration[0]) + for i in range(self.get_ensemble_size()): + self.convolutional_branches[i].load_state_dict(best_configuration[i + 1]) + + def forward(self, x): + """ + Forward method of ESR-9. + + :param x: (ndarray) Input data. + :return: A list of emotions and affect values from each convolutional branch in the ensemble. + """ + + # List of emotions and affect values from the ensemble + emotions = [] + affect_values = [] + attn_heads = [] + + # Get shared representations + x_shared_representations = self.base(x) + # Add to the lists of predictions outputs from each convolutional branch in the ensemble + for branch in self.convolutional_branches: + output_emotion, output_affect = branch(x_shared_representations) + emotions.append(output_emotion) + affect_values.append(output_affect) + attn_heads = affect_values + return emotions, affect_values, attn_heads diff --git a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/__init__.py b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/datasets.py b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/datasets.py new file mode 100644 index 0000000000..265ad41673 --- /dev/null +++ b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/datasets.py @@ -0,0 +1,405 @@ +""" +This module implements methods to handle datasets. 
+Modified based on: +https://github.com/siqueira-hc/Efficient-Facial-Feature-Learning-with-Wide-Ensemble-based-Convolutional-Neural-Networks +""" + +# External Libraries +from torchvision.transforms import ToTensor, Normalize +from torch.utils.data import Dataset +from PIL import Image +import numpy as np +import pandas +import torch + +# Standard Libraries +from os import path, listdir +import re + +# OpenDR imports +from opendr.perception.facial_expression_recognition.image_based_facial_emotion_estimation.algorithm.utils import \ + image_processing +from opendr.engine.datasets import DatasetIterator + + +# AffectNet (Categorical) +class AffectNetCategorical(Dataset, DatasetIterator): + def __init__(self, idx_set=0, max_loaded_images_per_label=1000, transforms=None, is_norm_by_mean_std=True, + base_path_to_affectnet=None): + """ + This class follows the experimental methodology conducted by (Mollahosseini et al., 2017). + + Refs. + Mollahosseini, A., Hasani, B. and Mahoor, M.H., 2017. Affectnet: A database for facial expression, + valence, and arousal computing in the wild. IEEE Transactions on Affective Computing. + + :param idx_set: Labeled = 0, Unlabeled = 1, Validation = 2, Test = Not published by + (Mollahosseini et al., 2017) + :param max_loaded_images_per_label: Maximum number of images per label + :param transforms: transforms (callable, optional): Optional transform to be applied on a sample. + """ + Dataset.__init__(self) + DatasetIterator.__init__(self) + + self.idx_set = idx_set + self.max_loaded_images_per_label = max_loaded_images_per_label + self.transforms = transforms + self.base_path_to_affectnet = base_path_to_affectnet + self.affectnet_sets = {'supervised': 'Training_Labeled/', + 'unsupervised': 'Training_Unlabeled/', + 'validation': 'Validation/'} + + # Default values + self.num_labels = 8 + if is_norm_by_mean_std: + self.mean = [149.35457 / 255., 117.06477 / 255., 102.67609 / 255.] + self.std = [69.18084 / 255., 61.907074 / 255., 60.435623 / 255.] + else: + self.mean = [0.0, 0.0, 0.0] + self.std = [1.0, 1.0, 1.0] + + # Load data + self.loaded_data = self._load() + print('Size of the loaded set: {}'.format(self.loaded_data[0].shape[0])) + + def __len__(self): + return self.loaded_data[0].shape[0] + + def __getitem__(self, idx): + sample = {'image': self.loaded_data[0][idx], 'emotion': self.loaded_data[1][idx]} + sample['image'] = Image.fromarray(sample['image']) + + if not (self.transforms is None): + sample['image'] = self.transforms(sample['image']) + + return Normalize(mean=self.mean, std=self.std)(ToTensor()(sample['image'])), sample['emotion'] + + def online_normalization(self, x): + return Normalize(mean=self.mean, std=self.std)(ToTensor()(x)) + + def norm_input_to_orig_input(self, x): + x_r = torch.zeros(x.size()) + x_r[0] = (x[2] * self.std[2]) + self.mean[2] + x_r[1] = (x[1] * self.std[1]) + self.mean[1] + x_r[2] = (x[0] * self.std[0]) + self.mean[0] + return x_r + + @staticmethod + def get_class(idx): + classes = { + 0: 'Neutral', + 1: 'Happy', + 2: 'Sad', + 3: 'Surprise', + 4: 'Fear', + 5: 'Disgust', + 6: 'Anger', + 7: 'Contempt'} + + return classes[idx] + + @staticmethod + def _parse_to_label(idx): + """ + The file name follows this structure: 'ID_s_exp_s_val_s_aro_.jpg' Ex. '0000000s7s-653s653.jpg'. 
+ + Documentation of labels adopted by AffectNet's authors: + Expression: expression ID of the face (0: Neutral, 1: Happy, 2: Sad, 3: Surprise, 4: Fear, 5: Disgust, 6: + Anger, 7: Contempt, 8: None, 9: Uncertain, 10: No-Face) + Valence: valence value of the expression in interval [-1,+1] (for Uncertain and No-face categories the value + is -2) + Arousal: arousal value of the expression in interval [-1,+1] (for Uncertain and No-face categories the value + is -2) + + :param idx: File's name + :return: label + """ + + label_info = idx.split('s') + discrete_label = int(label_info[1]) + + return discrete_label if (discrete_label < 8) else -1 + + def _load(self): + data_affect_net, labels_affect_net = [], [] + counter_loaded_images_per_label = [0 for _ in range(self.num_labels)] + + if self.idx_set == 0: + path_folders_affect_net = path.join(self.base_path_to_affectnet, + self.affectnet_sets['supervised']) + elif self.idx_set == 1: + path_folders_affect_net = path.join(self.base_path_to_affectnet, + self.affectnet_sets['unsupervised']) + else: + path_folders_affect_net = path.join(self.base_path_to_affectnet, + self.affectnet_sets['validation']) + + folders_affect_net = sort_numeric_directories(listdir(path_folders_affect_net)) + # Randomize folders + if self.idx_set < 2: + np.random.shuffle(folders_affect_net) + + for f_af in folders_affect_net: + path_images_affect_net = path.join(path_folders_affect_net, f_af) + + images_affect_net = np.sort(np.array(listdir(path_images_affect_net))) + # Randomize images + if self.idx_set < 2: + np.random.shuffle(images_affect_net) + + for file_name_image_affect_net in images_affect_net: + lbl = self._parse_to_label(file_name_image_affect_net) + + if (lbl >= 0) and (counter_loaded_images_per_label[int(lbl)] < self.max_loaded_images_per_label): + img = np.array(image_processing.read(path.join(path_images_affect_net, file_name_image_affect_net)), + np.uint8) + + data_affect_net.append(img) + labels_affect_net.append(lbl) + + counter_loaded_images_per_label[int(lbl)] += 1 + + has_loading_finished = (np.sum(counter_loaded_images_per_label) >= ( + self.max_loaded_images_per_label * self.num_labels)) + + if has_loading_finished: + break + + if has_loading_finished: + break + + return [np.array(data_affect_net), np.array(labels_affect_net)] + + +# AffectNet (Dimensional) +class AffectNetDimensional(Dataset, DatasetIterator): + def __init__(self, idx_set=0, max_loaded_images_per_label=1000, transforms=None, is_norm_by_mean_std=True, + base_path_to_affectnet=None): + """ + This class follows the experimental methodology conducted by (Mollahosseini et al., 2017). + + Refs. + Mollahosseini, A., Hasani, B. and Mahoor, M.H., 2017. Affectnet: A database for facial expression, valence, + and arousal computing in the wild. IEEE Transactions on Affective Computing. + + :param idx_set: Labeled = 0, Unlabeled = 1, Validation = 2, Test = Not published by (Mollahosseini et al., + 2017) + :param max_loaded_images_per_label: Maximum number of images per label + :param transforms: transforms (callable, optional): Optional transform to be applied on a sample. 
+ """ + Dataset.__init__(self) + DatasetIterator.__init__(self) + + self.idx_set = idx_set + self.max_loaded_images_per_label = max_loaded_images_per_label + self.transforms = transforms + self.base_path_to_affectnet = base_path_to_affectnet + self.affectnet_sets = {'supervised': 'Training_Labeled/', + 'unsupervised': 'Training_Unlabeled/', + 'validation': 'Validation/'} + + # Default values + self.num_labels = 4 + if is_norm_by_mean_std: + self.mean = [149.35457 / 255., 117.06477 / 255., 102.67609 / 255.] + self.std = [69.18084 / 255., 61.907074 / 255., 60.435623 / 255.] + else: + self.mean = [0.0, 0.0, 0.0] + self.std = [1.0, 1.0, 1.0] + + # Load data + self.loaded_data = self._load() + print('Size of the loaded set: {}'.format(self.loaded_data[0].shape[0])) + + def __len__(self): + return self.loaded_data[0].shape[0] + + def __getitem__(self, idx): + sample = {'image': self.loaded_data[0][idx], 'emotion': self.loaded_data[1][idx]} + sample['image'] = Image.fromarray(sample['image']) + + if not (self.transforms is None): + sample['image'] = self.transforms(sample['image']) + + return Normalize(mean=self.mean, std=self.std)(ToTensor()(sample['image'])), sample['emotion'] + + def online_normalization(self, x): + return Normalize(mean=self.mean, std=self.std)(ToTensor()(x)) + + def norm_input_to_orig_input(self, x): + x_r = torch.zeros(x.size()) + x_r[0] = (x[2] * self.std[2]) + self.mean[2] + x_r[1] = (x[1] * self.std[1]) + self.mean[1] + x_r[2] = (x[0] * self.std[0]) + self.mean[0] + return x_r + + @staticmethod + def get_class(idx): + return idx + + @staticmethod + def _parse_to_label(idx): + """ + The file name follows this structure: 'ID_s_exp_s_val_s_aro_.jpg' Ex. '0000000s7s-653s653.jpg'. + + Documentation of labels adopted by AffectNet's authors: + Expression: expression ID of the face (0: Neutral, 1: Happy, 2: Sad, 3: Surprise, 4: Fear, 5: Disgust, 6: + Anger, 7: Contempt, 8: None, 9: Uncertain, 10: No-Face) + Valence: valence value of the expression in interval [-1,+1] (for Uncertain and No-face categories the value + is -2) + Arousal: arousal value of the expression in interval [-1,+1] (for Uncertain and No-face categories the value + is -2) + + :param idx: File's name + :return: label + """ + + label_info = idx.split('s') + + discrete_label = int(label_info[1]) + valence = np.float32(label_info[2]) / 1000.0 + arousal = np.float32(label_info[-1].split('.')[0]) / 1000.0 + + if (valence <= -2.0) or (arousal <= -2.0) or (discrete_label >= 8): + quadrant = -1 + else: + if (valence >= 0) and (arousal >= 0): + quadrant = 0 + elif (valence < 0) and (arousal >= 0): + quadrant = 1 + elif (valence < 0) and (arousal < 0): + quadrant = 2 + else: + quadrant = 3 + + return valence, arousal, quadrant + + def _load(self): + data_affect_net, labels_affect_net = [], [] + counter_loaded_images_per_label = [0 for _ in range(self.num_labels)] + + if self.idx_set == 0: + path_folders_affect_net = path.join(self.base_path_to_affectnet, + self.affectnet_sets['supervised']) + elif self.idx_set == 1: + path_folders_affect_net = path.join(self.base_path_to_affectnet, + self.affectnet_sets['unsupervised']) + else: + path_folders_affect_net = path.join(self.base_path_to_affectnet, + self.affectnet_sets['validation']) + + folders_affect_net = sort_numeric_directories(listdir(path_folders_affect_net)) + # Randomize folders + if self.idx_set < 2: + np.random.shuffle(folders_affect_net) + + for f_af in folders_affect_net: + path_images_affect_net = path.join(path_folders_affect_net, f_af) + + images_affect_net = 
np.sort(np.array(listdir(path_images_affect_net))) + # Randomize images + if self.idx_set < 2: + np.random.shuffle(images_affect_net) + + for file_name_image_affect_net in images_affect_net: + valence, arousal, quadrant = self._parse_to_label(file_name_image_affect_net) + + if (quadrant >= 0) and (counter_loaded_images_per_label[int(quadrant)] < + self.max_loaded_images_per_label): + img = np.array(image_processing.read(path.join(path_images_affect_net, file_name_image_affect_net)), + np.uint8) + data_affect_net.append(img) + + labels_affect_net.append([np.float32(valence), np.float32(arousal)]) + + counter_loaded_images_per_label[int(quadrant)] += 1 + + has_loading_finished = (np.sum(counter_loaded_images_per_label) >= + (self.max_loaded_images_per_label * self.num_labels)) + + if has_loading_finished: + break + + if has_loading_finished: + break + + return [np.array(data_affect_net), np.array(labels_affect_net)] + + +# Other methods +def sort_numeric_directories(dir_names): + return sorted(dir_names, key=lambda x: (int(re.sub("\D", "", x)), x)) + + +def _generate_single_file_name(img_id, expression, valence, arousal): + valence = int(valence * 1000) + arousal = int(arousal * 1000) + return '%07ds%ds%ds%d.jpg' % (img_id, expression, valence, arousal) + + +def pre_process_affect_net(base_path_to_images, base_path_to_annotations, base_destination_path, set_index): + """ + Pre-process the AffectNet dataset. Faces are cropped and resized to 96 x 96 pixels. + The images are organized in folders with 500 images each. The test set had not been released + when this experiment was carried out. + + :param base_path_to_images: (string) Path to images. + :param base_path_to_annotations: (string) Path to annotations. + :param base_destination_path: (string) destination path to save preprocessed data. + :param set_index: (int = {0, 1, 2}) set_index = 0 process the automatically annotated images. + set_index = 1 process the manually annotated images: training set. + set_index = 2 process the manually annotated images: validation set. + :return: (void) + """ + + print('preprocessing started') + assert ((set_index < 3) and (set_index >= 0)), "set_index must be 0, 1 or 2." 
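To make the file-name convention above concrete, here is a small worked example of encoding and decoding one AffectNet file name (editor's illustration; the values are arbitrary, and the decoding mirrors `_parse_to_label` in the dataset classes above).

```python
from opendr.perception.facial_expression_recognition.image_based_facial_emotion_estimation.algorithm.utils.datasets \
    import _generate_single_file_name

name = _generate_single_file_name(7, 1, 0.5, -0.25)
print(name)                                          # '0000007s1s500s-250.jpg'

# Decoding, as done by _parse_to_label above
parts = name.split('s')
expression = int(parts[1])                           # 1 -> 'Happy'
valence = float(parts[2]) / 1000.0                   # 0.5
arousal = float(parts[-1].split('.')[0]) / 1000.0    # -0.25
```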
+ + annotation_folders = ['Automatically_Annotated_extracted/', 'Manually_Annotated_extracted/', + 'Manually_Annotated_extracted/'] + destination_set_folders = ['Training_Unlabeled/', 'Training_Labeled/', + 'Validation/'] + annotation_file_names = ['automatically_annotated.csv', 'training.csv', 'validation.csv'] + + image_id = 0 + error_image_id = [] + img_size = (96, 96) + num_images_per_folder = 500 + + annotation_file = pandas.read_csv(path.join(base_path_to_annotations, annotation_file_names[set_index])) + print('annotation file loaded') + + for line in range(image_id, annotation_file.shape[0]): + try: + # Read image + img_file_name = annotation_file.get('subDirectory_filePath')[line] + img_file_name = img_file_name.split("/")[-1] + img_full_path = path.join(base_path_to_images, annotation_folders[set_index], img_file_name) + img = image_processing.read(img_full_path) + + # Crop face + x = int(annotation_file.get('face_x')[line]) + y = int(annotation_file.get('face_y')[line]) + w = int(annotation_file.get('face_width')[line]) + h = int(annotation_file.get('face_height')[line]) + img = img[x:x + w, y:y + h, :] + + # Resize image + img = image_processing.resize(img, img_size) + + # Save image + folder = str(image_id // num_images_per_folder) + exp = annotation_file.get('expression')[line] + val = annotation_file.get('valence')[line] + aro = annotation_file.get('arousal')[line] + file_name = _generate_single_file_name(image_id, exp, val, aro) + image_processing.write(img, path.join(base_destination_path, destination_set_folders[set_index], folder), file_name) + image_id += 1 + except Exception: + print('ERROR: The image ID %d is corrupted.' % image_id) + error_image_id.append(image_id) + + print('Dataset has been processed.') + print('Images successfully processed: %d' % (image_id - len(error_image_id))) + print('Images processed with error: %d' % len(error_image_id)) + print('Image IDs processed with error: %s' % error_image_id) diff --git a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/diversity.py b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/diversity.py new file mode 100644 index 0000000000..52397795d5 --- /dev/null +++ b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/diversity.py @@ -0,0 +1,64 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
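For completeness, a hedged sketch of invoking the AffectNet pre-processing defined above; all paths are placeholders. The destination folders it writes match the `affectnet_sets` layout expected by `AffectNetCategorical`/`AffectNetDimensional`.

```python
from opendr.perception.facial_expression_recognition.image_based_facial_emotion_estimation.algorithm.utils \
    import datasets

# set_index=1 -> 'training.csv' (manually annotated) written under 'Training_Labeled/'
datasets.pre_process_affect_net(
    base_path_to_images="/data/AffectNet/images",            # placeholder
    base_path_to_annotations="/data/AffectNet/annotations",  # placeholder
    base_destination_path="/data/AffectNet/preprocessed",    # placeholder
    set_index=1,
)

# The loaders above then point at the same destination root
train_set = datasets.AffectNetCategorical(
    idx_set=0,                        # 0 -> 'Training_Labeled/'
    max_loaded_images_per_label=100,
    base_path_to_affectnet="/data/AffectNet/preprocessed",
)
```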
+ +""" +Implementation of diversity calculation between features extracted by different branches of ESR +""" + +import torch +import torch.nn as nn + + +class BranchDiversity(nn.Module): + def __init__(self, ): + super(BranchDiversity, self).__init__() + self.direct_div = 0 + self.det_div = 0 + self.logdet_div = 0 + + def forward(self, x, type='spatial'): + + num_branches = x.size(0) + gamma = 10 + snm = torch.zeros((num_branches, num_branches)) + + # Spatial attnention diversity + if type == 'spatial': # num_branch x batch_size x 6 x 6 + # diversity between spatial attention heads + for i in range(num_branches): + for j in range(num_branches): + if i != j: + diff = torch.exp(-1 * gamma * torch.sum(torch.square(x[i, :, :, :] - x[j, :, :, :]), (1, 2))) + # size: batch_size + diff = torch.mean(diff) # (1/num_branches) * torch.sum(diff) # size: 1 + snm[i, j] = diff + self.direct_div = torch.sum(snm) + self.det_div = -1 * torch.det(snm) + self.logdet_div = -1 * torch.logdet(snm) + + # Channel attn diversity + elif type == 'channel': # num_branch x batch_size x 512 + # diversity between channels of attention heads + for i in range(num_branches): + for j in range(num_branches): + if i != j: + diff = torch.exp( + -1 * gamma * torch.sum(torch.square(x[i, :, :] - x[j, :, :]), 1)) # size: batch_size + diff = torch.mean(diff) # (1/num_branches) * torch.sum(diff) # size: 1 + snm[i, j] = diff + self.direct_div = torch.sum(snm) + self.det_div = -1 * torch.det(snm) + self.logdet_div = -1 * torch.logdet(snm) + + return self diff --git a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/image_processing.py b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/image_processing.py new file mode 100644 index 0000000000..166a686f02 --- /dev/null +++ b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/image_processing.py @@ -0,0 +1,174 @@ + +""" +This module implements image processing methods. +Modified based on: +https://github.com/siqueira-hc/Efficient-Facial-Feature-Learning-with-Wide-Ensemble-based-Convolutional-Neural-Networks +""" + +import os +import cv2 + +# Private variables +_MAX_FPS = 30 +_FPS = 5 +_CAP = None + + +# Image I/O methods + +def set_fps(fps): + global _FPS + _FPS = fps + + +def is_video_capture_open(): + global _CAP + + if _CAP is None: + return False + else: + return _CAP.isOpened() + + +def initialize_video_capture(source): + global _CAP + + # If cap is not none, it re-initialize video capture with the new video file + if not (_CAP is None): + _CAP.release() + _CAP = None + + # Read the file + try: + _CAP = cv2.VideoCapture(source) + except Exception as e: + _CAP = None + print("Error on trying to read the following file as video: {}".format(source)) + print("Please, check if the file exists, is an image and is not corrupted.") + print("Supported file format: MPEG-4 (*.mp4).") + print("Check whether working versions of ffmpeg or gstreamer is installed.") + raise e + + return not (_CAP is None) + + +def release_video_capture(): + global _CAP + + try: + _CAP.release() + except Exception as e: + print(e) + finally: + _CAP = None + + return _CAP is None + + +def get_frame(): + """ + Get a frame from a video file. + + :return: (ndarray, float) (Loaded frame, time in seconds). + """ + global _CAP, _FPS + + to_return_frame = None + + if _CAP is None: + print("Error on getting frame. 
cv2.VideoCapture is not initialized.") + else: + try: + if _CAP.isOpened(): + # Skip frames + for i in range(int(_MAX_FPS / _FPS)): + _CAP.grab() + + is_valid_frame, to_return_frame = _CAP.retrieve() + + if not is_valid_frame: + to_return_frame = None + except Exception as e: + print("Error on getting a frame. Please, double-check if the video file is not corrupted.") + print("Supported file format: MPEG-4 (*.mp4).") + print("Check whether working versions of ffmpeg or gstreamer is installed.") + raise e + + return to_return_frame, (_CAP.get(cv2.CAP_PROP_POS_MSEC) / 1000) + + +def read(path_to_image, convert_to_grey_scale=False): + """ + Reads the file as an image. + :param path_to_image: (string) + :param convert_to_grey_scale: (bool) opens an image and converts it to a 2d greyscale image. + :return: (ndarray) 3d (channels last) or 2d image array. + """ + + loaded_image = None + exception = None + + # Read the file + try: + if convert_to_grey_scale: + loaded_image = cv2.imread(path_to_image, cv2.IMREAD_GRAYSCALE) + else: + loaded_image = cv2.imread(path_to_image, cv2.IMREAD_COLOR) + except Exception as e: + loaded_image = None + exception = e + + # Check if the file has been successfully read as an image + if loaded_image is None: + print("Error on trying to read the following file as an image: {}".format(path_to_image)) + print("Please, check if the file exists, is an image and is not corrupted.") + print("Supported file formats: JPEG (*.jpeg and *.jpg) and Portable Network Graphics (*.png).") + + if exception is None: + raise RuntimeError("Unable to read the file (unknown error:).") + else: + raise exception + + return loaded_image + + +def write(image, file_path, file_name): + full_path = os.path.join(file_path, file_name) + + if not os.path.isdir(file_path): + os.makedirs(file_path) + + cv2.imwrite(full_path, image) + + print("Image successfully saved at: %s" % full_path) + + +# Color conversion methods + +def convert_grey_to_bgr(image): + return cv2.cvtColor(image, cv2.COLOR_GRAY2BGR) + + +def convert_bgr_to_grey(image): + return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) + + +def convert_bgr_to_rgb(image): + return cv2.cvtColor(image, cv2.COLOR_BGR2RGB) + + +def convert_rgb_to_grey(image): + return cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) + + +def convert_rgb_to_bgr(image): + return cv2.cvtColor(image, cv2.COLOR_RGB2BGR) + + +# Transformation methods + +def resize(image, output_size=None, f=None): + if f is None: + return cv2.resize(image, output_size) + else: + return cv2.resize(image, output_size, fx=f, fy=f) diff --git a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/plotting.py b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/plotting.py new file mode 100644 index 0000000000..dfc8364b25 --- /dev/null +++ b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm/utils/plotting.py @@ -0,0 +1,109 @@ +""" +This module implements methods for plotting. 
+Modified based on: +https://github.com/siqueira-hc/Efficient-Facial-Feature-Learning-with-Wide-Ensemble-based-Convolutional-Neural-Networks +""" + +from os import path, makedirs +import numpy as np +from matplotlib import pyplot as plt + + +def plot(data, title='Figure', legends=None, axis_x=None, axis_y=None, file_path=None, file_name=None, + figure_size=(16, 9), has_grid=True, limits_axis_y=None, upper_lower_data=None, limits_axis_x=None, + verbose=True): + """ + Plot a graph from a list of x and y values. + + :param data: List of x and y values. + :param title: Title of the graph (String). + :param legends: List of legend (String) for each function. + :param axis_x: Label (String) for the x-axis. + :param axis_y: Label (String) for the y-axis. + :param file_path: File path to save the Figure. If this variable is None the graph is shown to the user, otherwise, + the graph is saved only. + :param file_name: File name to save the Figure. If this variable is None the graph is shown to the user, otherwise, + the graph is saved only. + :param figure_size: Tuple containing the figure size (width, height). + :param has_grid: Flag for a grid background. + :param limits_axis_y: Tuple containing the limits (min, max, step) to the axis y. + :param upper_lower_data: Tuple containing the upper and lower error limits for each data array. + :param limits_axis_x: Tuple containing the limits (min, max, step) to the axis x. + """ + + plots = [] + colors = ['steelblue', 'indianred', 'red', 'cyan', 'magenta', 'yellow', 'black', 'gray', 'sienna', + 'tan', 'plum', 'steelblue', 'lavenderblush', 'pink', 'navajowhite', 'darkorange', + 'darkslateblue', 'blueviolet', 'slategray', 'indianred', 'olive', 'darksalmon', + 'blue', 'green', 'red', 'cyan', 'magenta', 'yellow', 'black', 'gray', 'sienna', + 'tan', 'plum', 'steelblue', 'lavenderblush', 'pink', 'navajowhite', 'darkorange', + 'darkslateblue', 'blueviolet', 'slategray', 'indianred', 'olive', 'darksalmon', + 'blue', 'green', 'red', 'cyan', 'magenta', 'yellow', 'black', 'gray', 'sienna', + 'tan', 'plum', 'steelblue', 'lavenderblush', 'pink', 'navajowhite', 'darkorange', + 'darkslateblue', 'blueviolet', 'slategray', 'indianred', 'olive', 'darksalmon', + 'blue', 'green', 'red', 'cyan', 'magenta', 'yellow', 'black', 'gray', 'sienna', + 'tan', 'plum', 'steelblue', 'lavenderblush', 'pink', 'navajowhite', 'darkorange', + 'darkslateblue', 'blueviolet', 'slategray', 'indianred', 'olive', 'darksalmon', + 'blue', 'green', 'red', 'cyan', 'magenta', 'yellow', 'black', 'gray', 'sienna', + 'tan', 'plum', 'steelblue', 'lavenderblush', 'pink', 'navajowhite', 'darkorange', + 'darkslateblue', 'blueviolet', 'slategray', 'indianred', 'olive', 'darksalmon', + 'blue', 'green', 'red', 'cyan', 'magenta', 'yellow', 'black', 'gray', 'sienna', + 'tan', 'plum', 'steelblue', 'lavenderblush', 'pink', 'navajowhite', 'darkorange', + 'darkslateblue', 'blueviolet', 'slategray', 'indianred', 'olive', 'darksalmon', + 'blue', 'green', 'red', 'cyan', 'magenta', 'yellow', 'black', 'gray', 'sienna', + 'tan', 'plum', 'steelblue', 'lavenderblush', 'pink', 'navajowhite', 'darkorange', + 'darkslateblue', 'blueviolet', 'slategray', 'indianred', 'olive', 'darksalmon', + 'blue', 'green', 'red', 'cyan', 'magenta', 'yellow', 'black', 'gray', 'sienna', + 'tan', 'plum', 'steelblue', 'lavenderblush', 'pink', 'navajowhite', 'darkorange', + 'darkslateblue', 'blueviolet', 'slategray', 'indianred', 'olive', 'darksalmon', + 'blue', 'green', 'red', 'cyan', 'magenta', 'yellow', 'black', 'gray', 'sienna', + 'tan', 
'plum', 'steelblue', 'lavenderblush', 'pink', 'navajowhite', 'darkorange', + 'darkslateblue', 'blueviolet', 'slategray', 'indianred', 'olive', 'darksalmon', + 'blue', 'green', 'red', 'cyan', 'magenta', 'yellow', 'black', 'gray', 'sienna', + 'tan', 'plum', 'steelblue', 'lavenderblush', 'pink', 'navajowhite', 'darkorange', + 'darkslateblue', 'blueviolet', 'slategray', 'indianred', 'olive', 'darksalmon', + 'blue', 'green', 'red', 'cyan', 'magenta', 'yellow', 'black', 'gray', 'sienna', + 'tan', 'plum', 'steelblue', 'lavenderblush', 'pink', 'navajowhite', 'darkorange', + 'darkslateblue', 'blueviolet', 'slategray', 'indianred', 'olive', 'darksalmon'] + + plt.rcParams['figure.figsize'] = figure_size + plt.title(title) + plt.grid(has_grid) + + if not (axis_x is None): + plt.xlabel(axis_x) + if not (axis_y is None): + plt.ylabel(axis_y) + + for d in range(len(data)): + current_fig, = plt.plot(data[d][0], data[d][1], color=colors[d]) + if not (upper_lower_data is None): + plt.fill_between(data[d][0], np.array(upper_lower_data[d][0], dtype=float), + np.array(upper_lower_data[d][1], dtype=float), + where=np.array(upper_lower_data[d][0], dtype=float) > np.array(upper_lower_data[d][1], + dtype=float), alpha=0.5, + interpolate=True) + + plots.append(current_fig) + + if not (legends is None): + plt.legend(plots, legends) + + if not (limits_axis_y is None): + plt.ylim(limits_axis_y[:2]) + plt.yticks(np.arange(limits_axis_y[0], limits_axis_y[1] + limits_axis_y[2], limits_axis_y[2])) + + if not (limits_axis_x is None): + plt.xlim(limits_axis_x[:2]) + plt.xticks(np.arange(limits_axis_x[0], limits_axis_x[1] + limits_axis_x[2], limits_axis_x[2])) + + if (file_name is None) or (file_path is None): + plt.show() + else: + full_path = path.join(file_path, file_name) + if not path.isdir(file_path): + makedirs(file_path) + plt.savefig(full_path, format='svg') + plt.close() + if verbose: + print('Figure saved at %s successfully.' % full_path) diff --git a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/dependencies.ini b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/dependencies.ini new file mode 100644 index 0000000000..d97ad7d986 --- /dev/null +++ b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/dependencies.ini @@ -0,0 +1,18 @@ +[runtime] +# 'python' key expects a value using the Python requirements file format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +python=torch==1.9.0 + torchvision==0.10.0 + dlib + scikit-learn + pandas + scikit-image + cycler + kiwisolver + matplotlib + numpy<=1.23.5 + opencv-python + Pillow + pyparsing + python-dateutil + six diff --git a/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/facial_emotion_learner.py b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/facial_emotion_learner.py new file mode 100644 index 0000000000..40158689b4 --- /dev/null +++ b/src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/facial_emotion_learner.py @@ -0,0 +1,799 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Reference: + Siqueira, H., Magg, S. and Wermter, S., 2020. Efficient Facial Feature Learning with Wide Ensemble-based + Convolutional Neural Networks. Proceedings of the Thirty-Fourth AAAI Conference on Artificial Intelligence + (AAAI-20), pages 1–1, New York, USA. +""" + +# External Libraries +from torch.utils.data import DataLoader +from torchvision import transforms +import torch.optim as optim +import torch.nn as nn +import PIL +import numpy as np +import zipfile +import torch +import os +from os import path, makedirs +import onnxruntime +import shutil +import json +from urllib.request import urlretrieve + +# OpenDR engine imports +from opendr.engine.learners import Learner +from opendr.engine.target import Category +from opendr.engine.constants import OPENDR_SERVER_URL +from opendr.perception.facial_expression_recognition.image_based_facial_emotion_estimation.algorithm.model.esr_9 \ + import ESR +from opendr.perception.facial_expression_recognition.image_based_facial_emotion_estimation.algorithm.model.\ + diversified_esr import DiversifiedESR +from opendr.perception.facial_expression_recognition.image_based_facial_emotion_estimation.algorithm.utils \ + import datasets, plotting +from opendr.perception.facial_expression_recognition.image_based_facial_emotion_estimation.algorithm.utils.diversity \ + import BranchDiversity + + +class FacialEmotionLearner(Learner): + def __init__(self, lr=1e-1, batch_size=32, + temp_path='./temp/', device='cuda', device_ind=[0], + validation_interval=1, max_training_epoch=2, momentum=0.9, + ensemble_size=9, base_path_experiment='./experiments/', name_experiment='esr_9', + dimensional_finetune=True, categorical_train=False, base_path_to_dataset='./data/AffectNet', + max_tuning_epoch=1, diversify=False + ): + super(FacialEmotionLearner, self).__init__(lr=lr, batch_size=batch_size, temp_path=temp_path, device=device) + self.device = device + self.device_ind = device_ind + self.output_device = self.device_ind[0] if type(self.device_ind) is list else self.device_ind + self.lr = lr + self.momentum = momentum + self.batch_size = batch_size + self.temp_path = temp_path + self.base_path_experiment = base_path_experiment + self.name_experiment = name_experiment + self.base_path_to_dataset = base_path_to_dataset + self.validation_interval = validation_interval + self.max_training_epoch = max_training_epoch + self.ensemble_size = ensemble_size + self.dimensional_finetune = dimensional_finetune + self.categorical_train = categorical_train + self.diversify = diversify + self.ort_session = None + self.max_tuning_epoch = max_tuning_epoch + self.criterion_cat = nn.CrossEntropyLoss() + self.criterion_dim = nn.MSELoss(reduction='mean') + self.criterion_div = BranchDiversity() + + def init_model(self, num_branches): + """ + This method is used to initialize the model. + + :param num_branches: Specifies the number of ensemble branches in the model. 
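+
+        Illustrative sketch (added commentary; the argument values shown are assumptions,
+        not part of the original PR):
+
+            learner = FacialEmotionLearner(device='cpu', ensemble_size=9, diversify=False)
+            learner.init_model(num_branches=9)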
+ """ + if self.diversify: + self.model = DiversifiedESR(device=self.device, ensemble_size=num_branches) + else: + self.model = ESR(device=self.device, ensemble_size=num_branches) + self.model.to_device(self.device) + + def save(self, state_dicts, base_path_to_save_model, verbose=True): + """ + This method is used to save a trained model. + :param state_dicts: Object of type Python dictionary containing the trained model weights. + :param base_path_to_save_model: Specifies the path in which the model will be saved. + """ + model_metadata = {"model_paths": [], "framework": "pytorch", "format": "", "has_data": False, + "inference_params": {}, "optimized": None, "optimizer_info": {}} + if not path.isdir(base_path_to_save_model): + makedirs(base_path_to_save_model) + if self.ort_session is None: + model_metadata["model_paths"] = [base_path_to_save_model] + model_metadata["optimized"] = False + model_metadata["format"] = "pt" + torch.save(state_dicts[0], path.join(base_path_to_save_model, "Net-Base-Shared_Representations.pt")) + for i in range(1, len(state_dicts)): + torch.save(state_dicts[i], path.join(base_path_to_save_model, "Net-Branch_{}.pt".format(i))) + if verbose: + print("Pytorch model has been saved at: {}".format(base_path_to_save_model)) + else: + model_metadata["model_paths"] = [base_path_to_save_model] + model_metadata["optimized"] = True + model_metadata["format"] = "onnx" + shutil.copy2(path.join(self.temp_path, self.name_experiment + ".onnx"), + model_metadata["model_paths"][0]) + if verbose: + print("ONNX model has been saved at: {}".format(base_path_to_save_model)) + json_model_name = self.name_experiment + '.json' + json_model_path = path.join(base_path_to_save_model, json_model_name) + with open(json_model_path, 'w') as outfile: + json.dump(model_metadata, outfile) + + def load(self, ensemble_size=9, path_to_saved_network="./trained_models/esr_9", + file_name_base_network="Net-Base-Shared_Representations.pt", + file_name_conv_branch="Net-Branch_{}.pt", fix_backbone=True): + """ + Loads the model from inside the directory of the path provided, using the metadata .json file included. + + :param ensemble_size: Specifies the number of ensemble branches in the model for which the pretrained weights + should be loaded. + :param path_to_saved_network: Path of the model to be loaded. + :param file_name_base_network: The file name of the base network to be loaded. + :param file_name_conv_branch: The file name of the ensemble branch network to be loaded. + :param fix_backbone: If true, all the model weights except the classifier are fixed so that the last layers' + weights are fine-tuned on dimensional data. Otherwise, all the model weights will be trained from scratch. 
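+
+        A rough example of restoring pretrained ensemble weights (the path shown is
+        illustrative; it assumes the weights were fetched beforehand, e.g. with
+        download(mode='pretrained')):
+
+            learner.init_model(num_branches=9)
+            learner.load(ensemble_size=9, path_to_saved_network='./temp/pretrained/esr_9')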
+ + """ + with open(path.join(path_to_saved_network, self.name_experiment + ".json")) as metadata_file: + metadata = json.load(metadata_file) + if metadata["optimized"]: + self.__load_from_onnx(path.join(path_to_saved_network, self.name_experiment + '.onnx')) + else: + # Load base + self.model.base.load_state_dict(torch.load( + path.join(path_to_saved_network, file_name_base_network), map_location=self.device)) + # Load branches + for i in range(ensemble_size): + self.model.convolutional_branches[i].load_state_dict( + torch.load(path.join(path_to_saved_network, file_name_conv_branch.format(i + 1)), + map_location=self.device)) + if self.dimensional_finetune and fix_backbone: + for param in self.model.parameters(): + param.requires_grad = False + for i in range(ensemble_size): + for p in self.model.convolutional_branches[i].fc_dimensional.parameters(): + p.requires_grad = True + + def fit(self, verbose=True): + """ + This method is used for training the algorithm on a train dataset and validating on a val dataset. + """ + # Make dir + if not path.isdir(path.join(self.base_path_experiment, self.name_experiment)): + makedirs(path.join(self.base_path_experiment, self.name_experiment)) + # Define data transforms + data_transforms = [transforms.ColorJitter(brightness=0.5, contrast=0.5), + transforms.RandomHorizontalFlip(p=0.5), + transforms.RandomAffine(degrees=30, + translate=(.1, .1), + scale=(1.0, 1.25), + resample=PIL.Image.BILINEAR)] + if verbose: + print("Starting: {}".format(str(self.name_experiment))) + print("Running on {}".format(self.device)) + + # Train a new model on AffectNet_Categorical from scratch + if self.categorical_train: + self.model = None + self.init_model(num_branches=1) # The model is built by adding and training branches one by one + self.model.to_device(self.device) + self.optimizer_ = optim.SGD([{'params': self.model.base.parameters(), 'lr': self.lr, + 'momentum': self.momentum}, + {'params': self.model.convolutional_branches[-1].parameters(), 'lr': self.lr, + 'momentum': self.momentum}]) + # Data loader + train_data = datasets.AffectNetCategorical(idx_set=0, + max_loaded_images_per_label=5000, + transforms=transforms.Compose(data_transforms), + is_norm_by_mean_std=False, + base_path_to_affectnet=self.base_path_to_dataset) + train_loader = DataLoader(train_data, batch_size=self.batch_size, shuffle=True, num_workers=8) + val_data = datasets.AffectNetCategorical(idx_set=2, + max_loaded_images_per_label=100000, + transforms=None, + is_norm_by_mean_std=False, + base_path_to_affectnet=self.base_path_to_dataset) + + for branch_on_training in range(self.ensemble_size): + # Best network + best_ensemble = self.model.to_state_dict() + best_ensemble_acc = 0.0 + # Initialize scheduler + scheduler = optim.lr_scheduler.StepLR(self.optimizer_, step_size=10, gamma=0.5, last_epoch=-1) + # History + history_loss = [] + history_acc = [[] for _ in range(self.model.get_ensemble_size())] + history_val_loss = [[] for _ in range(self.model.get_ensemble_size())] + history_val_acc = [[] for _ in range(self.model.get_ensemble_size() + 1)] + + # Training branch + for epoch in range(self.max_training_epoch): + running_loss = 0.0 + running_corrects = [0.0 for _ in range(self.model.get_ensemble_size())] + running_updates = 0 + for inputs, labels in train_loader: + inputs, labels = inputs.to(self.device), labels.to(self.device) + self.optimizer_.zero_grad() + # Forward + out_emotions, out_va, attn = self.model(inputs) + confs_preds = [torch.max(o, 1) for o in out_emotions] + # Compute loss + loss = 
0.0 + for i_4 in range(self.model.get_ensemble_size()): + preds = confs_preds[i_4][1] + running_corrects[i_4] += torch.sum(preds == labels).cpu().numpy() + loss += self.criterion_cat(out_emotions[i_4], labels) + + if self.diversify and self.model.get_ensemble_size() > 1: + attn_sp = attn[0] + attn_ch = attn[1] + # spatial diversity + div_sp = self.criterion_div(attn_sp, type='spatial').det_div + loss += div_sp + # channel diversity + div_ch = self.criterion_div(attn_ch, type='channel').det_div + loss += div_ch + + # Backward + loss.backward() + # Optimize + self.optimizer_.step() + scheduler.step() + # Save loss + running_loss += loss.item() + running_updates += 1 + # Statistics + if verbose: + print('[Branch {:d}, Epochs {:d}--{:d}] Loss: {:.4f} Acc: {}'. + format(self.model.get_ensemble_size(), epoch+1, self.max_training_epoch, + running_loss / running_updates, np.array(running_corrects) / len(train_data))) + + # Validation + if ((epoch % self.validation_interval) == 0) or ((epoch + 1) == self.max_training_epoch): + self.model.eval() + eval_results = self.eval(eval_type='categorical', current_branch_on_training=branch_on_training) + val_loss = eval_results["running_emotion_loss"] + val_corrects = eval_results["running_emotion_corrects"] + if verbose: + print('Validation - [Branch {:d}, Epochs {:d}--{:d}] Loss: {:.4f} Acc: {}'.format( + self.model.get_ensemble_size(), epoch + 1, self.max_training_epoch, val_loss[-1], + np.array(val_corrects) / len(val_data))) + + # Add to history training and validation statistics + history_loss.append(running_loss / running_updates) + for b in range(self.model.get_ensemble_size()): + history_acc[b].append(running_corrects[b] / len(train_data)) + history_val_loss[b].append(val_loss[b]) + history_val_acc[b].append(float(val_corrects[b]) / len(val_data)) + + # Add ensemble accuracy to history + history_val_acc[-1].append(float(val_corrects[-1]) / len(val_data)) + # Save best ensemble + ensemble_acc = (float(val_corrects[-1]) / len(val_data)) + if ensemble_acc >= best_ensemble_acc: + best_ensemble_acc = ensemble_acc + best_ensemble = self.model.to_state_dict() + # Save network + self.save(best_ensemble, + path.join(self.base_path_experiment, self.name_experiment, 'trained_models'), + verbose=verbose) + + # Save graphs + self.__plot_categorical(history_loss, history_acc, history_val_loss, history_val_acc, + self.model.get_ensemble_size(), + path.join(self.base_path_experiment, self.name_experiment)) + # Set network to training mode + self.model.train() + + # Change branch on training + if self.model.get_ensemble_size() < self.ensemble_size: + self.max_training_epoch = self.max_tuning_epoch + # Reload best configuration + self.model.reload(best_ensemble) + # Add a new branch + self.model.add_branch() + self.model.to_device(self.device) + self.optimizer_ = optim.SGD([{'params': self.model.base.parameters(), 'lr': self.lr/10, + 'momentum': self.momentum}, + {'params': self.model.convolutional_branches[-1].parameters(), + 'lr': self.lr, 'momentum': self.momentum}]) + for b in range(self.model.get_ensemble_size() - 1): + self.optimizer_.add_param_group({'params': self.model.convolutional_branches[b].parameters(), + 'lr': self.lr/10, 'momentum': self.momentum}) + # Finish training after training all branches + else: + break + + # Finetune the trained model on AffectNet_dimensional dataset for VA-estimation + if self.dimensional_finetune: + self.init_model(num_branches=self.ensemble_size) + # Load network trained on AffectNet_Categorical and fix its backbone + 
self.load(self.ensemble_size, path_to_saved_network=path.join( + self.base_path_experiment, self.name_experiment, 'trained_models'), + fix_backbone=True) + # Set loss and optimizer + self.model.to_device(self.device) + self.optimizer_ = optim.SGD([{'params': self.model.base.parameters(), 'lr': self.lr, + 'momentum': self.momentum}, + {'params': self.model.convolutional_branches[0].parameters(), + 'lr': self.lr, 'momentum': self.momentum}]) + for b in range(1, self.model.get_ensemble_size()): + self.optimizer_.add_param_group({'params': self.model.convolutional_branches[b].parameters(), + 'lr': self.lr / 10, 'momentum': self.momentum}) + # Data loaders + train_data = datasets.AffectNetDimensional(idx_set=0, + max_loaded_images_per_label=5000, + transforms=transforms.Compose(data_transforms), + is_norm_by_mean_std=False, + base_path_to_affectnet=self.base_path_to_dataset) + train_loader = DataLoader(train_data, batch_size=self.batch_size, shuffle=True, num_workers=8) + + # Finetune the pretrained model on continuous affect values + self.__finetune(train_loader=train_loader, verbose=verbose) + + def __finetune(self, train_loader, verbose=True): + current_branch_on_training = 0 + for branch_on_training in range(self.ensemble_size): + # Best network + best_ensemble = self.model.to_state_dict() + best_ensemble_rmse = 10000000.0 + # History + history_loss = [] + history_val_loss_valence = [[] for _ in range(self.model.get_ensemble_size() + 1)] + history_val_loss_arousal = [[] for _ in range(self.model.get_ensemble_size() + 1)] + + # Training branch + for epoch in range(self.max_training_epoch): + running_loss = 0.0 + running_updates = 0 + batch = 0 + for inputs, labels in train_loader: + batch += 1 + # Get the inputs + inputs, labels = inputs.to(self.device), labels.to(self.device) + labels_valence = labels[:, 0].view(len(labels[:, 0]), 1) + labels_arousal = labels[:, 1].view(len(labels[:, 1]), 1) + self.optimizer_.zero_grad() + # Forward + out_emotions, out_va, _ = self.model(inputs) + # Compute loss of affect_values + loss = 0.0 + for i_4 in range(current_branch_on_training + 1): + out_valence = out_va[i_4][:, 0].view(len(out_va[i_4][:, 0]), 1) + out_arousal = out_va[i_4][:, 1].view(len(out_va[i_4][:, 1]), 1) + loss += torch.sqrt(self.criterion_dim(out_valence, labels_valence)) + loss += torch.sqrt(self.criterion_dim(out_arousal, labels_arousal)) + # Backward + loss.backward() + # Optimize + self.optimizer_.step() + # Save loss + running_loss += loss.item() + running_updates += 1 + # Statistics + if verbose: + print('[Branch {:d}, Epochs {:d}--{:d}] Loss: {:.4f}'. 
+ format(current_branch_on_training + 1, epoch + 1, self.max_training_epoch, + running_loss / running_updates)) + # Validation + if (epoch % self.validation_interval) == 0: + self.model.eval() + eval_results = self.eval(eval_type='dimensional', + current_branch_on_training=current_branch_on_training) + val_loss = eval_results["valence_arousal_losses"] + # Add to history training and validation statistics + history_loss.append(running_loss / running_updates) + for b in range(self.model.get_ensemble_size()): + history_val_loss_valence[b].append(val_loss[0][b]) + history_val_loss_arousal[b].append(val_loss[1][b]) + + # Add ensemble rmse to history + history_val_loss_valence[-1].append(val_loss[0][-1]) + history_val_loss_arousal[-1].append(val_loss[1][-1]) + if verbose: + print('Validation - [Branch {:d}, Epochs {:d}--{:d}] Loss (V) - (A): ({}) - ({})'.format( + current_branch_on_training + 1, epoch + 1, self.max_training_epoch, + [hvlv[-1] for hvlv in history_val_loss_valence], + [hvla[-1] for hvla in history_val_loss_arousal])) + + # Save best ensemble + ensemble_rmse = float(history_val_loss_valence[-1][-1]) + float(history_val_loss_arousal[-1][-1]) + if ensemble_rmse <= best_ensemble_rmse: + best_ensemble_rmse = ensemble_rmse + best_ensemble = self.model.to_state_dict() + # Save network + self.save(best_ensemble, path.join(self.base_path_experiment, + self.name_experiment, 'trained_models'), verbose=verbose) + + # Save graphs + self.__plot_dimensional(history_loss, history_val_loss_valence, history_val_loss_arousal, + current_branch_on_training + 1, + path.join(self.base_path_experiment, self.name_experiment), verbose=verbose) + self.model.train() + + # Change branch on training + if (current_branch_on_training + 1) < self.model.get_ensemble_size(): + current_branch_on_training += 1 + self.max_training_epoch = 2 + # Reload best configuration + self.model.reload(best_ensemble) + self.model.to_device(self.device) + self.optimizer_ = optim.SGD([ + {'params': self.model.base.parameters(), 'lr': self.lr / 10, + 'momentum': self.momentum}, + {'params': self.model.convolutional_branches[current_branch_on_training].parameters(), + 'lr': self.lr, + 'momentum': self.momentum}]) + for b in range(self.model.get_ensemble_size()): + if b != current_branch_on_training: + self.optimizer_.add_param_group({'params': self.model.convolutional_branches[b].parameters(), + 'lr': self.lr/10, + 'momentum': self.momentum}) + # Finish training after fine-tuning all branches + else: + break + + def eval(self, eval_type='categorical', current_branch_on_training=0): + """ + This method is used for evaluating the algorithm on a val dataset. + :param eval_type: Specifies the type of data that model is evaluated on. + It can be either categorical or dimensional data. + :param current_branch_on_training: Specifies the index of trained branch which should be evaluated on + validation data. + :return: a dictionary containing stats regarding evaluation. 
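+
+        Illustrative call (assumes the AffectNet validation split is available under
+        base_path_to_dataset; the branch index is just an example):
+
+            results = learner.eval(eval_type='categorical', current_branch_on_training=8)
+            print(results['running_emotion_corrects'])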
+ """ + cpu_device = torch.device('cpu') + val_va_predictions = [[] for _ in range(self.model.get_ensemble_size() + 1)] + val_targets_valence = [] + val_targets_arousal = [] + valence_arousal_losses = [[], []] + + running_emotion_loss = [0.0 for _ in range(self.model.get_ensemble_size())] + running_emotion_corrects = [0 for _ in range(self.model.get_ensemble_size() + 1)] + running_emotion_steps = [0 for _ in range(self.model.get_ensemble_size())] + + if eval_type == 'categorical': + # load data + val_data = datasets.AffectNetCategorical(idx_set=2, + max_loaded_images_per_label=100000, + transforms=None, + is_norm_by_mean_std=False, + base_path_to_affectnet=self.base_path_to_dataset) + val_loader = DataLoader(val_data, batch_size=self.batch_size, shuffle=False, num_workers=8) + # evaluate + for inputs_eval, labels_eval in val_loader: + inputs_eval, labels_eval = inputs_eval.to(self.device), labels_eval.to(self.device) + out_emotion_eval, out_va_eval, _ = self.model(inputs_eval) + outputs_eval = out_emotion_eval[:current_branch_on_training + 1] + # Ensemble prediction + overall_preds = torch.zeros(outputs_eval[0].size()).to(self.device) + for o_eval, outputs_per_branch_eval in enumerate(outputs_eval, 0): + _, preds_eval = torch.max(outputs_per_branch_eval, 1) + running_emotion_corrects[o_eval] += torch.sum(preds_eval == labels_eval).cpu().numpy() + loss_eval = self.criterion_cat(outputs_per_branch_eval, labels_eval) + running_emotion_loss[o_eval] += loss_eval.item() + running_emotion_steps[o_eval] += 1 + for v_i, v_p in enumerate(preds_eval, 0): + overall_preds[v_i, v_p] += 1 + # Compute accuracy of ensemble predictions + _, preds_eval = torch.max(overall_preds, 1) + running_emotion_corrects[-1] += torch.sum(preds_eval == labels_eval).cpu().numpy() + + for b_eval in range(self.model.get_ensemble_size()): + div = running_emotion_steps[b_eval] if running_emotion_steps[b_eval] != 0 else 1 + running_emotion_loss[b_eval] /= div + + elif eval_type == 'dimensional': + # load data + val_data = datasets.AffectNetDimensional(idx_set=2, + max_loaded_images_per_label=100000, + transforms=None, + is_norm_by_mean_std=False, + base_path_to_affectnet=self.base_path_to_dataset) + val_loader = DataLoader(val_data, batch_size=self.batch_size, shuffle=False, num_workers=8) + # evaluate model + for inputs_eval, labels_eval in val_loader: + inputs_eval, labels_eval = inputs_eval.to(self.device), labels_eval + labels_eval_valence = labels_eval[:, 0].view(len(labels_eval[:, 0]), 1) + labels_eval_arousal = labels_eval[:, 1].view(len(labels_eval[:, 1]), 1) + out_emotion_eval, out_va_eval, _ = self.model(inputs_eval) + outputs_eval = out_va_eval[:current_branch_on_training + 1] + + # Ensemble prediction + val_predictions_ensemble = torch.zeros(outputs_eval[0].size()).to(cpu_device) + for evaluate_branch in range(current_branch_on_training + 1): + out_va_eval_cpu = out_va_eval[evaluate_branch].detach().to(cpu_device) + val_va_predictions[evaluate_branch].extend(out_va_eval_cpu) + val_predictions_ensemble += out_va_eval_cpu + val_va_predictions[-1].extend(val_predictions_ensemble / (current_branch_on_training + 1)) + val_targets_valence.extend(labels_eval_valence) + val_targets_arousal.extend(labels_eval_arousal) + + val_targets_valence = torch.stack(val_targets_valence) + val_targets_arousal = torch.stack(val_targets_arousal) + + for evaluate_branch in range(self.model.get_ensemble_size() + 1): + if evaluate_branch < (current_branch_on_training + 1) or \ + evaluate_branch == self.model.get_ensemble_size(): + 
list_tensor = torch.stack(val_va_predictions[evaluate_branch]) + out_valence_eval = list_tensor[:, 0].view(len(list_tensor[:, 0]), 1) + out_arousal_eval = list_tensor[:, 1].view(len(list_tensor[:, 1]), 1) + valence_arousal_losses[0].append(torch.sqrt(self.criterion_dim(out_valence_eval, + val_targets_valence))) + valence_arousal_losses[1].append(torch.sqrt(self.criterion_dim(out_arousal_eval, + val_targets_arousal))) + else: + valence_arousal_losses[0].append(torch.tensor(0)) + valence_arousal_losses[1].append(torch.tensor(0)) + results = { + "valence_arousal_losses": valence_arousal_losses, + "running_emotion_loss": running_emotion_loss, + "running_emotion_corrects": running_emotion_corrects + } + return results + + @staticmethod + def __plot_dimensional(his_loss, his_val_loss_valence, his_val_loss_arousal, branch_idx, base_path_his, + verbose=True): + losses_plot = [[range(len(his_loss)), his_loss]] + legends_plot_loss = ['Training'] + # Loss + for b_plot in range(len(his_val_loss_valence)): + losses_plot.append([range(len(his_val_loss_valence[b_plot])), his_val_loss_valence[b_plot]]) + legends_plot_loss.append('Validation ({}) (Val)'.format(b_plot + 1)) + losses_plot.append([range(len(his_val_loss_arousal[b_plot])), his_val_loss_arousal[b_plot]]) + legends_plot_loss.append('Validation ({}) (Aro)'.format(b_plot + 1)) + + # Loss + plotting.plot(losses_plot, + title='Training and Validation Losses vs. Epochs for Branch {}'.format(branch_idx), + legends=legends_plot_loss, + file_path=base_path_his, + file_name='Loss_Branch_{}'.format(branch_idx), + axis_x='Training Epoch', + axis_y='Loss', + limits_axis_y=(0.2, 0.6, 0.025), + verbose=verbose) + + np.save(path.join(base_path_his, 'Loss_Branch_{}'.format(branch_idx)), np.array(his_loss)) + np.save(path.join(base_path_his, 'Loss_Val_Branch_{}_Valence'.format(branch_idx)), + np.array(his_val_loss_valence)) + np.save(path.join(base_path_his, 'Loss_Val_Branch_{}_Arousal'.format(branch_idx)), + np.array(his_val_loss_arousal)) + + @staticmethod + def __plot_categorical(his_loss, his_acc, his_val_loss, his_val_acc, branch_idx, base_path_his, verbose=True): + accuracies_plot = [] + legends_plot_acc = [] + losses_plot = [[range(len(his_loss)), his_loss]] + legends_plot_loss = ["Training"] + + # Acc + for b_plot in range(len(his_acc)): + accuracies_plot.append([range(len(his_acc[b_plot])), his_acc[b_plot]]) + legends_plot_acc.append("Training ({})".format(b_plot + 1)) + accuracies_plot.append([range(len(his_val_acc[b_plot])), his_val_acc[b_plot]]) + legends_plot_acc.append("Validation ({})".format(b_plot + 1)) + + # Ensemble acc + accuracies_plot.append([range(len(his_val_acc[-1])), his_val_acc[-1]]) + legends_plot_acc.append("Validation (E)") + + # Accuracy + plotting.plot(accuracies_plot, + title="Training and Validation Accuracies vs. Epochs for Branch {}".format(branch_idx), + legends=legends_plot_acc, + file_path=base_path_his, + file_name="Acc_Branch_{}".format(branch_idx), + axis_x="Training Epoch", + axis_y="Accuracy", + limits_axis_y=(0.0, 1.0, 0.025), + verbose=verbose) + + # Loss + for b_plot in range(len(his_val_loss)): + losses_plot.append([range(len(his_val_loss[b_plot])), his_val_loss[b_plot]]) + legends_plot_loss.append("Validation ({})".format(b_plot + 1)) + plotting.plot(losses_plot, + title="Training and Validation Losses vs. 
Epochs for Branch {}".format(branch_idx), + legends=legends_plot_loss, + file_path=base_path_his, + file_name="Loss_Branch_{}".format(branch_idx), + axis_x="Training Epoch", + axis_y="Loss", + verbose=verbose) + + # Save plots + np.save(path.join(base_path_his, "Loss_Branch_{}".format(branch_idx)), np.array(his_loss)) + np.save(path.join(base_path_his, "Acc_Branch_{}".format(branch_idx)), np.array(his_acc)) + np.save(path.join(base_path_his, "Loss_Val_Branch_{}".format(branch_idx)), np.array(his_val_loss)) + np.save(path.join(base_path_his, "Acc_Val_Branch_{}".format(branch_idx)), np.array(his_val_acc)) + + def infer(self, input_batch): + """ + This method is used to perform inference on a batch of images + + :param input_batch: a batch of images + :return: dimensional and categorical emotion results. + """ + + if type(input_batch) is list: + input_batch = torch.stack([torch.tensor(v.data) for v in input_batch]) + else: + input_batch = torch.tensor(input_batch) + cpu_device = torch.device('cpu') + + input_batch = input_batch.to(device=self.device, dtype=torch.float) + self.model.eval() + out_emotions, out_va, _ = self.model(input_batch) + + # categorical result + softmax_ = nn.Softmax(dim=0) + categorical_results = out_emotions[:self.ensemble_size] # a list of 9 or (n) torch tensors + overall_emotion_preds = torch.zeros(categorical_results[0].size()).to(self.device) # size: batchsize * 8 + for o_eval, outputs_per_branch_eval in enumerate(categorical_results, 0): + _, preds_indices = torch.max(outputs_per_branch_eval, 1) + for v_i, v_p in enumerate(preds_indices, 0): + overall_emotion_preds[v_i, v_p] += 1 + ensemble_emotion_results = [Category(prediction=int(o.argmax(dim=0)), confidence=max(softmax_(o)), + description=datasets.AffectNetCategorical.get_class(int(o.argmax(dim=0)))) + for o in overall_emotion_preds] + # dimension result + dimensional_results = out_va[:self.ensemble_size] + overall_dimension_preds = torch.zeros(dimensional_results[0].size()).to(cpu_device) + for evaluate_branch in range(self.ensemble_size): + out_va_eval_cpu = out_va[evaluate_branch].detach().to(cpu_device) + overall_dimension_preds += out_va_eval_cpu + ensemble_dimension_results = overall_dimension_preds / self.ensemble_size + + return ensemble_emotion_results, ensemble_dimension_results + + def optimize(self, do_constant_folding=False): + """ + Optimize method converts the model to ONNX format and saves the + model in the parent directory defined by self.temp_path. The ONNX model is then loaded. + :param do_constant_folding: whether to optimize constants, defaults to 'False' + :type do_constant_folding: bool, optional + """ + if self.model is None: + raise UserWarning("No model is loaded, cannot optimize. Load or train a model first.") + if self.ort_session is not None: + raise UserWarning("Model is already optimized in ONNX.") + try: + self.__convert_to_onnx(path.join(self.temp_path, self.name_experiment + ".onnx"), do_constant_folding) + except FileNotFoundError: + # Create temp directory + os.makedirs(path.join(self.temp_path, self.name_experiment), exist_ok=True) + self.__convert_to_onnx(path.join(self.temp_path, self.name_experiment + ".onnx"), + do_constant_folding, verbose=False) + + self.__load_from_onnx(path.join(self.temp_path, self.name_experiment + ".onnx")) + + def __convert_to_onnx(self, output_name, do_constant_folding=False, verbose=False): + """ + Converts the loaded regular PyTorch model to an ONNX model and saves it to disk. + :param output_name: path and name to save the model, e.g. 
"/models/onnx_model.onnx" + :type output_name: str + :param do_constant_folding: whether to optimize constants, defaults to 'False' + :type do_constant_folding: bool, optional + """ + # Input to the model + onnx_input = torch.randn(self.batch_size, 3, 96, 96) + # Export the model + self.model.eval() + self.model.to_device(self.device) + + torch.onnx.export(self.model, + onnx_input, + output_name, + verbose=verbose, + opset_version=11, + do_constant_folding=do_constant_folding, + input_names=['onnx_input'], + output_names=['onnx_out_emotions', 'onnx_out_va', 'onnx_attn']) + + def __load_from_onnx(self, path): + """ + This method loads an ONNX model from the path provided into an onnxruntime inference session. + :param path: path to ONNX model + :type path: str + """ + self.ort_session = onnxruntime.InferenceSession(path) + + def reset(self): + """This method is not used in this implementation.""" + return NotImplementedError + + def download(self, path=None, mode="data", url=OPENDR_SERVER_URL + "perception/facial_emotion_estimation"): + """ + This method downloads data files and saves them in the path provided. + :param path: Local path to save the files, defaults to self.temp_path if None + :type path: str, path, optional + :param mode: Whether to download data or the pretrained model, or image/video for running demo + :type mode: It can be an item in ["data", "pretrained", "demo_image", "demo_video"] + :param url: URL of the FTP server, defaults to OpenDR FTP URL + :type url: str, optional + """ + + valid_modes = ["data", "pretrained", "demo_image", "demo_video"] + if mode not in valid_modes: + raise UserWarning("mode parameter not valid:", mode, ", file should be one of:", valid_modes) + if path is None: + path = self.temp_path + if not os.path.exists(path): + os.makedirs(path) + + if mode == "data": + print("Downloading data...") + data_path = os.path.join(path, 'data') + if not os.path.exists(data_path): + os.makedirs(data_path) + + zip_path = os.path.join(path, 'data/AffectNet_micro.zip') + if not os.path.exists(zip_path): + # Download data + file_url = os.path.join(url, 'data/AffectNet_micro.zip') + urlretrieve(file_url, zip_path) + with zipfile.ZipFile(zip_path, 'r') as zip_ref: + zip_ref.extractall(data_path) + else: + print("Data files already exist.") + print("Data download complete.") + downloaded_files_path = os.path.join(data_path, 'AffectNet_micro') + + elif mode == "pretrained": + print("Downloading pretrained model weights...") + model_path = os.path.join(path, 'pretrained') + if not os.path.exists(model_path): + os.makedirs(model_path) + + zip_path = os.path.join(path, 'pretrained/esr_9.zip') + if not os.path.exists(zip_path): + # Download data + file_url = os.path.join(url, 'pretrained/esr_9.zip') + urlretrieve(file_url, zip_path) + with zipfile.ZipFile(zip_path, 'r') as zip_ref: + zip_ref.extractall(model_path) + else: + print("Pretrained files already exist.") + print("Pretrained model weights download complete.") + downloaded_files_path = os.path.join(model_path, 'esr_9') + + elif mode == "demo_image": + print("Downloading image...") + demo_path = os.path.join(path, 'demo') + if not os.path.exists(demo_path): + os.makedirs(demo_path) + + img_path = os.path.join(demo_path, 'sheldon.jpg') + if not os.path.exists(img_path): + # Download data + file_url = os.path.join(url, 'demo/sheldon.jpg') + urlretrieve(file_url, img_path) + else: + print("Data files already exist.") + print("Data download complete.") + downloaded_files_path = img_path + + elif mode == "demo_video": + 
print("Downloading video...") + demo_path = os.path.join(path, 'demo') + if not os.path.exists(demo_path): + os.makedirs(demo_path) + + vid_path = os.path.join(demo_path, 'big_bang.mp4') + if not os.path.exists(vid_path): + # Download data + file_url = os.path.join(url, 'demo/big_bang.mp4') + urlretrieve(file_url, vid_path) + else: + print("Data files already exist.") + print("Data download complete.") + downloaded_files_path = vid_path + + return downloaded_files_path diff --git a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/dependencies.ini b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/dependencies.ini index a201ecf621..5687fc52e8 100644 --- a/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/dependencies.ini +++ b/src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/dependencies.ini @@ -3,6 +3,7 @@ # https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 torchvision==0.10.0 + protobuf<=3.20.0 tensorboardX>=2.0 matplotlib>=2.2.2 tqdm diff --git a/src/opendr/perception/heart_anomaly_detection/dependencies.ini b/src/opendr/perception/heart_anomaly_detection/dependencies.ini index c781145c09..eff297e4d4 100644 --- a/src/opendr/perception/heart_anomaly_detection/dependencies.ini +++ b/src/opendr/perception/heart_anomaly_detection/dependencies.ini @@ -1,8 +1,9 @@ [runtime] # 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 torchvision==0.10.0 + protobuf<=3.20.0 tensorboard>=2.4.1 tqdm scikit-learn>=0.22 diff --git a/src/opendr/perception/multimodal_human_centric/audiovisual_emotion_learner/dependencies.ini b/src/opendr/perception/multimodal_human_centric/audiovisual_emotion_learner/dependencies.ini index 2c7af07831..80ff4b1914 100644 --- a/src/opendr/perception/multimodal_human_centric/audiovisual_emotion_learner/dependencies.ini +++ b/src/opendr/perception/multimodal_human_centric/audiovisual_emotion_learner/dependencies.ini @@ -1,6 +1,7 @@ [runtime] python=torch==1.9.0 torchvision==0.10.0 + protobuf<=3.20.0 librosa==0.8.0 opencv-python tqdm diff --git a/src/opendr/perception/multimodal_human_centric/dependencies.ini b/src/opendr/perception/multimodal_human_centric/dependencies.ini index 499e9408f4..5b18913f76 100644 --- a/src/opendr/perception/multimodal_human_centric/dependencies.ini +++ b/src/opendr/perception/multimodal_human_centric/dependencies.ini @@ -1,10 +1,13 @@ [runtime] # 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 torchvision==0.10.0 + protobuf<=3.20.0 tensorboard>=2.4.1 tqdm imageio>=2.6.0 opendr=opendr-toolkit-engine + opendr-toolkit-compressive-learning + opendr-toolkit-object-detection-2d diff --git a/src/opendr/perception/object_detection_2d/__init__.py b/src/opendr/perception/object_detection_2d/__init__.py index 9fac6ba424..314e61fc52 100644 --- a/src/opendr/perception/object_detection_2d/__init__.py +++ b/src/opendr/perception/object_detection_2d/__init__.py @@ -4,6 +4,8 @@ from 
opendr.perception.object_detection_2d.retinaface.retinaface_learner import RetinaFaceLearner from opendr.perception.object_detection_2d.ssd.ssd_learner import SingleShotDetectorLearner from opendr.perception.object_detection_2d.yolov3.yolov3_learner import YOLOv3DetectorLearner +from opendr.perception.object_detection_2d.yolov5.yolov5_learner import YOLOv5DetectorLearner +from opendr.perception.object_detection_2d.nanodet.nanodet_learner import NanodetLearner from opendr.perception.object_detection_2d.datasets.wider_person import WiderPersonDataset from opendr.perception.object_detection_2d.datasets.wider_face import WiderFaceDataset @@ -16,6 +18,6 @@ from opendr.perception.object_detection_2d.nms.soft_nms.soft_nms import SoftNMS from opendr.perception.object_detection_2d.nms.seq2seq_nms.seq2seq_nms_learner import Seq2SeqNMSLearner -__all__ = ['CenterNetDetectorLearner', 'DetrLearner', 'GemLearner', 'RetinaFaceLearner', - 'SingleShotDetectorLearner', 'YOLOv3DetectorLearner', 'WiderPersonDataset', 'WiderFaceDataset', - 'transforms', 'draw_bounding_boxes', 'ClusterNMS', 'FastNMS', 'SoftNMS', 'Seq2SeqNMSLearner'] +__all__ = ['CenterNetDetectorLearner', 'DetrLearner', 'GemLearner', 'RetinaFaceLearner', 'SingleShotDetectorLearner', + 'YOLOv3DetectorLearner', 'NanodetLearner', 'WiderPersonDataset', 'WiderFaceDataset', 'transforms', + 'draw_bounding_boxes', 'ClusterNMS', 'FastNMS', 'SoftNMS', 'Seq2SeqNMSLearner', 'YOLOv5DetectorLearner'] diff --git a/src/opendr/perception/object_detection_2d/centernet/centernet_learner.py b/src/opendr/perception/object_detection_2d/centernet/centernet_learner.py index fd986af456..4df4575c00 100644 --- a/src/opendr/perception/object_detection_2d/centernet/centernet_learner.py +++ b/src/opendr/perception/object_detection_2d/centernet/centernet_learner.py @@ -531,21 +531,25 @@ def download(self, path=None, mode="pretrained", verbose=False, "centernet_voc.json") if verbose: print("Downloading metadata...") - urlretrieve(file_url, os.path.join(path, "centernet_default.json")) + file_path = os.path.join(path, "centernet_default.json") + if not os.path.exists(file_path): + urlretrieve(file_url, file_path) if verbose: print("Downloading params...") file_url = os.path.join(url, "pretrained", "centernet_voc", "centernet_voc.params") - - urlretrieve(file_url, - os.path.join(path, "centernet_voc.params")) + file_path = os.path.join(path, "centernet_voc.params") + if not os.path.exists(file_path): + urlretrieve(file_url, file_path) elif mode == "images": file_url = os.path.join(url, "images", "bicycles.jpg") if verbose: print("Downloading example image...") - urlretrieve(file_url, os.path.join(path, "bicycles.jpg")) + file_path = os.path.join(path, "bicycles.jpg") + if not os.path.exists(file_path): + urlretrieve(file_url, file_path) elif mode == "test_data": os.makedirs(os.path.join(path, "test_data"), exist_ok=True) @@ -555,17 +559,23 @@ def download(self, path=None, mode="pretrained", verbose=False, file_url = os.path.join(url, "test_data", "train.txt") if verbose: print("Downloading filelist...") - urlretrieve(file_url, os.path.join(path, "test_data", "train.txt")) + file_path = os.path.join(path, "test_data", "train.txt") + if not os.path.exists(file_path): + urlretrieve(file_url, file_path) # download image file_url = os.path.join(url, "test_data", "Images", "000040.jpg") if verbose: print("Downloading image...") - urlretrieve(file_url, os.path.join(path, "test_data", "Images", "000040.jpg")) + file_path = os.path.join(path, "test_data", "Images", "000040.jpg") + if not 
os.path.exists(file_path): + urlretrieve(file_url, file_path) # download annotations file_url = os.path.join(url, "test_data", "Annotations", "000040.jpg.txt") if verbose: print("Downloading annotations...") - urlretrieve(file_url, os.path.join(path, "test_data", "Annotations", "000040.jpg.txt")) + file_path = os.path.join(path, "test_data", "Annotations", "000040.jpg.txt") + if not os.path.exists(file_path): + urlretrieve(file_url, file_path) def optimize(self, target_device): """This method is not used in this implementation.""" diff --git a/src/opendr/perception/object_detection_2d/datasets/detection_dataset.py b/src/opendr/perception/object_detection_2d/datasets/detection_dataset.py index c72f9c71c9..f1fbcbe7b2 100644 --- a/src/opendr/perception/object_detection_2d/datasets/detection_dataset.py +++ b/src/opendr/perception/object_detection_2d/datasets/detection_dataset.py @@ -1,124 +1,124 @@ -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import bisect -from itertools import accumulate - -from opendr.engine.datasets import DatasetIterator - - -class DetectionDataset(DatasetIterator): - def __init__(self, classes, dataset_type, root, image_paths=None, splits='', - image_transform=None, target_transform=None, transform=None): - super().__init__() - self.classes = classes - self.num_classes = len(classes) - self.image_paths = image_paths - self.dataset_type = dataset_type - self.root = root - self.splits = splits - - self._transform = transform - self._image_transform = image_transform - self._target_transform = target_transform - - def set_transform(self, transform): - self._transform = transform - - def transform(self, transform): - return MappedDetectionDataset(self, transform) - - def set_image_transform(self, transform): - self._image_transform = transform - - def set_target_transform(self, transform): - self._target_transform = transform - - def get_bboxes(self, item): - pass - - def get_image(self, item): - pass - - def __len__(self): - pass - - def __getitem__(self, idx): - pass - - -class MappedDetectionDataset(DatasetIterator): - def __init__(self, data, map_function): - self.data = data - self.map_function = map_function - - def __len__(self): - return len(self.data) - - def __getitem__(self, idx): - item = self.data[idx] - if isinstance(item, tuple): - return self.map_function(*item) - return self.map_function(item) - - -class ConcatDataset(DetectionDataset): - """ - Basic dataset concatenation class. The datasets are assumed to have the same classes. 
- - :param datasets: list of DetectionDataset type or subclass - """ - def __init__(self, datasets): - super(ConcatDataset, self).__init__(classes=datasets[0].classes, dataset_type='concat_dataset', - root=None) - self.cumulative_lengths = list(accumulate([len(dataset) for dataset in datasets])) - self.datasets = datasets - - def set_transform(self, transform): - self._transform = transform - for dataset in self.datasets: - dataset.transform(transform) - - def transform(self, transform): - mapped_datasets = [MappedDetectionDataset(dataset, transform) for dataset in self.datasets] - return ConcatDataset(mapped_datasets) - - def set_image_transform(self, transform): - self._image_transform = transform - for dataset in self.datasets: - dataset.set_image_transform(transform) - - def set_target_transform(self, transform): - self._target_transform = transform - for dataset in self.datasets: - dataset.set_target_transform(transform) - - def __len__(self): - return self.cumulative_lengths[-1] - - def __getitem__(self, item): - dataset_idx = bisect.bisect_right(self.cumulative_lengths, item) - if dataset_idx == 0: - sample_idx = item - else: - sample_idx = item - self.cumulative_lengths[dataset_idx - 1] - return self.datasets[dataset_idx][sample_idx] - - -def is_image_type(filename): - return filename.lower().endswith(('png', 'jpg', 'jpeg', 'tiff', 'bmp', 'gif')) - - -def remove_extension(filename): - return '.'.join(filename.split('.')[:-1]) +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
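+
+# Note (added commentary, not from the original sources): ConcatDataset defined below merges
+# DetectionDataset instances that share the same class list, e.g.
+#     combined = ConcatDataset([train_split_a, train_split_b])  # hypothetical datasets
+# and maps a global sample index to the right sub-dataset via bisect over cumulative lengths.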
+ +import bisect +from itertools import accumulate + +from opendr.engine.datasets import DatasetIterator + + +class DetectionDataset(DatasetIterator): + def __init__(self, classes, dataset_type, root, image_paths=None, splits='', + image_transform=None, target_transform=None, transform=None): + super().__init__() + self.classes = classes + self.num_classes = len(classes) + self.image_paths = image_paths + self.dataset_type = dataset_type + self.root = root + self.splits = splits + + self._transform = transform + self._image_transform = image_transform + self._target_transform = target_transform + + def set_transform(self, transform): + self._transform = transform + + def transform(self, transform): + return MappedDetectionDataset(self, transform) + + def set_image_transform(self, transform): + self._image_transform = transform + + def set_target_transform(self, transform): + self._target_transform = transform + + def get_bboxes(self, item): + pass + + def get_image(self, item): + pass + + def __len__(self): + pass + + def __getitem__(self, idx): + pass + + +class MappedDetectionDataset(DatasetIterator): + def __init__(self, data, map_function): + self.data = data + self.map_function = map_function + + def __len__(self): + return len(self.data) + + def __getitem__(self, idx): + item = self.data[idx] + if isinstance(item, tuple): + return self.map_function(*item) + return self.map_function(item) + + +class ConcatDataset(DetectionDataset): + """ + Basic dataset concatenation class. The datasets are assumed to have the same classes. + + :param datasets: list of DetectionDataset type or subclass + """ + def __init__(self, datasets): + super(ConcatDataset, self).__init__(classes=datasets[0].classes, dataset_type='concat_dataset', + root=None) + self.cumulative_lengths = list(accumulate([len(dataset) for dataset in datasets])) + self.datasets = datasets + + def set_transform(self, transform): + self._transform = transform + for dataset in self.datasets: + dataset.transform(transform) + + def transform(self, transform): + mapped_datasets = [MappedDetectionDataset(dataset, transform) for dataset in self.datasets] + return ConcatDataset(mapped_datasets) + + def set_image_transform(self, transform): + self._image_transform = transform + for dataset in self.datasets: + dataset.set_image_transform(transform) + + def set_target_transform(self, transform): + self._target_transform = transform + for dataset in self.datasets: + dataset.set_target_transform(transform) + + def __len__(self): + return self.cumulative_lengths[-1] + + def __getitem__(self, item): + dataset_idx = bisect.bisect_right(self.cumulative_lengths, item) + if dataset_idx == 0: + sample_idx = item + else: + sample_idx = item - self.cumulative_lengths[dataset_idx - 1] + return self.datasets[dataset_idx][sample_idx] + + +def is_image_type(filename): + return filename.lower().endswith(('png', 'jpg', 'jpeg', 'tiff', 'bmp', 'gif')) + + +def remove_extension(filename): + return '.'.join(filename.split('.')[:-1]) diff --git a/src/opendr/perception/object_detection_2d/datasets/transforms.py b/src/opendr/perception/object_detection_2d/datasets/transforms.py index 08c0f34ecf..17fed438f9 100644 --- a/src/opendr/perception/object_detection_2d/datasets/transforms.py +++ b/src/opendr/perception/object_detection_2d/datasets/transforms.py @@ -1,161 +1,162 @@ -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import cv2 -import numpy as np -import mxnet as mx -import gluoncv.data.transforms.image as timage - - -def np_to_mx(img_np): - """ - Convert numpy image to MXNet image. - """ - img_mx = mx.image.image.nd.from_numpy(np.float32(img_np)) - return img_mx - - -def bbox_to_np(bbox): - """ - BoundingBox to [xmin, ymin, xmax, ymax, conf, cls] numpy array. - """ - bbox_np = np.asarray([bbox.left, bbox.top, bbox.left + bbox.width, bbox.top + bbox.height, bbox.confidence, bbox.name]) - return bbox_np - - -class BoundingBoxListToNumpyArray: - """ - Transform object to convert OpenDR BoundingBoxList to numpy array of [[xmin, ymin, xmax, ymax, score, cls_id],...] format. - """ - def __call__(self, bbox_list): - return np.asarray([bbox_to_np(bbox) for bbox in bbox_list.data]) - - -class ImageToNDArrayTransform: - """ - Transform object to convert OpenDR Image to MXNext image. - """ - def __call__(self, img): - return np_to_mx(img.data) - - -class ImageToNumpyArrayTransform: - """ - Transform object to convert OpenDR Image to Numpy array. - """ - def __call__(self, img): - return img.data - - -class ResizeImageAndBoxesTransform: - """ - Resizes a numpy image and corresponding bounding boxes to fit the given dimensions. - """ - def __init__(self, w, h): - self.w = w - self.h = h - - def __call__(self, img, labels): - h, w, _ = img.shape - w_r = self.w / w - h_r = self.h / h - img = cv2.resize(img, (self.w, self.h), interpolation=cv2.INTER_LINEAR) - labels[:, 0] *= w_r - labels[:, 2] *= w_r - labels[:, 1] *= h_r - labels[:, 3] *= h_r - return img, labels - - -def transform_test_resize(imgs, mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225), w=640, h=480): - """ - Function adapted from gluoncv.data.transforms.presets.ssd, resizes the image to a preset size. - :param imgs: - :type imgs: - :param mean: - :type mean: - :param std: - :type std: - :param w: Desired width of the output tensor. - :type w: int - :param h: Desired height of the output tensor. - :type h: int - :return: - :rtype: - """ - if isinstance(imgs, mx.nd.NDArray): - imgs = [imgs] - for im in imgs: - assert isinstance(im, mx.nd.NDArray), "Expect NDArray, got {}".format(type(im)) - - tensors = [] - origs = [] - for img in imgs: - img = timage.imresize(img, w, h) - orig_img = img.asnumpy().astype('uint8') - img = mx.nd.image.to_tensor(img) - img = mx.nd.image.normalize(img, mean=mean, std=std) - tensors.append(img.expand_dims(0)) - origs.append(orig_img) - if len(tensors) == 1: - return tensors[0], origs[0] - return tensors, origs - - -def transform_test(imgs, mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)): - """ - Function dapted from gluoncv.data.transforms.presets.ssd, normalizes and converts image to tensor. 
- :param imgs: - :type imgs: - :param mean: - :type mean: - :param std: - :type std: - :return: - :rtype: - """ - if isinstance(imgs, mx.nd.NDArray): - imgs = [imgs] - for im in imgs: - assert isinstance(im, mx.nd.NDArray), "Expect NDArray, got {}".format(type(im)) - - tensors = [] - origs = [] - for img in imgs: - orig_img = img.asnumpy().astype('uint8') - img = mx.nd.image.to_tensor(img) - img = mx.nd.image.normalize(img, mean=mean, std=std) - tensors.append(img.expand_dims(0)) - origs.append(orig_img) - if len(tensors) == 1: - return tensors[0], origs[0] - return tensors, origs - - -def pad_test(img, min_size=512): - h_pad_size = 0 - min_dim = 2 + np.argmin([img.shape[2:4]]) - img_padded = img - if img.shape[min_dim] < min_size: - h_pad_size = int((min_size - img.shape[min_dim]) / 2.0) - if min_dim == 2: - img_padded = mx.nd.pad(img, mode="constant", constant_value=0, - pad_width=(0, 0, 0, 0, h_pad_size, - h_pad_size, 0, 0)) - else: - img_padded = mx.nd.pad(img, mode="constant", constant_value=0, - pad_width=(0, 0, 0, 0, 0, 0, - h_pad_size, h_pad_size)) - return img_padded +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import cv2 +import numpy as np +import mxnet as mx +import gluoncv.data.transforms.image as timage + + +def np_to_mx(img_np): + """ + Convert numpy image to MXNet image. + """ + img_mx = mx.image.image.nd.from_numpy(np.float32(img_np)) + return img_mx + + +def bbox_to_np(bbox): + """ + BoundingBox to [xmin, ymin, xmax, ymax, conf, cls] numpy array. + """ + bbox_np = np.asarray([bbox.left, bbox.top, bbox.left + bbox.width, bbox.top + bbox.height, bbox.confidence, bbox.name], + dtype=object) + return bbox_np + + +class BoundingBoxListToNumpyArray: + """ + Transform object to convert OpenDR BoundingBoxList to numpy array of [[xmin, ymin, xmax, ymax, score, cls_id],...] format. + """ + def __call__(self, bbox_list): + return np.asarray([bbox_to_np(bbox) for bbox in bbox_list.data]) + + +class ImageToNDArrayTransform: + """ + Transform object to convert OpenDR Image to MXNext image. + """ + def __call__(self, img): + return np_to_mx(img.data) + + +class ImageToNumpyArrayTransform: + """ + Transform object to convert OpenDR Image to Numpy array. + """ + def __call__(self, img): + return img.data + + +class ResizeImageAndBoxesTransform: + """ + Resizes a numpy image and corresponding bounding boxes to fit the given dimensions. + """ + def __init__(self, w, h): + self.w = w + self.h = h + + def __call__(self, img, labels): + h, w, _ = img.shape + w_r = self.w / w + h_r = self.h / h + img = cv2.resize(img, (self.w, self.h), interpolation=cv2.INTER_LINEAR) + labels[:, 0] *= w_r + labels[:, 2] *= w_r + labels[:, 1] *= h_r + labels[:, 3] *= h_r + return img, labels + + +def transform_test_resize(imgs, mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225), w=640, h=480): + """ + Function adapted from gluoncv.data.transforms.presets.ssd, resizes the image to a preset size. 
+ :param imgs: + :type imgs: + :param mean: + :type mean: + :param std: + :type std: + :param w: Desired width of the output tensor. + :type w: int + :param h: Desired height of the output tensor. + :type h: int + :return: + :rtype: + """ + if isinstance(imgs, mx.nd.NDArray): + imgs = [imgs] + for im in imgs: + assert isinstance(im, mx.nd.NDArray), "Expect NDArray, got {}".format(type(im)) + + tensors = [] + origs = [] + for img in imgs: + img = timage.imresize(img, w, h) + orig_img = img.asnumpy().astype('uint8') + img = mx.nd.image.to_tensor(img) + img = mx.nd.image.normalize(img, mean=mean, std=std) + tensors.append(img.expand_dims(0)) + origs.append(orig_img) + if len(tensors) == 1: + return tensors[0], origs[0] + return tensors, origs + + +def transform_test(imgs, mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)): + """ + Function dapted from gluoncv.data.transforms.presets.ssd, normalizes and converts image to tensor. + :param imgs: + :type imgs: + :param mean: + :type mean: + :param std: + :type std: + :return: + :rtype: + """ + if isinstance(imgs, mx.nd.NDArray): + imgs = [imgs] + for im in imgs: + assert isinstance(im, mx.nd.NDArray), "Expect NDArray, got {}".format(type(im)) + + tensors = [] + origs = [] + for img in imgs: + orig_img = img.asnumpy().astype('uint8') + img = mx.nd.image.to_tensor(img) + img = mx.nd.image.normalize(img, mean=mean, std=std) + tensors.append(img.expand_dims(0)) + origs.append(orig_img) + if len(tensors) == 1: + return tensors[0], origs[0] + return tensors, origs + + +def pad_test(img, min_size=512): + h_pad_size = 0 + min_dim = 2 + np.argmin([img.shape[2:4]]) + img_padded = img + if img.shape[min_dim] < min_size: + h_pad_size = int((min_size - img.shape[min_dim]) / 2.0) + if min_dim == 2: + img_padded = mx.nd.pad(img, mode="constant", constant_value=0, + pad_width=(0, 0, 0, 0, h_pad_size, + h_pad_size, 0, 0)) + else: + img_padded = mx.nd.pad(img, mode="constant", constant_value=0, + pad_width=(0, 0, 0, 0, 0, 0, + h_pad_size, h_pad_size)) + return img_padded diff --git a/src/opendr/perception/object_detection_2d/datasets/wider_person.py b/src/opendr/perception/object_detection_2d/datasets/wider_person.py index d1372d3ca7..a866ee2bee 100644 --- a/src/opendr/perception/object_detection_2d/datasets/wider_person.py +++ b/src/opendr/perception/object_detection_2d/datasets/wider_person.py @@ -1,131 +1,131 @@ -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -import cv2 -from opendr.engine.target import BoundingBox, BoundingBoxList -from opendr.perception.object_detection_2d.datasets import DetectionDataset - - -class WiderPersonDataset(DetectionDataset): - """ - WiderPerson dataset wrapper for OpenDR detectors. Assumes data has been downloaded from - http://www.cbsr.ia.ac.cn/users/sfzhang/WiderPerson/ and unzipped in 'root' folder, so that it contains - the 'Images' and 'Annotations' folders. 
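# Editor's illustrative aside (not part of the patch): applying the
# ResizeImageAndBoxesTransform defined in transforms.py above to a dummy image
# and a dummy box array. The image size and box coordinates are invented; boxes
# follow the [xmin, ymin, xmax, ymax, conf, cls] layout produced by
# BoundingBoxListToNumpyArray.
import numpy as np
from opendr.perception.object_detection_2d.datasets.transforms import ResizeImageAndBoxesTransform

img = np.zeros((480, 640, 3), dtype=np.uint8)               # H x W x C dummy image
labels = np.array([[10.0, 20.0, 60.0, 80.0, 1.0, 0.0]])     # a single box
resize = ResizeImageAndBoxesTransform(w=320, h=240)
img_resized, labels_resized = resize(img, labels)
print(img_resized.shape)      # (240, 320, 3)
print(labels_resized[0, :4])  # [ 5. 10. 30. 40.] -- coordinates scaled by 0.5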
- """ - def __init__(self, root, splits, image_transform=None, target_transform=None, - transform=None): - classes = ['person'] - self.root = root - available_splits = ['train', 'val'] - self.splits = [split for split in splits if split in available_splits] - self.image_dir = os.path.join(self.root, 'Images') - self.anno_dir = os.path.join(self.root, 'Annotations') - - image_paths = [] - self.bboxes = [] - cls_id = 0 - for split in self.splits: - with open(os.path.join(self.root, '{}.txt'.format(split))) as f: - image_names = f.read().splitlines() - - for image_name in image_names: - anno_file = os.path.join(self.anno_dir, image_name + '.jpg.txt') - with open(anno_file) as f: - lines = f.readlines() - cur_line = 0 - - while True: - if len(lines) <= cur_line: - break - - n_boxes = max(1, int(lines[cur_line][:-1])) - bboxes = lines[cur_line + 1:cur_line + n_boxes + 1] - bounding_boxes = [] - for bbox in bboxes: - bbox = bbox.split(' ') - # convert to (xmin, ymin, xmax, ymax, c) format - # TODO: use BoundingBoxList format? - # coord = np.asarray([float(bbox[1]), float(bbox[2]), - # float(bbox[3]), float(bbox[4]), int(cls_id)]) - bounding_box = BoundingBox(name=int(cls_id), - left=float(bbox[1]), top=float(bbox[2]), - width=float(bbox[3]) - float(bbox[1]), - height=float(bbox[4]) - float(bbox[2])) - # skip box if it's too small - # w = coord[2] - coord[0] - w = bounding_box.width - h = bounding_box.height - # h = coord[3] - coord[1] - if min(w, h) < 64: - continue - bounding_boxes.append(bounding_box) - if bounding_boxes: - # self.bboxes.append(np.asarray(bounding_boxes)) - self.bboxes.append(BoundingBoxList(boxes=bounding_boxes)) - image_paths.append(os.path.join(self.image_dir, image_name + '.jpg')) - cur_line += 2 + n_boxes - dataset_type = 'wider_person' - super().__init__(classes=classes, dataset_type=dataset_type, image_paths=image_paths, - image_transform=image_transform, target_transform=target_transform, - transform=transform, splits=splits, root=root) - - def __getitem__(self, item): - image_path = self.image_paths[item] - label = self.bboxes[item] - # read image, apply transform, return result - img = cv2.imread(image_path) - if self._image_transform is not None: - img = self._image_transform(img) - - if self._target_transform is not None: - label = self._target_transform(label) - - if self._transform is not None: - return self._transform(img, label) - return img, label - - def get_image(self, item): - image_path = self.image_paths[item] - img = cv2.imread(image_path) - if self._image_transform is not None: - img = self._image_transform(img) - return img - - def get_bboxes(self, item): - boxes = self.bboxes[item] - if self._target_transform is not None: - boxes = self._target_transform(boxes) - return boxes - - def __len__(self): - return len(self.image_paths) - - -if __name__ == '__main__': - from opendr.perception.object_detection_2d.utils.vis_utils import draw_bounding_boxes - - dataset = WiderPersonDataset('/home/administrator/data/wider_person', - splits=['train']) - print(len(dataset)) - - all_boxes = [[[] for _ in range(len(dataset))] - for _ in range(dataset.num_classes)] - - for i, (img, targets) in enumerate(dataset): - img = draw_bounding_boxes(img, targets, class_names=dataset.classes) - cv2.imshow('img', img) - cv2.waitKey(0) - cv2.destroyAllWindows() +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import cv2 +from opendr.engine.target import BoundingBox, BoundingBoxList +from opendr.perception.object_detection_2d.datasets import DetectionDataset + + +class WiderPersonDataset(DetectionDataset): + """ + WiderPerson dataset wrapper for OpenDR detectors. Assumes data has been downloaded from + http://www.cbsr.ia.ac.cn/users/sfzhang/WiderPerson/ and unzipped in 'root' folder, so that it contains + the 'Images' and 'Annotations' folders. + """ + def __init__(self, root, splits, image_transform=None, target_transform=None, + transform=None): + classes = ['person'] + self.root = root + available_splits = ['train', 'val'] + self.splits = [split for split in splits if split in available_splits] + self.image_dir = os.path.join(self.root, 'Images') + self.anno_dir = os.path.join(self.root, 'Annotations') + + image_paths = [] + self.bboxes = [] + cls_id = 0 + for split in self.splits: + with open(os.path.join(self.root, '{}.txt'.format(split))) as f: + image_names = f.read().splitlines() + + for image_name in image_names: + anno_file = os.path.join(self.anno_dir, image_name + '.jpg.txt') + with open(anno_file) as f: + lines = f.readlines() + cur_line = 0 + + while True: + if len(lines) <= cur_line: + break + + n_boxes = max(1, int(lines[cur_line][:-1])) + bboxes = lines[cur_line + 1:cur_line + n_boxes + 1] + bounding_boxes = [] + for bbox in bboxes: + bbox = bbox.split(' ') + # convert to (xmin, ymin, xmax, ymax, c) format + # TODO: use BoundingBoxList format? 
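# Editor's note (not part of the patch): this parsing loop assumes the standard
# WiderPerson annotation layout -- a box count on the first line, followed by
# one "<class_label> <x1> <y1> <x2> <y2>" line per box. A made-up example of
# such an annotation file:
#
#     3
#     1 138 285 183 400
#     1 369 263 410 371
#     1 623 266 660 364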
+ # coord = np.asarray([float(bbox[1]), float(bbox[2]), + # float(bbox[3]), float(bbox[4]), int(cls_id)]) + bounding_box = BoundingBox(name=int(cls_id), + left=float(bbox[1]), top=float(bbox[2]), + width=float(bbox[3]) - float(bbox[1]), + height=float(bbox[4]) - float(bbox[2])) + # skip box if it's too small + # w = coord[2] - coord[0] + w = bounding_box.width + h = bounding_box.height + # h = coord[3] - coord[1] + if min(w, h) < 64: + continue + bounding_boxes.append(bounding_box) + if bounding_boxes: + # self.bboxes.append(np.asarray(bounding_boxes)) + self.bboxes.append(BoundingBoxList(boxes=bounding_boxes)) + image_paths.append(os.path.join(self.image_dir, image_name + '.jpg')) + cur_line += 2 + n_boxes + dataset_type = 'wider_person' + super().__init__(classes=classes, dataset_type=dataset_type, image_paths=image_paths, + image_transform=image_transform, target_transform=target_transform, + transform=transform, splits=splits, root=root) + + def __getitem__(self, item): + image_path = self.image_paths[item] + label = self.bboxes[item] + # read image, apply transform, return result + img = cv2.imread(image_path) + if self._image_transform is not None: + img = self._image_transform(img) + + if self._target_transform is not None: + label = self._target_transform(label) + + if self._transform is not None: + return self._transform(img, label) + return img, label + + def get_image(self, item): + image_path = self.image_paths[item] + img = cv2.imread(image_path) + if self._image_transform is not None: + img = self._image_transform(img) + return img + + def get_bboxes(self, item): + boxes = self.bboxes[item] + if self._target_transform is not None: + boxes = self._target_transform(boxes) + return boxes + + def __len__(self): + return len(self.image_paths) + + +if __name__ == '__main__': + from opendr.perception.object_detection_2d.utils.vis_utils import draw_bounding_boxes + + dataset = WiderPersonDataset('/home/administrator/data/wider_person', + splits=['train']) + print(len(dataset)) + + all_boxes = [[[] for _ in range(len(dataset))] + for _ in range(dataset.num_classes)] + + for i, (img, targets) in enumerate(dataset): + img = draw_bounding_boxes(img, targets, class_names=dataset.classes) + cv2.imshow('img', img) + cv2.waitKey(0) + cv2.destroyAllWindows() diff --git a/src/opendr/perception/object_detection_2d/dependencies.ini b/src/opendr/perception/object_detection_2d/dependencies.ini index 2ee37ed03f..c6adba42f4 100644 --- a/src/opendr/perception/object_detection_2d/dependencies.ini +++ b/src/opendr/perception/object_detection_2d/dependencies.ini @@ -4,12 +4,16 @@ python=mxnet==1.8.0 gluoncv==0.11.0b20210908 + protobuf<=3.20.0 tqdm pycocotools>=2.0.4 easydict gdown numba==0.53.0 tensorboardX>=2.0 + seaborn + ipython + psutil linux=libopenblas-dev diff --git a/src/opendr/perception/object_detection_2d/detr/algorithm/datasets/coco.py b/src/opendr/perception/object_detection_2d/detr/algorithm/datasets/coco.py index 1edbe901a4..68681f2cb8 100644 --- a/src/opendr/perception/object_detection_2d/detr/algorithm/datasets/coco.py +++ b/src/opendr/perception/object_detection_2d/detr/algorithm/datasets/coco.py @@ -25,7 +25,7 @@ import torch.utils.data import torchvision from pycocotools import mask as coco_mask -import opendr.perception.object_detection_2d.detr.algorithm.datasets.transforms as T +from opendr.perception.object_detection_2d.detr.algorithm.datasets import transforms as T from PIL import Image as im diff --git 
a/src/opendr/perception/object_detection_2d/detr/algorithm/datasets/panoptic_eval.py b/src/opendr/perception/object_detection_2d/detr/algorithm/datasets/panoptic_eval.py index a80e120c7e..efeda3851d 100644 --- a/src/opendr/perception/object_detection_2d/detr/algorithm/datasets/panoptic_eval.py +++ b/src/opendr/perception/object_detection_2d/detr/algorithm/datasets/panoptic_eval.py @@ -16,7 +16,7 @@ import json import os -import opendr.perception.object_detection_2d.detr.algorithm.util.misc as utils +from opendr.perception.object_detection_2d.detr.algorithm.util import misc as utils try: from panopticapi.evaluation import pq_compute diff --git a/src/opendr/perception/object_detection_2d/detr/algorithm/engine.py b/src/opendr/perception/object_detection_2d/detr/algorithm/engine.py index daa5fb33f5..392ce2fd7f 100644 --- a/src/opendr/perception/object_detection_2d/detr/algorithm/engine.py +++ b/src/opendr/perception/object_detection_2d/detr/algorithm/engine.py @@ -25,7 +25,7 @@ import torch -import opendr.perception.object_detection_2d.detr.algorithm.util.misc as utils +from opendr.perception.object_detection_2d.detr.algorithm.util import misc as utils from opendr.perception.object_detection_2d.detr.algorithm.datasets.coco_eval import CocoEvaluator from opendr.perception.object_detection_2d.detr.algorithm.datasets.panoptic_eval import PanopticEvaluator diff --git a/src/opendr/perception/object_detection_2d/detr/algorithm/models/segmentation.py b/src/opendr/perception/object_detection_2d/detr/algorithm/models/segmentation.py index dea8674644..9d70d13cf7 100644 --- a/src/opendr/perception/object_detection_2d/detr/algorithm/models/segmentation.py +++ b/src/opendr/perception/object_detection_2d/detr/algorithm/models/segmentation.py @@ -26,7 +26,7 @@ from torch import Tensor from PIL import Image -import opendr.perception.object_detection_2d.detr.algorithm.util.box_ops as box_ops +from opendr.perception.object_detection_2d.detr.algorithm.util import box_ops as box_ops from opendr.perception.object_detection_2d.detr.algorithm.util.misc import (NestedTensor, interpolate, nested_tensor_from_tensor_list) diff --git a/src/opendr/perception/object_detection_2d/detr/dependencies.ini b/src/opendr/perception/object_detection_2d/detr/dependencies.ini index ed330bb49a..d03d7a2bc6 100644 --- a/src/opendr/perception/object_detection_2d/detr/dependencies.ini +++ b/src/opendr/perception/object_detection_2d/detr/dependencies.ini @@ -1,8 +1,9 @@ [runtime] # 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 torchvision==0.10.0 + protobuf<=3.20.0 pycocotools>=2.0.4 git+https://github.com/cocodataset/panopticapi.git#egg=panopticapi scipy diff --git a/src/opendr/perception/object_detection_2d/detr/detr_learner.py b/src/opendr/perception/object_detection_2d/detr/detr_learner.py index 6a2ec7c526..3c051bbc3f 100755 --- a/src/opendr/perception/object_detection_2d/detr/detr_learner.py +++ b/src/opendr/perception/object_detection_2d/detr/detr_learner.py @@ -43,9 +43,11 @@ import torchvision.transforms as T import numpy as np import onnxruntime as ort -import opendr.perception.object_detection_2d.detr.algorithm.util.misc as utils +from opendr.perception.object_detection_2d.detr.algorithm.util import misc as utils from PIL import Image as im +torch.hub._validate_not_a_forked_repo = lambda a, b, c: True # workaround for rate 
limit bug + class DetrLearner(Learner): def __init__( diff --git a/src/opendr/perception/object_detection_2d/gem/algorithm/engine.py b/src/opendr/perception/object_detection_2d/gem/algorithm/engine.py index ea24704a61..78e37bfd5a 100644 --- a/src/opendr/perception/object_detection_2d/gem/algorithm/engine.py +++ b/src/opendr/perception/object_detection_2d/gem/algorithm/engine.py @@ -24,7 +24,7 @@ import torch -import opendr.perception.object_detection_2d.gem.algorithm.util.misc as utils +from opendr.perception.object_detection_2d.gem.algorithm.util import misc as utils from opendr.perception.object_detection_2d.detr.algorithm.datasets.coco_eval import CocoEvaluator diff --git a/src/opendr/perception/object_detection_2d/gem/dependencies.ini b/src/opendr/perception/object_detection_2d/gem/dependencies.ini index e3fb6d356b..0d356fc01c 100644 --- a/src/opendr/perception/object_detection_2d/gem/dependencies.ini +++ b/src/opendr/perception/object_detection_2d/gem/dependencies.ini @@ -1,8 +1,9 @@ [runtime] # 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 torchvision==0.10.0 + protobuf<=3.20.0 pillow>=8.3.2 opencv-python==4.5.1.48 pycocotools>=2.0.4 diff --git a/src/opendr/perception/object_detection_2d/gem/gem_learner.py b/src/opendr/perception/object_detection_2d/gem/gem_learner.py index 8012d38106..c9daf2c589 100644 --- a/src/opendr/perception/object_detection_2d/gem/gem_learner.py +++ b/src/opendr/perception/object_detection_2d/gem/gem_learner.py @@ -43,11 +43,13 @@ import torchvision.transforms as T import numpy as np -import opendr.perception.object_detection_2d.detr.algorithm.util.misc as utils +from opendr.perception.object_detection_2d.detr.algorithm.util import misc as utils from PIL import Image as im import zipfile +torch.hub._validate_not_a_forked_repo = lambda a, b, c: True # workaround for rate limit bug + class GemLearner(Learner): def __init__( diff --git a/src/opendr/perception/object_detection_2d/nanodet/README.md b/src/opendr/perception/object_detection_2d/nanodet/README.md new file mode 100644 index 0000000000..409e07a847 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/README.md @@ -0,0 +1,13 @@ +OpenDR 2D Object Detection - Nanodet +====== + +This folder contains the OpenDR Learner class for Nanodet for 2D object detection. + +Sources +------ +Large parts of the implementation are taken from [Nanodet Github](https://github.com/RangiLyu/nanodet) with modifications to make it compatible with OpenDR specifications. + +Usage +------ +- For VOC and COCO like datasets, an ```ExternalDataset``` with the root path and dataset name (```voc```, ```coco```) must be passed to the fit function. +- The ```temp_path``` folder is used to save checkpoints during training. 
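A minimal usage sketch of the notes above (an assumption-laden illustration, not part of this patch): it assumes the learner is exposed as `NanodetLearner` and that its constructor accepts `model_to_use`, `temp_path` and `device` keyword arguments; only the `ExternalDataset`-to-`fit` flow and the role of `temp_path` are stated in the README itself.

```python
from opendr.engine.datasets import ExternalDataset
from opendr.perception.object_detection_2d import NanodetLearner  # assumed import path

# COCO-style dataset rooted at /data/coco; the dataset name selects the loader
train_set = ExternalDataset("/data/coco", "coco")
val_set = ExternalDataset("/data/coco", "coco")

# temp_path is where training checkpoints are written (see note above)
learner = NanodetLearner(model_to_use="m", temp_path="./nanodet_tmp", device="cuda")
learner.fit(train_set, val_dataset=val_set)
learner.save("./nanodet_tmp/trained_model")
```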
\ No newline at end of file diff --git a/src/opendr/perception/object_detection_2d/nanodet/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/config_file_detail.md b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/config_file_detail.md new file mode 100644 index 0000000000..b6224df4d2 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/config_file_detail.md @@ -0,0 +1,201 @@ +# NanoDet Config File Analysis + +NanoDet using [yacs](https://github.com/rbgirshick/yacs) to read YAML config file. + +## Saving path + +```yaml +save_dir: PATH_TO_SAVE +``` + +Change `save_dir` to where you want to save logs and models. If path doesn't exist, NanoDet will create it. + +## Model + +```yaml +model: + arch: + name: OneStageDetector + backbone: xxx + fpn: xxx + head: xxx +``` + +Most detection model architecture can be devided into 3 parts: backbone, task head and connector between them (e.g., FPN, BiFPN, PAN). + +### Backbone + +```yaml +backbone: + name: ShuffleNetV2 + model_size: 1.0x + out_stages: [2,3,4] + activation: LeakyReLU + with_last_conv: False +``` + +NanoDet using ShuffleNetV2 as backbone. You can modify model size, output feature levels and activation function. Moreover, NanoDet provides other lightweight backbones like **GhostNet** and **MobileNetV2**. You can also add your backbone network by importing it in `nanodet/model/backbone/__init__.py`. + +### FPN + +```yaml +fpn: + name: PAN + in_channels: [116, 232, 464] + out_channels: 96 + start_level: 0 + num_outs: 3 +``` + +NanoDet using modified [PAN](http://arxiv.org/abs/1803.01534) (replace downsample convs with interpolation to reduce amount of computations). + +`in_channels`: a list of feature map channels extracted from backbone. + +`out_channels`: output feature map channel. + +### Head + +```yaml +head: + name: NanoDetHead + num_classes: 80 + input_channel: 96 + feat_channels: 96 + stacked_convs: 2 + share_cls_reg: True + octave_base_scale: 8 + scales_per_octave: 1 + strides: [8, 16, 32] + reg_max: 7 + norm_cfg: + type: BN + loss: +``` + +`name`: task head class name + +`num_classes`: number of classes + +`input_channel`: input feature map channel + +`feat_channels`: channel of task head convs + +`stacked_convs`: how many conv blocks use in one task head + +`share_cls_reg`: use same conv blocks for classification and box regression + +`octave_base_scale`: base box scale + +`scales_per_octave`: anchor free model only have one base box, default value 1 + +`strides`: down sample stride of each feature map level + +`reg_max`: max value of per-level l-r-t-b distance + +`norm_cfg`: normalization layer setting + +`loss`: adjust loss functions and weights + +## Weight averaging + +Nanodet supports weight averaging method like EMA: + +```yaml +model: + weight_averager: + name: ExpMovingAverager + decay: 0.9998 + arch: + ... +``` + +## Data + +```yaml +data: + train: + input_size: [320,320] + keep_ratio: True + multi_scale: [0.6, 1.4] + pipeline: + val: + ... +``` + +In `data` you need to set your train and validate dataset. 
+ +`input_size`: [width, height] +`keep_ratio`: whether to maintain the original image ratio when resizing to input size +`multi_scale`: scaling range for multi-scale training. Set to None to turn off. +`pipeline`: data preprocessing and augmentation pipeline + +## Device + +```yaml +device: + gpu_ids: [0] + workers_per_gpu: 12 + batchsize_per_gpu: 160 +``` + +`gpu_ids`: CUDA device id. For multi-gpu training, set [0, 1, 2...]. + +`workers_per_gpu`: how many dataloader processes for each gpu + +`batchsize_per_gpu`: amount of images in one batch for each gpu + +## schedule + +```yaml +schedule: + resume: 0 + optimizer: + name: SGD + lr: 0.14 + momentum: 0.9 + weight_decay: 0.0001 + warmup: + name: linear + steps: 300 + ratio: 0.1 + total_epochs: 70 + lr_schedule: + name: MultiStepLR + milestones: [40,55,60,65] + gamma: 0.1 + val_intervals: 10 +``` + +Set training schedule. + +`resume`: to restore # checkpoint, if 0 model start from random initialization + +`load_model`: path to trained weight + +`optimizer`: support all optimizer provided by pytorch. + +You should adjust the `lr` with `batch_size`. Following linear scaling rule in paper *[Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour](https://research.fb.com/wp-content/uploads/2017/06/imagenet1kin1h5.pdf)* + +`warmup`: warm up your network before training. Support `constant`, `exp` and `linear` three types of warm up. + +`total_epochs`: total epochs to train + +`lr_schedule`: please refer to [pytorch lr_scheduler documentation](https://pytorch.org/docs/stable/optim.html?highlight=lr_scheduler#torch.optim.lr_scheduler) + +`val_intervals`: epoch interval of evaluating during training + +## Evaluate + +```yaml +evaluator: + name: CocoDetectionEvaluator + save_key: mAP +``` + +Currently only support COCO eval. + +`save_key`: metric of best model. Support mAP, AP50, AP75.... 
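As a concrete reading of the linear scaling rule mentioned in the schedule section above (an illustration only, reusing the example values from this guide):

```python
# Linear scaling rule sketch: scale the learning rate with the batch size.
base_lr = 0.14          # lr paired with the reference batch size in the example config
base_batch_size = 160   # batchsize_per_gpu from the example device section
my_batch_size = 32      # e.g. a smaller batch that fits on a single GPU

scaled_lr = base_lr * my_batch_size / base_batch_size
print(scaled_lr)        # 0.028
```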
+ +**** + +`class_names`: used in visualization diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite0_320.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite0_320.yml new file mode 100644 index 0000000000..cdddc320cb --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite0_320.yml @@ -0,0 +1,112 @@ +# nanodet-EfficientNet-Lite0_320 +# COCO mAP(0.5:0.95) = 0.247 +# AP_50 = 0.404 +# AP_75 = 0.250 +# AP_small = 0.079 +# AP_m = 0.243 +# AP_l = 0.406 +save_dir: ./workspace/efficient0_320 +check_point_name: EfficientNet_Lite0_320 +model: + arch: + name: OneStageDetector + backbone: + name: EfficientNetLite + model_name: efficientnet_lite0 + out_stages: [2,4,6] + activation: ReLU6 + fpn: + name: PAN + in_channels: [40, 112, 320] + out_channels: 96 + start_level: 0 + num_outs: 3 + head: + name: NanoDetHead + num_classes: 80 + input_channel: 96 + feat_channels: 96 + activation: ReLU6 + stacked_convs: 2 + share_cls_reg: True + octave_base_scale: 5 + scales_per_octave: 1 + strides: [8, 16, 32] + reg_max: 7 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 +data: + train: + input_size: [320,320] #[w,h] + keep_ratio: True + pipeline: + perspective: 0.0 + scale: [0.6, 1.4] + stretch: [[1, 1], [1, 1]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[127.0, 127.0, 127.0], [128.0, 128.0, 128.0]] + val: + input_size: [320,320] #[w,h] + keep_ratio: True + pipeline: + normalize: [[127.0, 127.0, 127.0], [128.0, 128.0, 128.0]] +device: + gpu_ids: [0] + workers_per_gpu: 12 + batchsize_per_gpu: 150 +schedule: + resume: 0 + optimizer: + name: SGD + lr: 0.15 + momentum: 0.9 + weight_decay: 0.0001 + warmup: + name: linear + steps: 500 + ratio: 0.01 + total_epochs: 190 + lr_schedule: + name: MultiStepLR + milestones: [140,170,180,185] + gamma: 0.1 + val_intervals: 1 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP + +log: + interval: 10 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite1_416.yml 
b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite1_416.yml new file mode 100644 index 0000000000..a189662a77 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite1_416.yml @@ -0,0 +1,113 @@ +# nanodet-EfficientNet-Lite1_416 +# COCO mAP(0.5:0.95) = 0.303 +# AP_50 = 0.471 +# AP_75 = 0.313 +# AP_small = 0.122 +# AP_m = 0.321 +# AP_l = 0.432 +save_dir: ./workspace/efficient1_416_SGD +check_point_name: EfficientNet_Lite1_416 +model: + arch: + name: OneStageDetector + backbone: + name: EfficientNetLite + model_name: efficientnet_lite1 + out_stages: [2,4,6] + activation: ReLU6 + pretrain: True + fpn: + name: PAN + in_channels: [40, 112, 320] + out_channels: 128 + start_level: 0 + num_outs: 3 + head: + name: NanoDetHead + num_classes: 80 + input_channel: 128 + feat_channels: 128 + stacked_convs: 3 + activation: ReLU6 + share_cls_reg: True + octave_base_scale: 8 + scales_per_octave: 1 + strides: [8, 16, 32] + reg_max: 10 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 +data: + train: + input_size: [416,416] #[w,h] + keep_ratio: True + pipeline: + perspective: 0.0 + scale: [0.5, 1.5] + stretch: [[1, 1], [1, 1]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[127.0, 127.0, 127.0], [128.0, 128.0, 128.0]] + val: + input_size: [416,416] #[w,h] + keep_ratio: True + pipeline: + normalize: [[127.0, 127.0, 127.0], [128.0, 128.0, 128.0]] +device: + gpu_ids: [0] + workers_per_gpu: 12 + batchsize_per_gpu: 100 +schedule: + resume: 0 + optimizer: + name: SGD + lr: 0.07 + momentum: 0.9 + weight_decay: 0.0001 + warmup: + name: linear + steps: 500 + ratio: 0.01 + total_epochs: 170 + lr_schedule: + name: MultiStepLR + milestones: [130,150,160,165] + gamma: 0.1 + val_intervals: 5 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP + +log: + interval: 10 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite2_512.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite2_512.yml new file mode 100644 index 0000000000..20664fe7ca --- /dev/null +++ 
b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/EfficientNet_Lite/nanodet_EfficientNet_Lite2_512.yml @@ -0,0 +1,113 @@ +# nanodet-EfficientNet-Lite2_512 +# COCO mAP(0.5:0.95) = 0.326 +# AP_50 = 0.501 +# AP_75 = 0.344 +# AP_small = 0.152 +# AP_m = 0.342 +# AP_l = 0.481 +save_dir: ./workspace/efficientlite2_512 +check_point_name: EfficientNet_Lite2_512 +model: + arch: + name: OneStageDetector + backbone: + name: EfficientNetLite + model_name: efficientnet_lite2 + out_stages: [2,4,6] + activation: ReLU6 + pretrain: True + fpn: + name: PAN + in_channels: [48, 120, 352] + out_channels: 128 + start_level: 0 + num_outs: 3 + head: + name: NanoDetHead + num_classes: 80 + input_channel: 128 + feat_channels: 128 + stacked_convs: 4 + activation: ReLU6 + share_cls_reg: True + octave_base_scale: 5 + scales_per_octave: 1 + strides: [8, 16, 32] + reg_max: 10 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 +data: + train: + input_size: [512,512] #[w,h] + keep_ratio: True + pipeline: + perspective: 0.0 + scale: [0.5, 1.5] + stretch: [[1, 1], [1, 1]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[127.0, 127.0, 127.0], [128.0, 128.0, 128.0]] + val: + input_size: [512,512] #[w,h] + keep_ratio: True + pipeline: + normalize: [[127.0, 127.0, 127.0], [128.0, 128.0, 128.0]] +device: + gpu_ids: [0] + workers_per_gpu: 12 + batchsize_per_gpu: 60 +schedule: + resume: 0 + optimizer: + name: SGD + lr: 0.06 + momentum: 0.9 + weight_decay: 0.0001 + warmup: + name: linear + steps: 300 + ratio: 0.1 + total_epochs: 135 + lr_schedule: + name: MultiStepLR + milestones: [90,110,120,130] + gamma: 0.1 + val_intervals: 5 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP + +log: + interval: 10 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/RepVGG/nanodet_RepVGG_A0_416.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/RepVGG/nanodet_RepVGG_A0_416.yml new file mode 100644 index 0000000000..8a0d8debeb --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/RepVGG/nanodet_RepVGG_A0_416.yml @@ -0,0 +1,107 @@ +save_dir: ./workspace/RepVGG_A0_416 +check_point_name: RepVGG_A0_416 +model: + arch: + name: OneStageDetector + backbone: + name: RepVGG + arch: A0 + out_stages: [2,3,4] + activation: 
ReLU + last_channel: 512 + deploy: False + fpn: + name: PAN + in_channels: [96, 192, 512] + out_channels: 128 + start_level: 0 + num_outs: 3 + head: + name: NanoDetHead + num_classes: 80 + conv_type: Conv + input_channel: 128 + feat_channels: 128 + stacked_convs: 2 + activation: ReLU + share_cls_reg: True + octave_base_scale: 8 + scales_per_octave: 1 + strides: [8, 16, 32] + reg_max: 10 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 +data: + train: + input_size: [416,416] #[w,h] + keep_ratio: True + pipeline: + perspective: 0.0 + scale: [0.5, 1.5] + stretch: [[1, 1], [1, 1]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: [416,416] #[w,h] + keep_ratio: True + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] + workers_per_gpu: 1 + batchsize_per_gpu: 100 +schedule: + resume: 0 + optimizer: + name: SGD + lr: 0.07 + momentum: 0.9 + weight_decay: 0.0001 + warmup: + name: linear + steps: 500 + ratio: 0.01 + total_epochs: 170 + lr_schedule: + name: MultiStepLR + milestones: [130,150,160,165] + gamma: 0.1 + val_intervals: 5 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP +log: + interval: 10 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/Transformer/nanodet_t.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/Transformer/nanodet_t.yml new file mode 100644 index 0000000000..a8c312cd61 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/Transformer/nanodet_t.yml @@ -0,0 +1,115 @@ +# NanoDet-m with transformer attention +# COCO mAP(0.5:0.95) = 0.217 +# AP_50 = 0.363 +# AP_75 = 0.218 +# AP_small = 0.069 +# AP_m = 0.214 +# AP_l = 0.364 + +save_dir: ./workspace/nanodet_t +check_point_name: t +model: + arch: + name: OneStageDetector + backbone: + name: ShuffleNetV2 + model_size: 1.0x + out_stages: [2,3,4] + activation: LeakyReLU + fpn: + name: TAN # transformer attention network + in_channels: [116, 232, 464] + out_channels: 128 + feature_hw: [20,20] # size for position embedding + num_heads: 8 + num_encoders: 1 + mlp_ratio: 4 + dropout_ratio: 0.1 + activation: LeakyReLU + head: + name: NanoDetHead + num_classes: 80 + input_channel: 128 + feat_channels: 128 + stacked_convs: 
2 + share_cls_reg: True + octave_base_scale: 5 + scales_per_octave: 1 + strides: [8, 16, 32] + reg_max: 7 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 +data: + train: + input_size: [320,320] #[w,h] + keep_ratio: True + pipeline: + perspective: 0.0 + scale: [0.6, 1.4] + stretch: [[1, 1], [1, 1]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.8, 1.2] + saturation: [0.8, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: [320,320] #[w,h] + keep_ratio: True + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] + workers_per_gpu: 8 + batchsize_per_gpu: 160 +schedule: + resume: 0 + optimizer: + name: SGD + lr: 0.14 + momentum: 0.9 + weight_decay: 0.0001 + warmup: + name: linear + steps: 500 + ratio: 0.01 + total_epochs: 190 + lr_schedule: + name: MultiStepLR + milestones: [140,170,180,185] + gamma: 0.1 + val_intervals: 10 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP +log: + interval: 10 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_g.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_g.yml new file mode 100644 index 0000000000..0d09c335ab --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_g.yml @@ -0,0 +1,115 @@ +# NanoDet-g-416 is designed for edge NPU, GPU or TPU with high parallel computing power but low memory bandwidth +# COCO mAP(0.5:0.95) = 22.9 +# Flops = 4.2B +# Params = 3.8M +# COCO pre-trained weight link: https://drive.google.com/file/d/10uW7oqZKw231l_tr4C1bJWkbCXgBf7av/view?usp=sharing +save_dir: ./workspace/nanodet_g +check_point_name: g +model: + arch: + name: OneStageDetector + backbone: + name: CustomCspNet + net_cfg: [[ 'Conv', 3, 32, 3, 2], # 1/2 + [ 'MaxPool', 3, 2 ], # 1/4 + [ 'CspBlock', 32, 1, 3, 1 ], # 1/4 + [ 'CspBlock', 64, 2, 3, 2 ], # 1/8 + [ 'CspBlock', 128, 2, 3, 2 ], # 1/16 + [ 'CspBlock', 256, 3, 3, 2 ]] # 1/32 + out_stages: [3,4,5] + activation: LeakyReLU + fpn: + name: PAN + in_channels: [128, 256, 512] + out_channels: 128 + start_level: 0 + num_outs: 3 + head: + name: NanoDetHead + num_classes: 80 + conv_type: Conv + activation: LeakyReLU + input_channel: 128 + feat_channels: 128 + stacked_convs: 1 + share_cls_reg: True + octave_base_scale: 8 + scales_per_octave: 1 + strides: [8, 
16, 32] + reg_max: 10 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 +data: + train: + input_size: [416,416] #[w,h] + keep_ratio: True + pipeline: + perspective: 0.0 + scale: [0.6, 1.4] + stretch: [[1, 1], [1, 1]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: [416,416] #[w,h] + keep_ratio: True + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] + workers_per_gpu: 10 + batchsize_per_gpu: 128 +schedule: + resume: 0 + optimizer: + name: SGD + lr: 0.1 + momentum: 0.9 + weight_decay: 0.0001 + warmup: + name: linear + steps: 500 + ratio: 0.01 + total_epochs: 190 + lr_schedule: + name: MultiStepLR + milestones: [130,160,175,185] + gamma: 0.1 + val_intervals: 5 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP +log: + interval: 10 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m.yml new file mode 100644 index 0000000000..876168e7ad --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m.yml @@ -0,0 +1,104 @@ +#Config File example +save_dir: ./workspace/nanodet_m +check_point_name: m +model: + arch: + name: OneStageDetector + backbone: + name: ShuffleNetV2 + model_size: 1.0x + out_stages: [2,3,4] + activation: LeakyReLU + fpn: + name: PAN + in_channels: [116, 232, 464] + out_channels: 96 + start_level: 0 + num_outs: 3 + head: + name: NanoDetHead + num_classes: 80 + input_channel: 96 + feat_channels: 96 + stacked_convs: 2 + share_cls_reg: True + octave_base_scale: 5 + scales_per_octave: 1 + strides: [8, 16, 32] + reg_max: 7 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 +data: + train: + input_size: [320,320] #[w,h] + keep_ratio: True + pipeline: + perspective: 0.0 + scale: [0.6, 1.4] + stretch: [[1, 1], [1, 1]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: 
[320,320] #[w,h] + keep_ratio: True + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] + workers_per_gpu: 8 + batchsize_per_gpu: 192 +schedule: + resume: 0 + optimizer: + name: SGD + lr: 0.14 + momentum: 0.9 + weight_decay: 0.0001 + warmup: + name: linear + steps: 300 + ratio: 0.1 + total_epochs: 280 + lr_schedule: + name: MultiStepLR + milestones: [240,260,275] + gamma: 0.1 + val_intervals: 10 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP +log: + interval: 10 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_0.5x.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_0.5x.yml new file mode 100644 index 0000000000..2a38388336 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_0.5x.yml @@ -0,0 +1,110 @@ +# nanodet-m-0.5x +# COCO mAP(0.5:0.95) = 0.135 +# AP_50 = 0.245 +# AP_75 = 0.129 +# AP_small = 0.036 +# AP_m = 0.119 +# AP_l = 0.232 +save_dir: ./workspace/nanodet_m_0.5x +check_point_name: m_0.5x +model: + arch: + name: OneStageDetector + backbone: + name: ShuffleNetV2 + model_size: 0.5x + out_stages: [2,3,4] + activation: LeakyReLU + fpn: + name: PAN + in_channels: [48, 96, 192] + out_channels: 96 + start_level: 0 + num_outs: 3 + head: + name: NanoDetHead + num_classes: 80 + input_channel: 96 + feat_channels: 96 + stacked_convs: 2 + share_cls_reg: True + octave_base_scale: 5 + scales_per_octave: 1 + strides: [8, 16, 32] + reg_max: 7 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 +data: + train: + input_size: [320,320] #[w,h] + keep_ratio: True + pipeline: + perspective: 0.0 + scale: [0.5, 1.5] + stretch: [[1, 1], [1, 1]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: [320,320] #[w,h] + keep_ratio: True + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] + workers_per_gpu: 8 + batchsize_per_gpu: 96 +schedule: + resume: 0 + optimizer: + name: SGD + lr: 0.07 + momentum: 0.9 + weight_decay: 0.0001 + warmup: + name: linear + steps: 1000 + ratio: 0.00001 + total_epochs: 180 + lr_schedule: + name: MultiStepLR + milestones: [130,160,175] + gamma: 0.1 + val_intervals: 10 +evaluator: + name: 
CocoDetectionEvaluator + save_key: mAP +log: + interval: 50 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x.yml new file mode 100644 index 0000000000..a54268f70a --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x.yml @@ -0,0 +1,111 @@ +#nanodet-m-1.5x +# COCO mAP(0.5:0.95) = 0.235 +# AP_50 = 0.384 +# AP_75 = 0.239 +# AP_small = 0.069 +# AP_m = 0.235 +# AP_l = 0.389 +save_dir: ./workspace/nanodet_m_1.5x +check_point_name: m_1.5x +model: + arch: + name: OneStageDetector + backbone: + name: ShuffleNetV2 + model_size: 1.5x + out_stages: [2,3,4] + activation: LeakyReLU + fpn: + name: PAN + in_channels: [176, 352, 704] + out_channels: 128 + start_level: 0 + num_outs: 3 + head: + name: NanoDetHead + num_classes: 80 + input_channel: 128 + feat_channels: 128 + stacked_convs: 2 + share_cls_reg: True + octave_base_scale: 5 + scales_per_octave: 1 + strides: [8, 16, 32] + reg_max: 7 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 +data: + train: + input_size: [320,320] #[w,h] + keep_ratio: True + pipeline: + perspective: 0.0 + scale: [0.6, 1.4] + stretch: [[1, 1], [1, 1]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: [320,320] #[w,h] + keep_ratio: True + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] + workers_per_gpu: 8 + batchsize_per_gpu: 192 +schedule: + resume: 0 + optimizer: + name: SGD + lr: 0.14 + momentum: 0.9 + weight_decay: 0.0001 + warmup: + name: linear + steps: 300 + ratio: 0.1 + total_epochs: 280 + lr_schedule: + name: MultiStepLR + milestones: [240,260,275] + gamma: 0.1 + val_intervals: 10 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP + +log: + interval: 10 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 
'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x_416.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x_416.yml new file mode 100644 index 0000000000..b8274403b1 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_1.5x_416.yml @@ -0,0 +1,110 @@ +#nanodet-m-1.5x-416 +# COCO mAP(0.5:0.95) = 0.268 +# AP_50 = 0.424 +# AP_75 = 0.276 +# AP_small = 0.098 +# AP_m = 0.277 +# AP_l = 0.420 +save_dir: ./workspace/nanodet_m_1.5x_416 +check_point_name: m_1.5x_416 +model: + arch: + name: OneStageDetector + backbone: + name: ShuffleNetV2 + model_size: 1.5x + out_stages: [2,3,4] + activation: LeakyReLU + fpn: + name: PAN + in_channels: [176, 352, 704] + out_channels: 128 + start_level: 0 + num_outs: 3 + head: + name: NanoDetHead + num_classes: 80 + input_channel: 128 + feat_channels: 128 + stacked_convs: 2 + share_cls_reg: True + octave_base_scale: 5 + scales_per_octave: 1 + strides: [8, 16, 32] + reg_max: 7 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 +data: + train: + input_size: [416,416] #[w,h] + keep_ratio: True + pipeline: + perspective: 0.0 + scale: [0.5, 1.4] + stretch: [[1, 1], [1, 1]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: [416,416] #[w,h] + keep_ratio: True + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] + workers_per_gpu: 8 + batchsize_per_gpu: 176 +schedule: + resume: 0 + optimizer: + name: SGD + lr: 0.14 + momentum: 0.9 + weight_decay: 0.0001 + warmup: + name: linear + steps: 300 + ratio: 0.1 + total_epochs: 280 + lr_schedule: + name: MultiStepLR + milestones: [240,260,275] + gamma: 0.1 + val_intervals: 10 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP +log: + interval: 10 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 
'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_416.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_416.yml new file mode 100644 index 0000000000..eb30de1e0d --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/legacy_v0.x_configs/nanodet_m_416.yml @@ -0,0 +1,111 @@ +#nanodet-m-416 +# COCO mAP(0.5:0.95) = 0.235 +# AP_50 = 0.384 +# AP_75 = 0.242 +# AP_small = 0.082 +# AP_m = 0.240 +# AP_l = 0.375 +save_dir: ./workspace/nanodet_m_416 +check_point_name: m_416 +model: + arch: + name: OneStageDetector + backbone: + name: ShuffleNetV2 + model_size: 1.0x + out_stages: [2,3,4] + activation: LeakyReLU + fpn: + name: PAN + in_channels: [116, 232, 464] + out_channels: 96 + start_level: 0 + num_outs: 3 + head: + name: NanoDetHead + num_classes: 80 + input_channel: 96 + feat_channels: 96 + stacked_convs: 2 + share_cls_reg: True + octave_base_scale: 5 + scales_per_octave: 1 + strides: [8, 16, 32] + reg_max: 7 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 +data: + train: + input_size: [416,416] #[w,h] + keep_ratio: True + pipeline: + perspective: 0.0 + scale: [0.5, 1.4] + stretch: [[1, 1], [1, 1]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: [416,416] #[w,h] + keep_ratio: True + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] + workers_per_gpu: 8 + batchsize_per_gpu: 192 +schedule: + resume: 0 + optimizer: + name: SGD + lr: 0.14 + momentum: 0.9 + weight_decay: 0.0001 + warmup: + name: linear + steps: 300 + ratio: 0.1 + total_epochs: 280 + lr_schedule: + name: MultiStepLR + milestones: [240,260,275] + gamma: 0.1 + val_intervals: 10 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP + +log: + interval: 10 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_custom.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_custom.yml new file mode 100644 index 0000000000..bf58986a48 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_custom.yml @@ -0,0 +1,125 @@ +# nanodet-plus-m-1.5x_416 +# COCO mAP(0.5:0.95) = 
0.341 +# AP_50 = 0.506 +# AP_75 = 0.357 +# AP_small = 0.143 +# AP_m = 0.363 +# AP_l = 0.539 +save_dir: ./workspace/nanodet_plus_m_1.5x_416/test_training +check_point_name: plus_m_1.5x_416_default +model: + weight_averager: + name: ExpMovingAverager + decay: 0.9998 + arch: + name: NanoDetPlus + detach_epoch: 10 + backbone: + name: ShuffleNetV2 + model_size: 1.5x + out_stages: [2,3,4] + activation: LeakyReLU + fpn: + name: GhostPAN + in_channels: [176, 352, 704] + out_channels: 128 + kernel_size: 5 + num_extra_level: 1 + use_depthwise: True + activation: LeakyReLU + head: + name: NanoDetPlusHead + num_classes: 80 + input_channel: 128 + feat_channels: 128 + stacked_convs: 2 + kernel_size: 5 + strides: [8, 16, 32, 64] + activation: LeakyReLU + reg_max: 7 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 + # Auxiliary head, only use in training time. + aux_head: + name: SimpleConvHead + num_classes: 80 + input_channel: 256 + feat_channels: 256 + stacked_convs: 4 + strides: [8, 16, 32, 64] + activation: LeakyReLU + reg_max: 7 +data: + train: + input_size: [416,416] #[w,h] + keep_ratio: False + pipeline: + perspective: 0.0 + scale: [0.6, 1.4] + stretch: [[0.8, 1.2], [0.8, 1.2]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: [416,416] #[w,h] + keep_ratio: False + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] + workers_per_gpu: 10 + batchsize_per_gpu: 12 #96 +schedule: + resume: 0 + optimizer: + name: AdamW + lr: 0.000125 + weight_decay: 0.05 + warmup: + name: linear + steps: 500 + ratio: 0.0001 + total_epochs: 300 + lr_schedule: + name: CosineAnnealingLR + T_max: 300 + eta_min: 0.00005 + val_intervals: 10 +grad_clip: 35 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP +log: + interval: 50 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_guide.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_guide.yml new file mode 100644 index 0000000000..3729c111ec --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_guide.yml @@ -0,0 +1,107 @@ +#Config File example +save_dir: ./workspace/nanodet_m +check_point_name: +model: + weight_averager: + name: ExpMovingAverager + decay: 0.9998 + arch: + name: NanoDetPlus + 
detach_epoch: 10 + backbone: + name: ShuffleNetV2 + model_size: 1.0x + out_stages: [2,3,4] + activation: LeakyReLU + fpn: + name: GhostPAN + in_channels: [116, 232, 464] + out_channels: 96 + kernel_size: 5 + num_extra_level: 1 + use_depthwise: True + activation: LeakyReLU + head: + name: NanoDetPlusHead + num_classes: 80 + input_channel: 96 + feat_channels: 96 + stacked_convs: 2 + kernel_size: 5 + strides: [8, 16, 32, 64] + activation: LeakyReLU + reg_max: 7 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 + # Auxiliary head, only use in training time. + aux_head: + name: SimpleConvHead + num_classes: 80 + input_channel: 192 + feat_channels: 192 + stacked_convs: 4 + strides: [8, 16, 32, 64] + activation: LeakyReLU + reg_max: 7 + +class_names: &class_names ['NAME1', 'NAME2', 'NAME3', 'NAME4', '...'] #Please fill in the category names (not include background category) +data: + train: + input_size: [320,320] #[w,h] + keep_ratio: True + pipeline: + perspective: 0.0 + scale: [0.6, 1.4] + stretch: [[1, 1], [1, 1]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.8, 1.2] + saturation: [0.8, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: [320,320] #[w,h] + keep_ratio: True + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] # Set like [0, 1, 2, 3] if you have multi-GPUs + workers_per_gpu: 8 + batchsize_per_gpu: 96 +schedule: + resume: 0 + optimizer: + name: AdamW + lr: 0.001 + weight_decay: 0.05 + warmup: + name: linear + steps: 500 + ratio: 0.0001 + total_epochs: 300 + lr_schedule: + name: CosineAnnealingLR + T_max: 300 + eta_min: 0.00005 + val_intervals: 10 +grad_clip: 35 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP + +log: + interval: 10 diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_320.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_320.yml new file mode 100644 index 0000000000..3dcd1a2973 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_320.yml @@ -0,0 +1,125 @@ +# nanodet-plus-m-1.5x_320 +# COCO mAP(0.5:0.95) = 0.299 +# AP_50 = 0.454 +# AP_75 = 0.312 +# AP_small = 0.102 +# AP_m = 0.309 +# AP_l = 0.493 +save_dir: ./workspace/nanodet_plus_m_1.5x_320 +check_point_name: plus_m_1.5x_320 +model: + weight_averager: + name: ExpMovingAverager + decay: 0.9998 + arch: + name: NanoDetPlus + detach_epoch: 10 + backbone: + name: ShuffleNetV2 + model_size: 1.5x + out_stages: [2,3,4] + activation: LeakyReLU + fpn: + name: GhostPAN + in_channels: [176, 352, 704] + out_channels: 128 + kernel_size: 5 + num_extra_level: 1 + use_depthwise: True + activation: LeakyReLU + head: + name: NanoDetPlusHead + num_classes: 80 + input_channel: 128 + feat_channels: 128 + stacked_convs: 2 + kernel_size: 5 + strides: [8, 16, 32, 64] + activation: LeakyReLU + reg_max: 7 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 + # Auxiliary head, only use in training time. 
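+    # Note (an inference from the NanoDet-Plus design, not stated in this config): the
+    # aux head below mirrors the main head's strides and reg_max with wider channels;
+    # it only guides label assignment during training, so it adds no inference-time cost.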
+ aux_head: + name: SimpleConvHead + num_classes: 80 + input_channel: 256 + feat_channels: 256 + stacked_convs: 4 + strides: [8, 16, 32, 64] + activation: LeakyReLU + reg_max: 7 +data: + train: + input_size: [320,320] #[w,h] + keep_ratio: False + pipeline: + perspective: 0.0 + scale: [0.6, 1.4] + stretch: [[0.8, 1.2], [0.8, 1.2]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: [320,320] #[w,h] + keep_ratio: False + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] + workers_per_gpu: 10 + batchsize_per_gpu: 96 +schedule: + resume: 0 + optimizer: + name: AdamW + lr: 0.001 + weight_decay: 0.05 + warmup: + name: linear + steps: 500 + ratio: 0.0001 + total_epochs: 300 + lr_schedule: + name: CosineAnnealingLR + T_max: 300 + eta_min: 0.00005 + val_intervals: 10 +grad_clip: 35 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP +log: + interval: 50 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_416.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_416.yml new file mode 100644 index 0000000000..5a76789b50 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_1.5x_416.yml @@ -0,0 +1,125 @@ +# nanodet-plus-m-1.5x_416 +# COCO mAP(0.5:0.95) = 0.341 +# AP_50 = 0.506 +# AP_75 = 0.357 +# AP_small = 0.143 +# AP_m = 0.363 +# AP_l = 0.539 +save_dir: ./workspace/nanodet_plus_m_1.5x_416 +check_point_name: plus_m_1.5x_416 +model: + weight_averager: + name: ExpMovingAverager + decay: 0.9998 + arch: + name: NanoDetPlus + detach_epoch: 10 + backbone: + name: ShuffleNetV2 + model_size: 1.5x + out_stages: [2,3,4] + activation: LeakyReLU + fpn: + name: GhostPAN + in_channels: [176, 352, 704] + out_channels: 128 + kernel_size: 5 + num_extra_level: 1 + use_depthwise: True + activation: LeakyReLU + head: + name: NanoDetPlusHead + num_classes: 80 + input_channel: 128 + feat_channels: 128 + stacked_convs: 2 + kernel_size: 5 + strides: [8, 16, 32, 64] + activation: LeakyReLU + reg_max: 7 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 + # Auxiliary head, only use in training time. 
+ aux_head: + name: SimpleConvHead + num_classes: 80 + input_channel: 256 + feat_channels: 256 + stacked_convs: 4 + strides: [8, 16, 32, 64] + activation: LeakyReLU + reg_max: 7 +data: + train: + input_size: [416,416] #[w,h] + keep_ratio: False + pipeline: + perspective: 0.0 + scale: [0.6, 1.4] + stretch: [[0.8, 1.2], [0.8, 1.2]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: [416,416] #[w,h] + keep_ratio: False + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] + workers_per_gpu: 10 + batchsize_per_gpu: 96 +schedule: + resume: 0 + optimizer: + name: AdamW + lr: 0.001 + weight_decay: 0.05 + warmup: + name: linear + steps: 500 + ratio: 0.0001 + total_epochs: 300 + lr_schedule: + name: CosineAnnealingLR + T_max: 300 + eta_min: 0.00005 + val_intervals: 10 +grad_clip: 35 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP +log: + interval: 50 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_320.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_320.yml new file mode 100644 index 0000000000..e4b5f58f9c --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_320.yml @@ -0,0 +1,125 @@ +# nanodet-plus-m_320 +# COCO mAP(0.5:0.95) = 0.270 +# AP_50 = 0.418 +# AP_75 = 0.281 +# AP_small = 0.083 +# AP_m = 0.278 +# AP_l = 0.451 +save_dir: ./workspace/nanodet_plus_m_320 +check_point_name: plus_m_320 +model: + weight_averager: + name: ExpMovingAverager + decay: 0.9998 + arch: + name: NanoDetPlus + detach_epoch: 10 + backbone: + name: ShuffleNetV2 + model_size: 1.0x + out_stages: [2,3,4] + activation: LeakyReLU + fpn: + name: GhostPAN + in_channels: [116, 232, 464] + out_channels: 96 + kernel_size: 5 + num_extra_level: 1 + use_depthwise: True + activation: LeakyReLU + head: + name: NanoDetPlusHead + num_classes: 80 + input_channel: 96 + feat_channels: 96 + stacked_convs: 2 + kernel_size: 5 + strides: [8, 16, 32, 64] + activation: LeakyReLU + reg_max: 7 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 + # Auxiliary head, only use in training time. 
+ aux_head: + name: SimpleConvHead + num_classes: 80 + input_channel: 192 + feat_channels: 192 + stacked_convs: 4 + strides: [8, 16, 32, 64] + activation: LeakyReLU + reg_max: 7 +data: + train: + input_size: [320,320] #[w,h] + keep_ratio: False + pipeline: + perspective: 0.0 + scale: [0.6, 1.4] + stretch: [[0.8, 1.2], [0.8, 1.2]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: [320,320] #[w,h] + keep_ratio: False + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] # Set like [0, 1, 2, 3] if you have multi-GPUs + workers_per_gpu: 10 + batchsize_per_gpu: 32 #96 +schedule: + resume: 0 + optimizer: + name: AdamW + lr: 0.001 + weight_decay: 0.05 + warmup: + name: linear + steps: 500 + ratio: 0.0001 + total_epochs: 300 + lr_schedule: + name: CosineAnnealingLR + T_max: 300 + eta_min: 0.00005 + val_intervals: 10 +grad_clip: 35 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP +log: + interval: 50 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_416.yml b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_416.yml new file mode 100644 index 0000000000..61a536ad7d --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/config/nanodet_plus_m_416.yml @@ -0,0 +1,125 @@ +# nanodet-plus-m_416 +# COCO mAP(0.5:0.95) = 0.304 +# AP_50 = 0.459 +# AP_75 = 0.317 +# AP_small = 0.106 +# AP_m = 0.322 +# AP_l = 0.477 +save_dir: ./workspace/nanodet_plus_m_416 +check_point_name: plus_m_416 +model: + weight_averager: + name: ExpMovingAverager + decay: 0.9998 + arch: + name: NanoDetPlus + detach_epoch: 10 + backbone: + name: ShuffleNetV2 + model_size: 1.0x + out_stages: [2,3,4] + activation: LeakyReLU + fpn: + name: GhostPAN + in_channels: [116, 232, 464] + out_channels: 96 + kernel_size: 5 + num_extra_level: 1 + use_depthwise: True + activation: LeakyReLU + head: + name: NanoDetPlusHead + num_classes: 80 + input_channel: 96 + feat_channels: 96 + stacked_convs: 2 + kernel_size: 5 + strides: [8, 16, 32, 64] + activation: LeakyReLU + reg_max: 7 + norm_cfg: + type: BN + loss: + loss_qfl: + name: QualityFocalLoss + use_sigmoid: True + beta: 2.0 + loss_weight: 1.0 + loss_dfl: + name: DistributionFocalLoss + loss_weight: 0.25 + loss_bbox: + name: GIoULoss + loss_weight: 2.0 + # Auxiliary head, only use in training time. 
+ aux_head: + name: SimpleConvHead + num_classes: 80 + input_channel: 192 + feat_channels: 192 + stacked_convs: 4 + strides: [8, 16, 32, 64] + activation: LeakyReLU + reg_max: 7 +data: + train: + input_size: [416,416] #[w,h] + keep_ratio: False + pipeline: + perspective: 0.0 + scale: [0.6, 1.4] + stretch: [[0.8, 1.2], [0.8, 1.2]] + rotation: 0 + shear: 0 + translate: 0.2 + flip: 0.5 + brightness: 0.2 + contrast: [0.6, 1.4] + saturation: [0.5, 1.2] + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] + val: + input_size: [416,416] #[w,h] + keep_ratio: False + pipeline: + normalize: [[103.53, 116.28, 123.675], [57.375, 57.12, 58.395]] +device: + gpu_ids: [0] + workers_per_gpu: 10 + batchsize_per_gpu: 4 #96 +schedule: + resume: 0 + optimizer: + name: AdamW + lr: 0.001 + weight_decay: 0.05 + warmup: + name: linear + steps: 500 + ratio: 0.0001 + total_epochs: 300 + lr_schedule: + name: CosineAnnealingLR + T_max: 300 + eta_min: 0.00005 + val_intervals: 1 +grad_clip: 35 +evaluator: + name: CocoDetectionEvaluator + save_key: mAP +log: + interval: 200 + +class_names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant', + 'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat', + 'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket', + 'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush'] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/batch_process.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/batch_process.py new file mode 100644 index 0000000000..f84170a275 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/batch_process.py @@ -0,0 +1,37 @@ +from typing import Sequence + +import torch +import torch.nn.functional as F + + +def stack_batch_img( + img_tensors: Sequence[torch.Tensor], divisible: int = 0, pad_value: float = 0.0 +) -> torch.Tensor: + """ + Args: + img_tensors (Sequence[torch.Tensor]): + divisible (int): + pad_value (float): value to pad + + Returns: + torch.Tensor. 
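+
+        Example (illustrative; assumes CHW tensors that share all but the last two dims):
+
+        >>> imgs = [torch.zeros(3, 10, 20), torch.zeros(3, 12, 20), torch.zeros(3, 8, 20)]
+        >>> stack_batch_img(imgs, divisible=4).shape
+        torch.Size([3, 3, 12, 20])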
+ """ + assert len(img_tensors) > 0 + assert isinstance(img_tensors, (tuple, list)) + assert divisible >= 0 + img_heights = [] + img_widths = [] + for img in img_tensors: + assert img.shape[:-2] == img_tensors[0].shape[:-2] + img_heights.append(img.shape[-2]) + img_widths.append(img.shape[-1]) + max_h, max_w = max(img_heights), max(img_widths) + if divisible > 0: + max_h = (max_h + divisible - 1) // divisible * divisible + max_w = (max_w + divisible - 1) // divisible * divisible + + batch_imgs = [] + for img in img_tensors: + padding_size = [0, max_w - img.shape[-1], 0, max_h - img.shape[-2]] + batch_imgs.append(F.pad(img, padding_size, value=pad_value)) + return torch.stack(batch_imgs, dim=0).contiguous() diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/collate.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/collate.py new file mode 100644 index 0000000000..825272bbc3 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/collate.py @@ -0,0 +1,78 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import collections +import re + +import torch +from torch._six import string_classes + +np_str_obj_array_pattern = re.compile(r"[SaUO]") + +default_collate_err_msg_format = ( + "default_collate: batch must contain tensors, numpy arrays, numbers, " + "dicts or lists; found {}" +) + + +def collate_function(batch): + r"""Puts each data field into a tensor with outer dimension batch size""" + + elem = batch[0] + elem_type = type(elem) + if isinstance(elem, torch.Tensor): + out = None + if torch.utils.data.get_worker_info() is not None: + # If we're in a background process, concatenate directly into a + # shared memory tensor to avoid an extra copy + numel = sum([x.numel() for x in batch]) + storage = elem.storage()._new_shared(numel) + out = elem.new(storage) + return torch.stack(batch, 0, out=out) + elif elem_type.__module__ == "numpy" and elem_type.__name__ != "str_" and elem_type.__name__ != "string_": + elem = batch[0] + if elem_type.__name__ == "ndarray": + # array of string classes and object + if np_str_obj_array_pattern.search(elem.dtype.str) is not None: + raise TypeError(default_collate_err_msg_format.format(elem.dtype)) + + return batch + elif elem.shape == (): # scalars + return batch + elif isinstance(elem, float): + return torch.tensor(batch, dtype=torch.float64) + elif isinstance(elem, int): + return torch.tensor(batch) + elif isinstance(elem, string_classes): + return batch + elif isinstance(elem, collections.abc.Mapping): + return {key: collate_function([d[key] for d in batch]) for key in elem} + elif isinstance(elem, tuple) and hasattr(elem, "_fields"): # namedtuple + return elem_type(*(collate_function(samples) for samples in zip(*batch))) + elif isinstance(elem, collections.abc.Sequence): + transposed = zip(*batch) + return [collate_function(samples) for samples in transposed] + + raise TypeError(default_collate_err_msg_format.format(elem_type)) + + 
+def naive_collate(batch): + """Only collate dict value in to a list. E.g. meta data dict and img_info + dict will be collated.""" + + elem = batch[0] + if isinstance(elem, dict): + return {key: naive_collate([d[key] for d in batch]) for key in elem} + else: + return batch diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/__init__.py new file mode 100644 index 0000000000..b68b60e389 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/__init__.py @@ -0,0 +1,58 @@ +# Modifications Copyright 2021 - present, OpenDR European Project +# +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import copy +from opendr.engine.datasets import ExternalDataset + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.dataset.coco import CocoDataset +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.dataset.xml_dataset import XMLDataset + + +def build_dataset(cfg, dataset, class_names, mode, verbose=True): + dataset_cfg = copy.deepcopy(cfg) + supported_datasets = ['coco', 'voc'] + if isinstance(dataset, ExternalDataset): + if dataset.dataset_type.lower() not in supported_datasets: + raise UserWarning("ExternalDataset dataset_type must be one of: ", supported_datasets) + + if verbose: + print("Loading {} type dataset...".format(dataset.dataset_type)) + print("From {}".format(dataset.path)) + + if dataset.dataset_type.lower() == 'voc': + if mode == "train": + img_path = "{}/train/JPEGImages".format(dataset.path) + ann_path = "{}/train/Annotations".format(dataset.path) + else: + img_path = "{}/val/JPEGImages".format(dataset.path) + ann_path = "{}/val/Annotations".format(dataset.path) + dataset = XMLDataset(img_path=img_path, ann_path=ann_path, mode=mode, + class_names=class_names, **dataset_cfg) + + elif dataset.dataset_type.lower() == 'coco': + if mode == "train": + img_path = "{}/train2017".format(dataset.path) + ann_path = "{}/annotations/instances_train2017.json".format(dataset.path) + else: + img_path = "{}/val2017".format(dataset.path) + ann_path = "{}/annotations/instances_val2017.json".format(dataset.path) + dataset = CocoDataset(img_path=img_path, ann_path=ann_path, mode=mode, **dataset_cfg) + if verbose: + print("ExternalDataset loaded.") + return dataset + else: + raise ValueError("Dataset type {} not supported".format(type(dataset))) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/base.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/base.py new file mode 100644 index 0000000000..8a144a1d4a --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/base.py @@ -0,0 +1,124 @@ +# Copyright 2021 RangiLyu. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import random +from abc import ABCMeta, abstractmethod +from typing import Tuple + +import numpy as np +from torch.utils.data import Dataset + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform import Pipeline + + +class BaseDataset(Dataset, metaclass=ABCMeta): + """ + A base class of detection dataset. Referring from MMDetection. + A dataset should have images, annotations and preprocessing pipelines + NanoDet use [xmin, ymin, xmax, ymax] format for box and + [[x0,y0], [x1,y1] ... [xn,yn]] format for key points. + instance masks should decode into binary masks for each instance like + { + 'bbox': [xmin,ymin,xmax,ymax], + 'mask': mask + } + segmentation mask should decode into binary masks for each class. + Args: + img_path (str): image data folder + ann_path (str): annotation file path or folder + use_instance_mask (bool): load instance segmentation data + use_seg_mask (bool): load semantic segmentation data + use_keypoint (bool): load pose keypoint data + load_mosaic (bool): using mosaic data augmentation from yolov4 + mode (str): 'train' or 'val' or 'test' + multi_scale (Tuple[float, float]): Multi-scale factor range. + """ + + def __init__( + self, + img_path, + ann_path, + input_size, + pipeline, + keep_ratio=True, + use_instance_mask=False, + use_seg_mask=False, + use_keypoint=False, + load_mosaic=False, + mode="train", + multi_scale=None, + ): + assert mode in ["train", "val", "test"] + self.img_path = img_path + self.ann_path = ann_path + self.input_size = input_size + self.pipeline = Pipeline(pipeline, keep_ratio) + self.keep_ratio = keep_ratio + self.use_instance_mask = use_instance_mask + self.use_seg_mask = use_seg_mask + self.use_keypoint = use_keypoint + self.load_mosaic = load_mosaic + self.multi_scale = multi_scale + self.mode = mode + + print(ann_path) + self.data_info = self.get_data_info(ann_path) + + def __len__(self): + return len(self.data_info) + + def __getitem__(self, idx): + if self.mode == "val" or self.mode == "test": + return self.get_val_data(idx) + else: + while True: + data = self.get_train_data(idx) + if data is None: + idx = self.get_another_id() + continue + return data + + @staticmethod + def get_random_size( + scale_range: Tuple[float, float], image_size: Tuple[int, int] + ) -> Tuple[int, int]: + """ + Get random image shape by multi-scale factor and image_size. + Args: + scale_range (Tuple[float, float]): Multi-scale factor range. + Format in [(width, height), (width, height)] + image_size (Tuple[int, int]): Image size. Format in (width, height). 
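+
+        For example (illustrative), scale_range=(0.8, 1.2) with image_size=(320, 320)
+        draws a single factor for both sides, so a factor of 1.1 yields (352, 352).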
+ + Returns: + Tuple[int, int] + """ + assert len(scale_range) == 2 + scale_factor = random.uniform(*scale_range) + width = int(image_size[0] * scale_factor) + height = int(image_size[1] * scale_factor) + return width, height + + @abstractmethod + def get_data_info(self, ann_path): + pass + + @abstractmethod + def get_train_data(self, idx): + pass + + @abstractmethod + def get_val_data(self, idx): + pass + + def get_another_id(self): + return np.random.random_integers(0, len(self.data_info) - 1) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/coco.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/coco.py new file mode 100644 index 0000000000..a67ee7cb0c --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/coco.py @@ -0,0 +1,158 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import cv2 +import numpy as np +import torch +from pycocotools.coco import COCO + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.dataset.base import BaseDataset + + +class CocoDataset(BaseDataset): + def get_data_info(self, ann_path): + """ + Load basic information of dataset such as image path, label and so on. + :param ann_path: coco json file path + :return: image info: + [{'license': 2, + 'file_name': '000000000139.jpg', + 'coco_url': 'http://images.cocodataset.org/val2017/000000000139.jpg', + 'height': 426, + 'width': 640, + 'date_captured': '2013-11-21 01:34:01', + 'flickr_url': + 'http://farm9.staticflickr.com/8035/8024364858_9c41dc1666_z.jpg', + 'id': 139}, + ... 
+ ] + """ + self.coco_api = COCO(ann_path) + self.cat_ids = sorted(self.coco_api.getCatIds()) + self.cat2label = {cat_id: i for i, cat_id in enumerate(self.cat_ids)} + self.cats = self.coco_api.loadCats(self.cat_ids) + self.class_names = [cat["name"] for cat in self.cats] + self.img_ids = sorted(self.coco_api.imgs.keys()) + img_info = self.coco_api.loadImgs(self.img_ids) + return img_info + + def get_per_img_info(self, idx): + img_info = self.data_info[idx] + file_name = img_info["file_name"] + height = img_info["height"] + width = img_info["width"] + id = img_info["id"] + if not isinstance(id, int): + raise TypeError("Image id must be int.") + info = {"file_name": file_name, "height": height, "width": width, "id": id} + return info + + def get_img_annotation(self, idx): + """ + load per image annotation + :param idx: index in dataloader + :return: annotation dict + """ + img_id = self.img_ids[idx] + ann_ids = self.coco_api.getAnnIds([img_id]) + anns = self.coco_api.loadAnns(ann_ids) + gt_bboxes = [] + gt_labels = [] + gt_bboxes_ignore = [] + if self.use_instance_mask: + gt_masks = [] + if self.use_keypoint: + gt_keypoints = [] + for ann in anns: + if ann.get("ignore", False): + continue + x1, y1, w, h = ann["bbox"] + if ann["area"] <= 0 or w < 1 or h < 1: + continue + if ann["category_id"] not in self.cat_ids: + continue + bbox = [x1, y1, x1 + w, y1 + h] + if ann.get("iscrowd", False): + gt_bboxes_ignore.append(bbox) + else: + gt_bboxes.append(bbox) + gt_labels.append(self.cat2label[ann["category_id"]]) + if self.use_instance_mask: + gt_masks.append(self.coco_api.annToMask(ann)) + if self.use_keypoint: + gt_keypoints.append(ann["keypoints"]) + if gt_bboxes: + gt_bboxes = np.array(gt_bboxes, dtype=np.float32) + gt_labels = np.array(gt_labels, dtype=np.int64) + else: + gt_bboxes = np.zeros((0, 4), dtype=np.float32) + gt_labels = np.array([], dtype=np.int64) + if gt_bboxes_ignore: + gt_bboxes_ignore = np.array(gt_bboxes_ignore, dtype=np.float32) + else: + gt_bboxes_ignore = np.zeros((0, 4), dtype=np.float32) + annotation = dict( + bboxes=gt_bboxes, labels=gt_labels, bboxes_ignore=gt_bboxes_ignore + ) + if self.use_instance_mask: + annotation["masks"] = gt_masks + if self.use_keypoint: + if gt_keypoints: + annotation["keypoints"] = np.array(gt_keypoints, dtype=np.float32) + else: + annotation["keypoints"] = np.zeros((0, 51), dtype=np.float32) + return annotation + + def get_train_data(self, idx): + """ + Load image and annotation + :param idx: + :return: meta-data (a dict containing image, annotation and other information) + """ + img_info = self.get_per_img_info(idx) + file_name = img_info["file_name"] + image_path = os.path.join(self.img_path, file_name) + img = cv2.imread(image_path) + if img is None: + print("image {} read failed.".format(image_path)) + raise FileNotFoundError("Cant load image! Please check image path!") + ann = self.get_img_annotation(idx) + meta = dict( + img=img, img_info=img_info, gt_bboxes=ann["bboxes"], gt_labels=ann["labels"] + ) + if self.use_instance_mask: + meta["gt_masks"] = ann["masks"] + if self.use_keypoint: + meta["gt_keypoints"] = ann["keypoints"] + + input_size = self.input_size + if self.multi_scale: + input_size = self.get_random_size(self.multi_scale, input_size) + + meta = self.pipeline(self, meta, input_size) + + meta["img"] = torch.from_numpy(meta["img"].transpose(2, 0, 1)) + return meta + + def get_val_data(self, idx): + """ + Currently no difference from get_train_data. + Not support TTA(testing time augmentation) yet. 
+ :param idx: + :return: + """ + # TODO: support TTA + return self.get_train_data(idx) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/xml_dataset.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/xml_dataset.py new file mode 100644 index 0000000000..c5778e1302 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/dataset/xml_dataset.py @@ -0,0 +1,157 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import os +import time +import xml.etree.ElementTree as ET +from collections import defaultdict + +from pycocotools.coco import COCO + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.dataset.coco import CocoDataset + + +def get_file_list(path, type=".xml"): + file_names = [] + for maindir, subdir, file_name_list in os.walk(path): + for filename in file_name_list: + apath = os.path.join(maindir, filename) + ext = os.path.splitext(apath)[1] + if ext == type: + file_names.append(filename) + return file_names + + +class CocoXML(COCO): + def __init__(self, annotation): + """ + Constructor of Microsoft COCO helper class for + reading and visualizing annotations. + :param annotation: annotation dict + :return: + """ + # load dataset + self.dataset, self.anns, self.cats, self.imgs = dict(), dict(), dict(), dict() + self.imgToAnns, self.catToImgs = defaultdict(list), defaultdict(list) + dataset = annotation + assert type(dataset) == dict, "annotation file format {} not supported".format( + type(dataset) + ) + self.dataset = dataset + self.createIndex() + + +class XMLDataset(CocoDataset): + def __init__(self, class_names, **kwargs): + self.class_names = class_names + super(XMLDataset, self).__init__(**kwargs) + + def xml_to_coco(self, ann_path): + """ + convert xml annotations to coco_api + :param ann_path: + :return: + """ + logging.info("loading annotations into memory...") + tic = time.time() + ann_file_names = get_file_list(ann_path, type=".xml") + logging.info("Found {} annotation files.".format(len(ann_file_names))) + image_info = [] + categories = [] + annotations = [] + for idx, supercat in enumerate(self.class_names): + categories.append( + {"supercategory": supercat, "id": idx + 1, "name": supercat} + ) + ann_id = 1 + for idx, xml_name in enumerate(ann_file_names): + tree = ET.parse(os.path.join(ann_path, xml_name)) + root = tree.getroot() + file_name = root.find("filename").text + width = int(root.find("size").find("width").text) + height = int(root.find("size").find("height").text) + info = { + "file_name": file_name, + "height": height, + "width": width, + "id": idx + 1, + } + image_info.append(info) + for _object in root.findall("object"): + category = _object.find("name").text + if category not in self.class_names: + logging.warning( + "WARNING! {} is not in class_names! 
" + "Pass this box annotation.".format(category) + ) + continue + for cat in categories: + if category == cat["name"]: + cat_id = cat["id"] + xmin = int(_object.find("bndbox").find("xmin").text) + ymin = int(_object.find("bndbox").find("ymin").text) + xmax = int(_object.find("bndbox").find("xmax").text) + ymax = int(_object.find("bndbox").find("ymax").text) + w = xmax - xmin + h = ymax - ymin + if w < 0 or h < 0: + logging.warning( + "WARNING! Find error data in file {}! Box w and " + "h should > 0. Pass this box annotation.".format(xml_name) + ) + continue + coco_box = [max(xmin, 0), max(ymin, 0), min(w, width), min(h, height)] + ann = { + "image_id": idx + 1, + "bbox": coco_box, + "category_id": cat_id, + "iscrowd": 0, + "id": ann_id, + "area": coco_box[2] * coco_box[3], + } + annotations.append(ann) + ann_id += 1 + + coco_dict = { + "images": image_info, + "categories": categories, + "annotations": annotations, + } + logging.info( + "Load {} xml files and {} boxes".format(len(image_info), len(annotations)) + ) + logging.info("Done (t={:0.2f}s)".format(time.time() - tic)) + return coco_dict + + def get_data_info(self, ann_path): + """ + Load basic information of dataset such as image path, label and so on. + :param ann_path: coco json file path + :return: image info: + [{'file_name': '000000000139.jpg', + 'height': 426, + 'width': 640, + 'id': 139}, + ... + ] + """ + coco_dict = self.xml_to_coco(ann_path) + self.coco_api = CocoXML(coco_dict) + self.cat_ids = sorted(self.coco_api.getCatIds()) + self.cat2label = {cat_id: i for i, cat_id in enumerate(self.cat_ids)} + self.cats = self.coco_api.loadCats(self.cat_ids) + self.img_ids = sorted(self.coco_api.imgs.keys()) + img_info = self.coco_api.loadImgs(self.img_ids) + return img_info diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/__init__.py new file mode 100644 index 0000000000..c30ae7665b --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .pipeline import Pipeline + +__all__ = ["Pipeline"] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/color.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/color.py new file mode 100644 index 0000000000..907b533797 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/color.py @@ -0,0 +1,69 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import random + +import cv2 +import numpy as np + + +def random_brightness(img, delta): + img += random.uniform(-delta, delta) + return img + + +def random_contrast(img, alpha_low, alpha_up): + img *= random.uniform(alpha_low, alpha_up) + return img + + +def random_saturation(img, alpha_low, alpha_up): + hsv_img = cv2.cvtColor(img.astype(np.float32), cv2.COLOR_BGR2HSV) + hsv_img[..., 1] *= random.uniform(alpha_low, alpha_up) + img = cv2.cvtColor(hsv_img, cv2.COLOR_HSV2BGR) + return img + + +def normalize(meta, mean, std): + img = meta["img"].astype(np.float32) + mean = np.array(mean, dtype=np.float64).reshape(1, -1) + stdinv = 1 / np.array(std, dtype=np.float64).reshape(1, -1) + cv2.subtract(img, mean, img) + cv2.multiply(img, stdinv, img) + meta["img"] = img + return meta + + +def _normalize(img, mean, std): + mean = np.array(mean, dtype=np.float32).reshape(1, 1, 3) / 255 + std = np.array(std, dtype=np.float32).reshape(1, 1, 3) / 255 + img = (img - mean) / std + return img + + +def color_aug_and_norm(meta, kwargs): + img = meta["img"].astype(np.float32) / 255 + + if "brightness" in kwargs and random.randint(0, 1): + img = random_brightness(img, kwargs["brightness"]) + + if "contrast" in kwargs and random.randint(0, 1): + img = random_contrast(img, *kwargs["contrast"]) + + if "saturation" in kwargs and random.randint(0, 1): + img = random_saturation(img, *kwargs["saturation"]) + + img = _normalize(img, *kwargs["normalize"]) + meta["img"] = img + return meta diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/pipeline.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/pipeline.py new file mode 100644 index 0000000000..24acdb1880 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/pipeline.py @@ -0,0 +1,59 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import functools +import warnings +from typing import Dict, Tuple + +from torch.utils.data import Dataset + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform.color import color_aug_and_norm +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform.warp import ShapeTransform, warp_and_resize + + +class LegacyPipeline: + def __init__(self, cfg, keep_ratio): + warnings.warn( + "Deprecated warning! Pipeline from nanodet v0.x has been deprecated," + "Please use new Pipeline and update your config!" 
+ ) + self.warp = functools.partial( + warp_and_resize, warp_kwargs=cfg, keep_ratio=keep_ratio + ) + self.color = functools.partial(color_aug_and_norm, kwargs=cfg) + + def __call__(self, meta, dst_shape): + meta = self.warp(meta, dst_shape=dst_shape) + meta = self.color(meta=meta) + return meta + + +class Pipeline: + """Data process pipeline. Apply augmentation and pre-processing on + meta_data from dataset. + + Args: + cfg (Dict): Data pipeline config. + keep_ratio (bool): Whether to keep aspect ratio when resizing image. + + """ + + def __init__(self, cfg: Dict, keep_ratio: bool): + self.shape_transform = ShapeTransform(keep_ratio, **cfg) + self.color = functools.partial(color_aug_and_norm, kwargs=cfg) + + def __call__(self, dataset: Dataset, meta: Dict, dst_shape: Tuple[int, int]): + meta = self.shape_transform(meta, dst_shape=dst_shape) + meta = self.color(meta=meta) + return meta diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/warp.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/warp.py new file mode 100644 index 0000000000..6ffd1b66d3 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/data/transform/warp.py @@ -0,0 +1,330 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
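+
+# A rough sketch (illustrative values; key names follow the configs in this patch) of
+# how a YAML `data.train.pipeline` block reaches the geometric helpers in this module:
+#
+#   pipeline_cfg = {"perspective": 0.0, "scale": (0.6, 1.4), "stretch": ((0.8, 1.2), (0.8, 1.2)),
+#                   "rotation": 0, "shear": 0, "translate": 0.2, "flip": 0.5,
+#                   "normalize": ([103.53, 116.28, 123.675], [57.375, 57.12, 58.395])}
+#   transform = ShapeTransform(keep_ratio=False, **pipeline_cfg)  # extra keys are absorbed by **kwargs
+#   meta = transform({"img": img, "gt_bboxes": boxes}, dst_shape=(416, 416))
+#   # meta["img"] is the warped image, meta["warp_matrix"] the 3x3 transform, and
+#   # meta["gt_bboxes"] the boxes warped and clipped to the 416x416 canvas.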
+ +import math +import random +from typing import Dict, Optional, Tuple + +import cv2 +import numpy as np + + +def get_flip_matrix(prob=0.5): + F = np.eye(3) + if random.random() < prob: + F[0, 0] = -1 + return F + + +def get_perspective_matrix(perspective=0.0): + """ + + :param perspective: + :return: + """ + P = np.eye(3) + P[2, 0] = random.uniform(-perspective, perspective) # x perspective (about y) + P[2, 1] = random.uniform(-perspective, perspective) # y perspective (about x) + return P + + +def get_rotation_matrix(degree=0.0): + """ + + :param degree: + :return: + """ + R = np.eye(3) + a = random.uniform(-degree, degree) + R[:2] = cv2.getRotationMatrix2D(angle=a, center=(0, 0), scale=1) + return R + + +def get_scale_matrix(ratio=(1, 1)): + """ + + :param ratio: + """ + Scl = np.eye(3) + scale = random.uniform(*ratio) + Scl[0, 0] *= scale + Scl[1, 1] *= scale + return Scl + + +def get_stretch_matrix(width_ratio=(1, 1), height_ratio=(1, 1)): + """ + + :param width_ratio: + :param height_ratio: + """ + Str = np.eye(3) + Str[0, 0] *= random.uniform(*width_ratio) + Str[1, 1] *= random.uniform(*height_ratio) + return Str + + +def get_shear_matrix(degree): + """ + + :param degree: + :return: + """ + Sh = np.eye(3) + Sh[0, 1] = math.tan( + random.uniform(-degree, degree) * math.pi / 180 + ) # x shear (deg) + Sh[1, 0] = math.tan( + random.uniform(-degree, degree) * math.pi / 180 + ) # y shear (deg) + return Sh + + +def get_translate_matrix(translate, width, height): + """ + + :param translate: + :return: + """ + T = np.eye(3) + T[0, 2] = random.uniform(0.5 - translate, 0.5 + translate) * width # x translation + T[1, 2] = random.uniform(0.5 - translate, 0.5 + translate) * height # y translation + return T + + +def get_resize_matrix(raw_shape, dst_shape, keep_ratio): + """ + Get resize matrix for resizing raw img to input size + :param raw_shape: (width, height) of raw image + :param dst_shape: (width, height) of input image + :param keep_ratio: whether keep original ratio + :return: 3x3 Matrix + """ + r_w, r_h = raw_shape + d_w, d_h = dst_shape + Rs = np.eye(3) + if keep_ratio: + C = np.eye(3) + C[0, 2] = -r_w / 2 + C[1, 2] = -r_h / 2 + + if r_w / r_h < d_w / d_h: + ratio = d_h / r_h + else: + ratio = d_w / r_w + Rs[0, 0] *= ratio + Rs[1, 1] *= ratio + + T = np.eye(3) + T[0, 2] = 0.5 * d_w + T[1, 2] = 0.5 * d_h + return T @ Rs @ C + else: + Rs[0, 0] *= d_w / r_w + Rs[1, 1] *= d_h / r_h + return Rs + + +def warp_and_resize( + meta: Dict, + warp_kwargs: Dict, + dst_shape: Tuple[int, int], + keep_ratio: bool = True, +): + # TODO: background, type + raw_img = meta["img"] + height = raw_img.shape[0] # shape(h,w,c) + width = raw_img.shape[1] + + # center + C = np.eye(3) + C[0, 2] = -width / 2 + C[1, 2] = -height / 2 + + # do not change the order of mat mul + if "perspective" in warp_kwargs and random.randint(0, 1): + P = get_perspective_matrix(warp_kwargs["perspective"]) + C = P @ C + if "scale" in warp_kwargs and random.randint(0, 1): + Scl = get_scale_matrix(warp_kwargs["scale"]) + C = Scl @ C + if "stretch" in warp_kwargs and random.randint(0, 1): + Str = get_stretch_matrix(*warp_kwargs["stretch"]) + C = Str @ C + if "rotation" in warp_kwargs and random.randint(0, 1): + R = get_rotation_matrix(warp_kwargs["rotation"]) + C = R @ C + if "shear" in warp_kwargs and random.randint(0, 1): + Sh = get_shear_matrix(warp_kwargs["shear"]) + C = Sh @ C + if "flip" in warp_kwargs: + F = get_flip_matrix(warp_kwargs["flip"]) + C = F @ C + if "translate" in warp_kwargs and random.randint(0, 1): + T = 
get_translate_matrix(warp_kwargs["translate"], width, height) + else: + T = get_translate_matrix(0, width, height) + M = T @ C + # M = T @ Sh @ R @ Str @ P @ C + ResizeM = get_resize_matrix((width, height), dst_shape, keep_ratio) + M = ResizeM @ M + img = cv2.warpPerspective(raw_img, M, dsize=tuple(dst_shape)) + meta["img"] = img + meta["warp_matrix"] = M + if "gt_bboxes" in meta: + boxes = meta["gt_bboxes"] + meta["gt_bboxes"] = warp_boxes(boxes, M, dst_shape[0], dst_shape[1]) + if "gt_masks" in meta: + for i, mask in enumerate(meta["gt_masks"]): + meta["gt_masks"][i] = cv2.warpPerspective(mask, M, dsize=tuple(dst_shape)) + + return meta + + +def warp_boxes(boxes, M, width, height): + n = len(boxes) + if n: + # warp points + xy = np.ones((n * 4, 3)) + xy[:, :2] = boxes[:, [0, 1, 2, 3, 0, 3, 2, 1]].reshape( + n * 4, 2 + ) # x1y1, x2y2, x1y2, x2y1 + xy = xy @ M.T # transform + xy = (xy[:, :2] / xy[:, 2:3]).reshape(n, 8) # rescale + # create new boxes + x = xy[:, [0, 2, 4, 6]] + y = xy[:, [1, 3, 5, 7]] + xy = np.concatenate((x.min(1), y.min(1), x.max(1), y.max(1))).reshape(4, n).T + # clip boxes + xy[:, [0, 2]] = xy[:, [0, 2]].clip(0, width) + xy[:, [1, 3]] = xy[:, [1, 3]].clip(0, height) + return xy.astype(np.float32) + else: + return boxes + + +def get_minimum_dst_shape( + src_shape: Tuple[int, int], + dst_shape: Tuple[int, int], + divisible: Optional[int] = None, +) -> Tuple[int, int]: + """Calculate minimum dst shape""" + src_w, src_h = src_shape + dst_w, dst_h = dst_shape + + if src_w / src_h < dst_w / dst_h: + ratio = dst_h / src_h + else: + ratio = dst_w / src_w + + dst_w = int(ratio * src_w) + dst_h = int(ratio * src_h) + + if divisible and divisible > 0: + dst_w = max(divisible, int((dst_w + divisible - 1) // divisible * divisible)) + dst_h = max(divisible, int((dst_h + divisible - 1) // divisible * divisible)) + return dst_w, dst_h + + +class ShapeTransform: + """Shape transforms including resize, random perspective, random scale, + random stretch, random rotation, random shear, random translate, + and random flip. + + Args: + keep_ratio: Whether to keep aspect ratio of the image. + divisible: Make image height and width is divisible by a number. + perspective: Random perspective factor. + scale: Random scale ratio. + stretch: Width and height stretch ratio range. + rotation: Random rotate degree. + shear: Random shear degree. + translate: Random translate ratio. + flip: Random flip probability. 
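+
+    Example (illustrative; uses a divisible value not set by the shipped configs):
+    with keep_ratio=True and divisible=32, a 640x480 image requested at
+    dst_shape=(320, 320) keeps its aspect ratio as (320, 240), which is then
+    rounded up to (320, 256) by get_minimum_dst_shape before the resize matrix
+    is built.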
+ """ + + def __init__( + self, + keep_ratio, + divisible=0, + perspective=0.0, + scale=(1, 1), + stretch=((1, 1), (1, 1)), + rotation=0.0, + shear=0.0, + translate=0.0, + flip=0.0, + **kwargs + ): + self.keep_ratio = keep_ratio + self.divisible = divisible + self.perspective = perspective + self.scale_ratio = scale + self.stretch_ratio = stretch + self.rotation_degree = rotation + self.shear_degree = shear + self.flip_prob = flip + self.translate_ratio = translate + + def __call__(self, meta_data, dst_shape): + raw_img = meta_data["img"] + height = raw_img.shape[0] # shape(h,w,c) + width = raw_img.shape[1] + + # center + C = np.eye(3) + C[0, 2] = -width / 2 + C[1, 2] = -height / 2 + + P = get_perspective_matrix(self.perspective) + C = P @ C + + Scl = get_scale_matrix(self.scale_ratio) + C = Scl @ C + + Str = get_stretch_matrix(*self.stretch_ratio) + C = Str @ C + + R = get_rotation_matrix(self.rotation_degree) + C = R @ C + + Sh = get_shear_matrix(self.shear_degree) + C = Sh @ C + + F = get_flip_matrix(self.flip_prob) + C = F @ C + + T = get_translate_matrix(self.translate_ratio, width, height) + M = T @ C + + if self.keep_ratio: + dst_shape = get_minimum_dst_shape( + (width, height), dst_shape, self.divisible + ) + + ResizeM = get_resize_matrix((width, height), dst_shape, self.keep_ratio) + M = ResizeM @ M + img = cv2.warpPerspective(raw_img, M, dsize=tuple(dst_shape)) + meta_data["img"] = img + meta_data["warp_matrix"] = M + if "gt_bboxes" in meta_data: + boxes = meta_data["gt_bboxes"] + meta_data["gt_bboxes"] = warp_boxes(boxes, M, dst_shape[0], dst_shape[1]) + if "gt_masks" in meta_data: + for i, mask in enumerate(meta_data["gt_masks"]): + meta_data["gt_masks"][i] = cv2.warpPerspective( + mask, M, dsize=tuple(dst_shape) + ) + + return meta_data diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/evaluator/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/evaluator/__init__.py new file mode 100644 index 0000000000..2e2a2513e9 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/evaluator/__init__.py @@ -0,0 +1,25 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import copy + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.evaluator.coco_detection import CocoDetectionEvaluator + + +def build_evaluator(cfg, dataset): + evaluator_cfg = copy.deepcopy(cfg) + name = evaluator_cfg.pop("name") + if name == "CocoDetectionEvaluator": + return CocoDetectionEvaluator(dataset) + else: + raise NotImplementedError diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/evaluator/coco_detection.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/evaluator/coco_detection.py new file mode 100644 index 0000000000..c408d996a6 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/evaluator/coco_detection.py @@ -0,0 +1,151 @@ +# Copyright 2021 RangiLyu. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import contextlib +import copy +import io +import itertools +import json +import logging +import os +import warnings + +import numpy as np +from pycocotools.cocoeval import COCOeval +from tabulate import tabulate + +logger = logging.getLogger("NanoDet") + + +def xyxy2xywh(bbox): + """ + change bbox to coco format + :param bbox: [x1, y1, x2, y2] + :return: [x, y, w, h] + """ + return [ + bbox[0], + bbox[1], + bbox[2] - bbox[0], + bbox[3] - bbox[1], + ] + + +class CocoDetectionEvaluator: + def __init__(self, dataset): + assert hasattr(dataset, "coco_api") + self.class_names = dataset.class_names + self.coco_api = dataset.coco_api + self.cat_ids = dataset.cat_ids + self.metric_names = ["mAP", "AP_50", "AP_75", "AP_small", "AP_m", "AP_l"] + + def results2json(self, results): + """ + results: {image_id: {label: [bboxes...] } } + :return coco json format: {image_id: + category_id: + bbox: + score: } + """ + json_results = [] + for image_id, dets in results.items(): + for label, bboxes in dets.items(): + category_id = self.cat_ids[label] + for bbox in bboxes: + score = float(bbox[4]) + detection = dict( + image_id=int(image_id), + category_id=int(category_id), + bbox=xyxy2xywh(bbox), + score=score, + ) + json_results.append(detection) + return json_results + + def evaluate(self, results, save_dir): # rank=-1 + results_json = self.results2json(results) + if len(results_json) == 0: + warnings.warn( + "Detection result is empty! Please check whether " + "training set is too small (need to increase val_interval " + "in config and train more epochs). Or check annotation " + "correctness." 
+ ) + empty_eval_results = {} + for key in self.metric_names: + empty_eval_results[key] = 0 + return empty_eval_results + # json_path = os.path.join(save_dir, "results{}.json".format(rank)) + json_path = os.path.join(save_dir, "results.json") + json.dump(results_json, open(json_path, "w")) + coco_dets = self.coco_api.loadRes(json_path) + coco_eval = COCOeval( + copy.deepcopy(self.coco_api), copy.deepcopy(coco_dets), "bbox" + ) + coco_eval.evaluate() + coco_eval.accumulate() + + # use logger to log coco eval results + redirect_string = io.StringIO() + with contextlib.redirect_stdout(redirect_string): + coco_eval.summarize() + logger.info("\n" + redirect_string.getvalue()) + + # print per class AP + headers = ["class", "AP50", "mAP"] + colums = 6 + per_class_ap50s = [] + per_class_maps = [] + precisions = coco_eval.eval["precision"] + # dimension of precisions: [TxRxKxAxM] + # precision has dims (iou, recall, cls, area range, max dets) + assert len(self.class_names) == precisions.shape[2] + + for idx, name in enumerate(self.class_names): + # area range index 0: all area ranges + # max dets index -1: typically 100 per image + precision_50 = precisions[0, :, idx, 0, -1] + precision_50 = precision_50[precision_50 > -1] + ap50 = np.mean(precision_50) if precision_50.size else float("nan") + per_class_ap50s.append(float(ap50 * 100)) + + precision = precisions[:, :, idx, 0, -1] + precision = precision[precision > -1] + ap = np.mean(precision) if precision.size else float("nan") + per_class_maps.append(float(ap * 100)) + + num_cols = min(colums, len(self.class_names) * len(headers)) + flatten_results = [] + for name, ap50, mAP in zip(self.class_names, per_class_ap50s, per_class_maps): + flatten_results += [name, ap50, mAP] + + row_pair = itertools.zip_longest( + *[flatten_results[i::num_cols] for i in range(num_cols)] + ) + table_headers = headers * (num_cols // len(headers)) + table = tabulate( + row_pair, + tablefmt="pipe", + floatfmt=".1f", + headers=table_headers, + numalign="left", + ) + logger.info("\n" + table) + + aps = coco_eval.stats[:6] + eval_results = {} + for k, v in zip(self.metric_names, aps): + eval_results[k] = v + return eval_results diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/inferencer/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/inferencer/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/inferencer/utilities.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/inferencer/utilities.py new file mode 100644 index 0000000000..b20b891d58 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/inferencer/utilities.py @@ -0,0 +1,69 @@ +# Modifications Copyright 2021 - present, OpenDR European Project +# +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
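[Editorial note: the evaluator above converts internal [x1, y1, x2, y2] boxes into COCO's [x, y, w, h] format before writing results.json. A minimal standalone sketch of that conversion, with made-up box values purely for illustration:]

    # Standalone illustration of the xyxy -> xywh conversion used by results2json.
    def xyxy2xywh(bbox):
        # [x1, y1, x2, y2] -> [x, y, w, h], as expected by the COCO API
        return [bbox[0], bbox[1], bbox[2] - bbox[0], bbox[3] - bbox[1]]

    det = [10.0, 20.0, 110.0, 70.0, 0.87]   # x1, y1, x2, y2, confidence score
    coco_entry = {
        "image_id": 1,                       # dummy ids for illustration only
        "category_id": 3,
        "bbox": xyxy2xywh(det),              # [10.0, 20.0, 100.0, 50.0]
        "score": float(det[4]),
    }
    print(coco_entry)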
+ +import os +import torch + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.batch_process import stack_batch_img +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.collate import naive_collate +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform import Pipeline +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.arch import build_model + +image_ext = [".jpg", ".jpeg", ".webp", ".bmp", ".png"] +video_ext = ["mp4", "mov", "avi", "mkv"] + + +class Predictor(object): + def __init__(self, cfg, model, device="cuda"): + self.cfg = cfg + self.device = device + + if self.cfg.model.arch.backbone.name == "RepVGG": + deploy_config = self.cfg.model + deploy_config.arch.backbone.update({"deploy": True}) + deploy_model = build_model(deploy_config) + from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone.repvgg\ + import repvgg_det_model_convert + model = repvgg_det_model_convert(model, deploy_model) + + self.model = model.to(device).eval() + + self.pipeline = Pipeline(self.cfg.data.val.pipeline, self.cfg.data.val.keep_ratio) + + def inference(self, img, verbose=True): + img_info = {"id": 0} + height, width = img.shape[:2] + img_info["height"] = height + img_info["width"] = width + meta = dict(img_info=img_info, raw_img=img, img=img) + meta = self.pipeline(None, meta, self.cfg.data.val.input_size) + meta["img"] = torch.from_numpy(meta["img"].transpose(2, 0, 1)).to(self.device) + meta = naive_collate([meta]) + meta["img"] = stack_batch_img(meta["img"], divisible=32) + with torch.no_grad(): + results = self.model.inference(meta, verbose) + return meta, results + + +def get_image_list(path): + image_names = [] + for maindir, subdir, file_name_list in os.walk(path): + for filename in file_name_list: + apath = os.path.join(maindir, filename) + ext = os.path.splitext(apath)[1] + if ext in image_ext: + image_names.append(apath) + return image_names diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/__init__.py new file mode 100644 index 0000000000..f0b10b8a01 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/__init__.py @@ -0,0 +1,42 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
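[Editorial note: the Predictor above hands the raw OpenCV frame to the model as a CHW tensor. A tiny standalone sketch of that step (dummy data, no OpenDR imports), mirroring the meta["img"].transpose(2, 0, 1) followed by torch.from_numpy in Predictor.inference:]

    import numpy as np
    import torch

    # Dummy HxWxC image standing in for the OpenCV frame passed to Predictor.inference.
    img = np.random.randint(0, 255, (320, 320, 3), dtype=np.uint8).astype(np.float32)

    # Same handoff as in Predictor.inference: HWC numpy array -> CHW torch tensor.
    tensor = torch.from_numpy(img.transpose(2, 0, 1))
    print(tensor.shape)  # torch.Size([3, 320, 320])

In actual use, Predictor(cfg, model).inference(frame) wraps this step together with the validation Pipeline, naive_collate and stack_batch_img batching shown above.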
+ +import copy +import warnings + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.arch.nanodet_plus import NanoDetPlus +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.arch.one_stage_detector import OneStageDetector + + +def build_model(model_cfg): + model_cfg = copy.deepcopy(model_cfg) + name = model_cfg.arch.pop("name") + if name == "GFL": + warnings.warn( + "Model architecture name is changed to 'OneStageDetector'. " + "The name 'GFL' is deprecated, please change the model->arch->name " + "in your YAML config file to OneStageDetector." + ) + model = OneStageDetector( + model_cfg.arch.backbone, model_cfg.arch.fpn, model_cfg.arch.head + ) + elif name == "OneStageDetector": + model = OneStageDetector( + model_cfg.arch.backbone, model_cfg.arch.fpn, model_cfg.arch.head + ) + elif name == "NanoDetPlus": + model = NanoDetPlus(**model_cfg.arch) + else: + raise NotImplementedError + return model diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/nanodet_plus.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/nanodet_plus.py new file mode 100644 index 0000000000..518c0af01b --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/nanodet_plus.py @@ -0,0 +1,57 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
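[Editorial note: to make the dispatch in build_model above concrete, this is roughly the shape of cfg.model.arch that it consumes. The real object is a config node with attribute access (a plain dict would not support model_cfg.arch), the nested values are elided, and the assumption that the nested entries carry their own "name" keys resolved by build_fpn/build_head follows the same factory pattern but is not shown in this patch:]

    # Illustrative structure only; actual values live in the tool's YAML configs.
    arch_cfg = {
        "name": "NanoDetPlus",   # popped by build_model to select the class
        "backbone": ...,         # forwarded to build_backbone (details elided)
        "fpn": ...,              # forwarded to build_fpn (details elided)
        "aux_head": ...,         # forwarded to build_head (details elided)
        "head": ...,             # forwarded to build_head (details elided)
        "detach_epoch": 10,      # epoch from which aux features are detached
    }
    # build_model pops "name" and forwards the rest, i.e. NanoDetPlus(**arch_cfg).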
+ +import copy + +import torch + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head import build_head +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.arch.one_stage_detector import OneStageDetector + + +class NanoDetPlus(OneStageDetector): + def __init__( + self, + backbone, + fpn, + aux_head, + head, + detach_epoch=0, + ): + super(NanoDetPlus, self).__init__( + backbone_cfg=backbone, fpn_cfg=fpn, head_cfg=head + ) + self.aux_fpn = copy.deepcopy(self.fpn) + self.aux_head = build_head(aux_head) + self.detach_epoch = detach_epoch + + def forward_train(self, gt_meta): + img = gt_meta["img"] + feat = self.backbone(img) + fpn_feat = self.fpn(feat) + if self.epoch >= self.detach_epoch: + aux_fpn_feat = self.aux_fpn([f.detach() for f in feat]) + dual_fpn_feat = ( + torch.cat([f.detach(), aux_f], dim=1) + for f, aux_f in zip(fpn_feat, aux_fpn_feat) + ) + else: + aux_fpn_feat = self.aux_fpn(feat) + dual_fpn_feat = ( + torch.cat([f, aux_f], dim=1) for f, aux_f in zip(fpn_feat, aux_fpn_feat) + ) + head_out = self.head(fpn_feat) + aux_head_out = self.aux_head(dual_fpn_feat) + loss, loss_states = self.head.loss(head_out, gt_meta, aux_preds=aux_head_out) + return head_out, loss, loss_states diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/one_stage_detector.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/one_stage_detector.py new file mode 100644 index 0000000000..e1ce7a650e --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/arch/one_stage_detector.py @@ -0,0 +1,59 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
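[Editorial note: a small standalone illustration of the dual-FPN concatenation in NanoDetPlus.forward_train above. Once epoch >= detach_epoch, the auxiliary FPN runs on detached backbone features and each auxiliary level is concatenated channel-wise with the detached main FPN level, so the auxiliary head's gradients do not reach the shared backbone/FPN. Shapes below are arbitrary:]

    import torch

    # Two fake FPN levels from the main and auxiliary branches (N, C, H, W).
    fpn_feat = [torch.randn(1, 96, 40, 40), torch.randn(1, 96, 20, 20)]
    aux_fpn_feat = [torch.randn(1, 96, 40, 40), torch.randn(1, 96, 20, 20)]

    # After detach_epoch: detach the main features, then stack channels per level
    # so the auxiliary head sees both branches without backpropagating into them.
    dual_fpn_feat = [
        torch.cat([f.detach(), aux_f], dim=1)   # -> (1, 192, H, W) per level
        for f, aux_f in zip(fpn_feat, aux_fpn_feat)
    ]
    print([t.shape for t in dual_fpn_feat])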
+ +import torch +import torch.nn as nn + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone import build_backbone +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.fpn import build_fpn +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head import build_head + + +class OneStageDetector(nn.Module): + def __init__( + self, + backbone_cfg, + fpn_cfg=None, + head_cfg=None, + ): + super(OneStageDetector, self).__init__() + self.backbone = build_backbone(backbone_cfg) + if fpn_cfg is not None: + self.fpn = build_fpn(fpn_cfg) + if head_cfg is not None: + self.head = build_head(head_cfg) + self.epoch = 0 + + def forward(self, x): + x = self.backbone(x) + if hasattr(self, "fpn"): + x = self.fpn(x) + if hasattr(self, "head"): + x = self.head(x) + return x + + def inference(self, meta, verbose=True): + with torch.no_grad(): + preds = self(meta["img"]) + results = self.head.post_process(preds, meta) + return results + + def forward_train(self, gt_meta): + preds = self(gt_meta["img"]) + loss, loss_states = self.head.loss(preds, gt_meta) + + return preds, loss, loss_states + + def set_epoch(self, epoch): + self.epoch = epoch diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/__init__.py new file mode 100755 index 0000000000..414b8c245f --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/__init__.py @@ -0,0 +1,44 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
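[Editorial note: the OneStageDetector above is simply a backbone -> FPN -> head pipeline with optional fpn/head attributes. A minimal mock, unrelated to the real configs, showing how the composition in its forward() works; the dummy modules and shapes are invented for illustration:]

    import torch
    import torch.nn as nn

    class DummyBackbone(nn.Module):
        def forward(self, x):                 # returns multi-scale features
            return [x, nn.functional.avg_pool2d(x, 2)]

    class DummyFPN(nn.Module):
        def forward(self, feats):             # fuses the scales (identity here)
            return feats

    class DummyHead(nn.Module):
        def forward(self, feats):             # produces per-level outputs
            return [f.mean() for f in feats]

    backbone, fpn, head = DummyBackbone(), DummyFPN(), DummyHead()
    x = torch.randn(1, 3, 64, 64)
    out = head(fpn(backbone(x)))              # same chain as OneStageDetector.forward
    print(out)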
+ +import copy + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone.custom_csp import CustomCspNet +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone.efficientnet_lite import EfficientNetLite +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone.ghostnet import GhostNet +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone.mobilenetv2 import MobileNetV2 +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone.repvgg import RepVGG +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone.resnet import ResNet +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone.shufflenetv2 import ShuffleNetV2 + + +def build_backbone(cfg): + backbone_cfg = copy.deepcopy(cfg) + name = backbone_cfg.pop("name") + if name == "ResNet": + return ResNet(**backbone_cfg) + elif name == "ShuffleNetV2": + return ShuffleNetV2(**backbone_cfg) + elif name == "GhostNet": + return GhostNet(**backbone_cfg) + elif name == "MobileNetV2": + return MobileNetV2(**backbone_cfg) + elif name == "EfficientNetLite": + return EfficientNetLite(**backbone_cfg) + elif name == "CustomCspNet": + return CustomCspNet(**backbone_cfg) + elif name == "RepVGG": + return RepVGG(**backbone_cfg) + else: + raise NotImplementedError diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/custom_csp.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/custom_csp.py new file mode 100755 index 0000000000..17cd08402e --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/custom_csp.py @@ -0,0 +1,168 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
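[Editorial note: the build_backbone factory above deep-copies the config, pops the "name" key and forwards the remaining keys as keyword arguments. A hedged usage sketch; the keyword values are chosen only for illustration, and pretrain=False avoids the ImageNet weight download:]

    from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone import build_backbone

    backbone_cfg = {
        "name": "ShuffleNetV2",   # selects the ShuffleNetV2 class defined in this package
        "model_size": "1.0x",
        "out_stages": (2, 3, 4),  # must be a subset of (2, 3, 4) for this backbone
        "pretrain": False,        # skip loading pretrained weights for this sketch
    }
    backbone = build_backbone(backbone_cfg)   # -> ShuffleNetV2(model_size="1.0x", ...)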
+ +import torch +import torch.nn as nn + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import ConvModule + + +class TinyResBlock(nn.Module): + def __init__( + self, in_channels, kernel_size, norm_cfg, activation, res_type="concat" + ): + super(TinyResBlock, self).__init__() + assert in_channels % 2 == 0 + assert res_type in ["concat", "add"] + self.res_type = res_type + self.in_conv = ConvModule( + in_channels, + in_channels // 2, + kernel_size, + padding=(kernel_size - 1) // 2, + norm_cfg=norm_cfg, + activation=activation, + ) + self.mid_conv = ConvModule( + in_channels // 2, + in_channels // 2, + kernel_size, + padding=(kernel_size - 1) // 2, + norm_cfg=norm_cfg, + activation=activation, + ) + if res_type == "add": + self.out_conv = ConvModule( + in_channels // 2, + in_channels, + kernel_size, + padding=(kernel_size - 1) // 2, + norm_cfg=norm_cfg, + activation=activation, + ) + + def forward(self, x): + x = self.in_conv(x) + x1 = self.mid_conv(x) + if self.res_type == "add": + return self.out_conv(x + x1) + else: + return torch.cat((x1, x), dim=1) + + +class CspBlock(nn.Module): + def __init__( + self, + in_channels, + num_res, + kernel_size=3, + stride=0, + norm_cfg=dict(type="BN", requires_grad=True), + activation="LeakyReLU", + ): + super(CspBlock, self).__init__() + assert in_channels % 2 == 0 + self.in_conv = ConvModule( + in_channels, + in_channels, + kernel_size, + stride, + padding=(kernel_size - 1) // 2, + norm_cfg=norm_cfg, + activation=activation, + ) + res_blocks = [] + for i in range(num_res): + res_block = TinyResBlock(in_channels, kernel_size, norm_cfg, activation) + res_blocks.append(res_block) + self.res_blocks = nn.Sequential(*res_blocks) + self.res_out_conv = ConvModule( + in_channels, + in_channels, + kernel_size, + padding=(kernel_size - 1) // 2, + norm_cfg=norm_cfg, + activation=activation, + ) + + def forward(self, x): + x = self.in_conv(x) + x1 = self.res_blocks(x) + x1 = self.res_out_conv(x1) + out = torch.cat((x1, x), dim=1) + return out + + +class CustomCspNet(nn.Module): + def __init__( + self, + net_cfg, + out_stages, + norm_cfg=dict(type="BN", requires_grad=True), + activation="LeakyReLU", + ): + super(CustomCspNet, self).__init__() + assert isinstance(net_cfg, list) + assert set(out_stages).issubset(i for i in range(len(net_cfg))) + self.out_stages = out_stages + self.activation = activation + self.stages = nn.ModuleList() + for stage_cfg in net_cfg: + if stage_cfg[0] == "Conv": + in_channels, out_channels, kernel_size, stride = stage_cfg[1:] + stage = ConvModule( + in_channels, + out_channels, + kernel_size, + stride, + padding=(kernel_size - 1) // 2, + norm_cfg=norm_cfg, + activation=activation, + ) + elif stage_cfg[0] == "CspBlock": + in_channels, num_res, kernel_size, stride = stage_cfg[1:] + stage = CspBlock( + in_channels, num_res, kernel_size, stride, norm_cfg, activation + ) + elif stage_cfg[0] == "MaxPool": + kernel_size, stride = stage_cfg[1:] + stage = nn.MaxPool2d( + kernel_size, stride, padding=(kernel_size - 1) // 2 + ) + else: + raise ModuleNotFoundError + self.stages.append(stage) + self._init_weight() + + def forward(self, x): + output = [] + for i, stage in enumerate(self.stages): + x = stage(x) + if i in self.out_stages: + output.append(x) + return tuple(output) + + def _init_weight(self): + for m in self.modules(): + if self.activation == "LeakyReLU": + nonlinearity = "leaky_relu" + else: + nonlinearity = "relu" + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_( + m.weight, mode="fan_out", 
nonlinearity=nonlinearity + ) + elif isinstance(m, nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/efficientnet_lite.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/efficientnet_lite.py new file mode 100644 index 0000000000..9cd6e41baf --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/efficientnet_lite.py @@ -0,0 +1,283 @@ +import math + +import torch +import torch.functional as F +import torch.utils.model_zoo as model_zoo +from torch import nn + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.activation import act_layers + +efficientnet_lite_params = { + # width_coefficient, depth_coefficient, image_size, dropout_rate + "efficientnet_lite0": [1.0, 1.0, 224, 0.2], + "efficientnet_lite1": [1.0, 1.1, 240, 0.2], + "efficientnet_lite2": [1.1, 1.2, 260, 0.3], + "efficientnet_lite3": [1.2, 1.4, 280, 0.3], + "efficientnet_lite4": [1.4, 1.8, 300, 0.3], +} + +model_urls = { + "efficientnet_lite0": "https://github.com/RangiLyu/EfficientNet-Lite/releases/download/v1.0/efficientnet_lite0.pth", # noqa: E501 + "efficientnet_lite1": "https://github.com/RangiLyu/EfficientNet-Lite/releases/download/v1.0/efficientnet_lite1.pth", # noqa: E501 + "efficientnet_lite2": "https://github.com/RangiLyu/EfficientNet-Lite/releases/download/v1.0/efficientnet_lite2.pth", # noqa: E501 + "efficientnet_lite3": "https://github.com/RangiLyu/EfficientNet-Lite/releases/download/v1.0/efficientnet_lite3.pth", # noqa: E501 + "efficientnet_lite4": "https://github.com/RangiLyu/EfficientNet-Lite/releases/download/v1.0/efficientnet_lite4.pth", # noqa: E501 +} + + +def round_filters(filters, multiplier, divisor=8, min_width=None): + """Calculate and round number of filters based on width multiplier.""" + if not multiplier: + return filters + filters *= multiplier + min_width = min_width or divisor + new_filters = max(min_width, int(filters + divisor / 2) // divisor * divisor) + # Make sure that round down does not go down by more than 10%. 
+ if new_filters < 0.9 * filters: + new_filters += divisor + return int(new_filters) + + +def round_repeats(repeats, multiplier): + """Round number of filters based on depth multiplier.""" + if not multiplier: + return repeats + return int(math.ceil(multiplier * repeats)) + + +def drop_connect(x, drop_connect_rate, training): + if not training: + return x + keep_prob = 1.0 - drop_connect_rate + batch_size = x.shape[0] + random_tensor = keep_prob + random_tensor += torch.rand([batch_size, 1, 1, 1], dtype=x.dtype, device=x.device) + binary_mask = torch.floor(random_tensor) + x = (x / keep_prob) * binary_mask + return x + + +class MBConvBlock(nn.Module): + def __init__( + self, + inp, + final_oup, + k, + s, + expand_ratio, + se_ratio, + has_se=False, + activation="ReLU6", + ): + super(MBConvBlock, self).__init__() + + self._momentum = 0.01 + self._epsilon = 1e-3 + self.input_filters = inp + self.output_filters = final_oup + self.stride = s + self.expand_ratio = expand_ratio + self.has_se = has_se + self.id_skip = True # skip connection and drop connect + + # Expansion phase + oup = inp * expand_ratio # number of output channels + if expand_ratio != 1: + self._expand_conv = nn.Conv2d( + in_channels=inp, out_channels=oup, kernel_size=1, bias=False + ) + self._bn0 = nn.BatchNorm2d( + num_features=oup, momentum=self._momentum, eps=self._epsilon + ) + + # Depthwise convolution phase + self._depthwise_conv = nn.Conv2d( + in_channels=oup, + out_channels=oup, + groups=oup, # groups makes it depthwise + kernel_size=k, + padding=(k - 1) // 2, + stride=s, + bias=False, + ) + self._bn1 = nn.BatchNorm2d( + num_features=oup, momentum=self._momentum, eps=self._epsilon + ) + + # Squeeze and Excitation layer, if desired + if self.has_se: + num_squeezed_channels = max(1, int(inp * se_ratio)) + self._se_reduce = nn.Conv2d( + in_channels=oup, out_channels=num_squeezed_channels, kernel_size=1 + ) + self._se_expand = nn.Conv2d( + in_channels=num_squeezed_channels, out_channels=oup, kernel_size=1 + ) + + # Output phase + self._project_conv = nn.Conv2d( + in_channels=oup, out_channels=final_oup, kernel_size=1, bias=False + ) + self._bn2 = nn.BatchNorm2d( + num_features=final_oup, momentum=self._momentum, eps=self._epsilon + ) + self._relu = act_layers(activation) + + def forward(self, x, drop_connect_rate=None): + """ + :param x: input tensor + :param drop_connect_rate: drop connect rate (float, between 0 and 1) + :return: output of block + """ + + # Expansion and Depthwise Convolution + identity = x + if self.expand_ratio != 1: + x = self._relu(self._bn0(self._expand_conv(x))) + x = self._relu(self._bn1(self._depthwise_conv(x))) + + # Squeeze and Excitation + if self.has_se: + x_squeezed = F.adaptive_avg_pool2d(x, 1) + x_squeezed = self._se_expand(self._relu(self._se_reduce(x_squeezed))) + x = torch.sigmoid(x_squeezed) * x + + x = self._bn2(self._project_conv(x)) + + # Skip connection and drop connect + if self.id_skip and self.stride == 1 and self.input_filters == self.output_filters: + if drop_connect_rate: + x = drop_connect(x, drop_connect_rate, training=self.training) + x += identity # skip connection + return x + + +class EfficientNetLite(nn.Module): + def __init__( + self, model_name, out_stages=(2, 4, 6), activation="ReLU6", pretrain=True + ): + super(EfficientNetLite, self).__init__() + assert set(out_stages).issubset(i for i in range(0, 7)) + assert model_name in efficientnet_lite_params + + self.model_name = model_name + # Batch norm parameters + momentum = 0.01 + epsilon = 1e-3 + width_multiplier, 
depth_multiplier, _, dropout_rate = efficientnet_lite_params[ + model_name + ] + self.drop_connect_rate = 0.2 + self.out_stages = out_stages + + mb_block_settings = [ + # repeat|kernel_size|stride|expand|input|output|se_ratio + [1, 3, 1, 1, 32, 16, 0.25], # stage0 + [2, 3, 2, 6, 16, 24, 0.25], # stage1 - 1/4 + [2, 5, 2, 6, 24, 40, 0.25], # stage2 - 1/8 + [3, 3, 2, 6, 40, 80, 0.25], # stage3 + [3, 5, 1, 6, 80, 112, 0.25], # stage4 - 1/16 + [4, 5, 2, 6, 112, 192, 0.25], # stage5 + [1, 3, 1, 6, 192, 320, 0.25], # stage6 - 1/32 + ] + + # Stem + out_channels = 32 + self.stem = nn.Sequential( + nn.Conv2d(3, out_channels, kernel_size=3, stride=2, padding=1, bias=False), + nn.BatchNorm2d(num_features=out_channels, momentum=momentum, eps=epsilon), + act_layers(activation), + ) + + # Build blocks + self.blocks = nn.ModuleList([]) + for i, stage_setting in enumerate(mb_block_settings): + stage = nn.ModuleList([]) + ( + num_repeat, + kernal_size, + stride, + expand_ratio, + input_filters, + output_filters, + se_ratio, + ) = stage_setting + # Update block input and output filters based on width multiplier. + input_filters = ( + input_filters + if i == 0 + else round_filters(input_filters, width_multiplier) + ) + output_filters = round_filters(output_filters, width_multiplier) + num_repeat = ( + num_repeat + if i == 0 or i == len(mb_block_settings) - 1 + else round_repeats(num_repeat, depth_multiplier) + ) + + # The first block needs to take care of stride and filter size increase. + stage.append( + MBConvBlock( + input_filters, + output_filters, + kernal_size, + stride, + expand_ratio, + se_ratio, + has_se=False, + ) + ) + if num_repeat > 1: + input_filters = output_filters + stride = 1 + for _ in range(num_repeat - 1): + stage.append( + MBConvBlock( + input_filters, + output_filters, + kernal_size, + stride, + expand_ratio, + se_ratio, + has_se=False, + ) + ) + + self.blocks.append(stage) + self._initialize_weights(pretrain) + + def forward(self, x): + x = self.stem(x) + output = [] + idx = 0 + for j, stage in enumerate(self.blocks): + for block in stage: + drop_connect_rate = self.drop_connect_rate + if drop_connect_rate: + drop_connect_rate *= float(idx) / len(self.blocks) + x = block(x, drop_connect_rate) + idx += 1 + if j in self.out_stages: + output.append(x) + return output + + def _initialize_weights(self, pretrain=True): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + m.weight.data.normal_(0, math.sqrt(2.0 / n)) + if m.bias is not None: + m.bias.data.zero_() + elif isinstance(m, nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + if pretrain: + url = model_urls[self.model_name] + if url is not None: + pretrained_state_dict = model_zoo.load_url(url) + print("=> loading pretrained model {}".format(url)) + self.load_state_dict(pretrained_state_dict, strict=False) + + def load_pretrain(self, path): + state_dict = torch.load(path) + self.load_state_dict(state_dict, strict=True) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/ghostnet.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/ghostnet.py new file mode 100644 index 0000000000..2e8f59f001 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/ghostnet.py @@ -0,0 +1,350 @@ +""" +2020.06.09-Changed for building GhostNet +Huawei Technologies Co., Ltd. 
+Creates a GhostNet Model as defined in: +GhostNet: More Features from Cheap Operations By Kai Han, Yunhe Wang, +Qi Tian, Jianyuan Guo, Chunjing Xu, Chang Xu. +https://arxiv.org/abs/1911.11907 +Modified from https://github.com/d-li14/mobilenetv3.pytorch +and https://github.com/rwightman/pytorch-image-models +""" +import logging +import math +import warnings + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.activation import act_layers + +torch.hub._validate_not_a_forked_repo = lambda a, b, c: True # workaround for rate limit bug + + +def get_url(width_mult=1.0): + if width_mult == 1.0: + return "https://raw.githubusercontent.com/huawei-noah/CV-Backbones/master/ghostnet_pytorch/models/state_dict_73.98.pth" # noqa E501 + else: + logging.info("GhostNet only has 1.0 pretrain model. ") + return None + + +def _make_divisible(v, divisor, min_value=None): + """ + This function is taken from the original tf repo. + It ensures that all layers have a channel number that is divisible by 8 + It can be seen here: + https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py + """ + if min_value is None: + min_value = divisor + new_v = max(min_value, int(v + divisor / 2) // divisor * divisor) + # Make sure that round down does not go down by more than 10%. + if new_v < 0.9 * v: + new_v += divisor + return new_v + + +def hard_sigmoid(x, inplace: bool = False): + if inplace: + return x.add_(3.0).clamp_(0.0, 6.0).div_(6.0) + else: + return F.relu6(x + 3.0) / 6.0 + + +class SqueezeExcite(nn.Module): + def __init__( + self, + in_chs, + se_ratio=0.25, + reduced_base_chs=None, + activation="ReLU", + gate_fn=hard_sigmoid, + divisor=4, + **_ + ): + super(SqueezeExcite, self).__init__() + self.gate_fn = gate_fn + reduced_chs = _make_divisible((reduced_base_chs or in_chs) * se_ratio, divisor) + self.avg_pool = nn.AdaptiveAvgPool2d(1) + self.conv_reduce = nn.Conv2d(in_chs, reduced_chs, 1, bias=True) + self.act1 = act_layers(activation) + self.conv_expand = nn.Conv2d(reduced_chs, in_chs, 1, bias=True) + + def forward(self, x): + x_se = self.avg_pool(x) + x_se = self.conv_reduce(x_se) + x_se = self.act1(x_se) + x_se = self.conv_expand(x_se) + x = x * self.gate_fn(x_se) + return x + + +class ConvBnAct(nn.Module): + def __init__(self, in_chs, out_chs, kernel_size, stride=1, activation="ReLU"): + super(ConvBnAct, self).__init__() + self.conv = nn.Conv2d( + in_chs, out_chs, kernel_size, stride, kernel_size // 2, bias=False + ) + self.bn1 = nn.BatchNorm2d(out_chs) + self.act1 = act_layers(activation) + + def forward(self, x): + x = self.conv(x) + x = self.bn1(x) + x = self.act1(x) + return x + + +class GhostModule(nn.Module): + def __init__( + self, inp, oup, kernel_size=1, ratio=2, dw_size=3, stride=1, activation="ReLU" + ): + super(GhostModule, self).__init__() + self.oup = oup + init_channels = math.ceil(oup / ratio) + new_channels = init_channels * (ratio - 1) + + self.primary_conv = nn.Sequential( + nn.Conv2d( + inp, init_channels, kernel_size, stride, kernel_size // 2, bias=False + ), + nn.BatchNorm2d(init_channels), + act_layers(activation) if activation else nn.Sequential(), + ) + + self.cheap_operation = nn.Sequential( + nn.Conv2d( + init_channels, + new_channels, + dw_size, + 1, + dw_size // 2, + groups=init_channels, + bias=False, + ), + nn.BatchNorm2d(new_channels), + act_layers(activation) if activation else nn.Sequential(), + ) + + def forward(self, x): + x1 = 
self.primary_conv(x) + x2 = self.cheap_operation(x1) + out = torch.cat([x1, x2], dim=1) + return out + + +class GhostBottleneck(nn.Module): + """Ghost bottleneck w/ optional SE""" + + def __init__( + self, + in_chs, + mid_chs, + out_chs, + dw_kernel_size=3, + stride=1, + activation="ReLU", + se_ratio=0.0, + ): + super(GhostBottleneck, self).__init__() + has_se = se_ratio is not None and se_ratio > 0.0 + self.stride = stride + + # Point-wise expansion + self.ghost1 = GhostModule(in_chs, mid_chs, activation=activation) + + # Depth-wise convolution + if self.stride > 1: + self.conv_dw = nn.Conv2d( + mid_chs, + mid_chs, + dw_kernel_size, + stride=stride, + padding=(dw_kernel_size - 1) // 2, + groups=mid_chs, + bias=False, + ) + self.bn_dw = nn.BatchNorm2d(mid_chs) + + # Squeeze-and-excitation + if has_se: + self.se = SqueezeExcite(mid_chs, se_ratio=se_ratio) + else: + self.se = None + + # Point-wise linear projection + self.ghost2 = GhostModule(mid_chs, out_chs, activation=None) + + # shortcut + if in_chs == out_chs and self.stride == 1: + self.shortcut = nn.Sequential() + else: + self.shortcut = nn.Sequential( + nn.Conv2d( + in_chs, + in_chs, + dw_kernel_size, + stride=stride, + padding=(dw_kernel_size - 1) // 2, + groups=in_chs, + bias=False, + ), + nn.BatchNorm2d(in_chs), + nn.Conv2d(in_chs, out_chs, 1, stride=1, padding=0, bias=False), + nn.BatchNorm2d(out_chs), + ) + + def forward(self, x): + residual = x + + # 1st ghost bottleneck + x = self.ghost1(x) + + # Depth-wise convolution + if self.stride > 1: + x = self.conv_dw(x) + x = self.bn_dw(x) + + # Squeeze-and-excitation + if self.se is not None: + x = self.se(x) + + # 2nd ghost bottleneck + x = self.ghost2(x) + + x += self.shortcut(residual) + return x + + +class GhostNet(nn.Module): + def __init__( + self, + width_mult=1.0, + out_stages=(4, 6, 9), + activation="ReLU", + pretrain=True, + act=None, + ): + super(GhostNet, self).__init__() + assert set(out_stages).issubset(i for i in range(10)) + self.width_mult = width_mult + self.out_stages = out_stages + # setting of inverted residual blocks + self.cfgs = [ + # k, t, c, SE, s + # stage1 + [[3, 16, 16, 0, 1]], # 0 + # stage2 + [[3, 48, 24, 0, 2]], # 1 + [[3, 72, 24, 0, 1]], # 2 1/4 + # stage3 + [[5, 72, 40, 0.25, 2]], # 3 + [[5, 120, 40, 0.25, 1]], # 4 1/8 + # stage4 + [[3, 240, 80, 0, 2]], # 5 + [ + [3, 200, 80, 0, 1], + [3, 184, 80, 0, 1], + [3, 184, 80, 0, 1], + [3, 480, 112, 0.25, 1], + [3, 672, 112, 0.25, 1], + ], # 6 1/16 + # stage5 + [[5, 672, 160, 0.25, 2]], # 7 + [ + [5, 960, 160, 0, 1], + [5, 960, 160, 0.25, 1], + [5, 960, 160, 0, 1], + [5, 960, 160, 0.25, 1], + ], # 8 + ] + # ------conv+bn+act----------# 9 1/32 + + self.activation = activation + if act is not None: + warnings.warn( + "Warning! act argument has been deprecated, " "use activation instead!" 
+ ) + self.activation = act + + # building first layer + output_channel = _make_divisible(16 * width_mult, 4) + self.conv_stem = nn.Conv2d(3, output_channel, 3, 2, 1, bias=False) + self.bn1 = nn.BatchNorm2d(output_channel) + self.act1 = act_layers(self.activation) + input_channel = output_channel + + # building inverted residual blocks + stages = [] + block = GhostBottleneck + for cfg in self.cfgs: + layers = [] + for k, exp_size, c, se_ratio, s in cfg: + output_channel = _make_divisible(c * width_mult, 4) + hidden_channel = _make_divisible(exp_size * width_mult, 4) + layers.append( + block( + input_channel, + hidden_channel, + output_channel, + k, + s, + activation=self.activation, + se_ratio=se_ratio, + ) + ) + input_channel = output_channel + stages.append(nn.Sequential(*layers)) + + output_channel = _make_divisible(exp_size * width_mult, 4) + stages.append( + nn.Sequential( + ConvBnAct(input_channel, output_channel, 1, activation=self.activation) + ) + ) # 9 + + self.blocks = nn.Sequential(*stages) + + self._initialize_weights(pretrain) + + def forward(self, x): + x = self.conv_stem(x) + x = self.bn1(x) + x = self.act1(x) + output = [] + for i in range(10): + x = self.blocks[i](x) + if i in self.out_stages: + output.append(x) + return tuple(output) + + def _initialize_weights(self, pretrain=True): + print("init weights...") + for name, m in self.named_modules(): + if isinstance(m, nn.Conv2d): + if "conv_stem" in name: + nn.init.normal_(m.weight, 0, 0.01) + else: + nn.init.normal_(m.weight, 0, 1.0 / m.weight.shape[1]) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + if m.bias is not None: + nn.init.constant_(m.bias, 0.0001) + nn.init.constant_(m.running_mean, 0) + elif isinstance(m, nn.BatchNorm1d): + nn.init.constant_(m.weight, 1) + if m.bias is not None: + nn.init.constant_(m.bias, 0.0001) + nn.init.constant_(m.running_mean, 0) + elif isinstance(m, nn.Linear): + nn.init.normal_(m.weight, 0, 0.01) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + if pretrain: + url = get_url(self.width_mult) + if url is not None: + state_dict = torch.hub.load_state_dict_from_url(url, progress=True) + self.load_state_dict(state_dict, strict=False) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/mobilenetv2.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/mobilenetv2.py new file mode 100644 index 0000000000..19fcae379e --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/mobilenetv2.py @@ -0,0 +1,176 @@ +from __future__ import absolute_import, division, print_function + +import warnings + +import torch.nn as nn + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.activation import act_layers + + +class ConvBNReLU(nn.Sequential): + def __init__( + self, + in_planes, + out_planes, + kernel_size=3, + stride=1, + groups=1, + activation="ReLU", + ): + padding = (kernel_size - 1) // 2 + super(ConvBNReLU, self).__init__( + nn.Conv2d( + in_planes, + out_planes, + kernel_size, + stride, + padding, + groups=groups, + bias=False, + ), + nn.BatchNorm2d(out_planes), + act_layers(activation), + ) + + +class InvertedResidual(nn.Module): + def __init__(self, inp, oup, stride, expand_ratio, activation="ReLU"): + super(InvertedResidual, self).__init__() + self.stride = stride + assert stride in [1, 2] + + hidden_dim = int(round(inp * expand_ratio)) + 
self.use_res_connect = self.stride == 1 and inp == oup + + layers = [] + if expand_ratio != 1: + # pw + layers.append( + ConvBNReLU(inp, hidden_dim, kernel_size=1, activation=activation) + ) + layers.extend( + [ + # dw + ConvBNReLU( + hidden_dim, + hidden_dim, + stride=stride, + groups=hidden_dim, + activation=activation, + ), + # pw-linear + nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False), + nn.BatchNorm2d(oup), + ] + ) + self.conv = nn.Sequential(*layers) + + def forward(self, x): + if self.use_res_connect: + return x + self.conv(x) + else: + return self.conv(x) + + +class MobileNetV2(nn.Module): + def __init__( + self, + width_mult=1.0, + out_stages=(1, 2, 4, 6), + last_channel=1280, + activation="ReLU", + act=None, + ): + super(MobileNetV2, self).__init__() + # TODO: support load torchvison pretrained weight + assert set(out_stages).issubset(i for i in range(7)) + self.width_mult = width_mult + self.out_stages = out_stages + input_channel = 32 + self.last_channel = last_channel + self.activation = activation + if act is not None: + warnings.warn( + "Warning! act argument has been deprecated, " "use activation instead!" + ) + self.activation = act + self.interverted_residual_setting = [ + # t, c, n, s + [1, 16, 1, 1], + [6, 24, 2, 2], + [6, 32, 3, 2], + [6, 64, 4, 2], + [6, 96, 3, 1], + [6, 160, 3, 2], + [6, 320, 1, 1], + ] + + # building first layer + self.input_channel = int(input_channel * width_mult) + self.first_layer = ConvBNReLU( + 3, self.input_channel, stride=2, activation=self.activation + ) + # building inverted residual blocks + for i in range(7): + name = "stage{}".format(i) + setattr(self, name, self.build_mobilenet_stage(stage_num=i)) + + self._initialize_weights() + + def build_mobilenet_stage(self, stage_num): + stage = [] + t, c, n, s = self.interverted_residual_setting[stage_num] + output_channel = int(c * self.width_mult) + for i in range(n): + if i == 0: + stage.append( + InvertedResidual( + self.input_channel, + output_channel, + s, + expand_ratio=t, + activation=self.activation, + ) + ) + else: + stage.append( + InvertedResidual( + self.input_channel, + output_channel, + 1, + expand_ratio=t, + activation=self.activation, + ) + ) + self.input_channel = output_channel + if stage_num == 6: + last_layer = ConvBNReLU( + self.input_channel, + self.last_channel, + kernel_size=1, + activation=self.activation, + ) + stage.append(last_layer) + stage = nn.Sequential(*stage) + return stage + + def forward(self, x): + x = self.first_layer(x) + output = [] + for i in range(0, 7): + stage = getattr(self, "stage{}".format(i)) + x = stage(x) + if i in self.out_stages: + output.append(x) + + return tuple(output) + + def _initialize_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.normal_(m.weight, std=0.001) + if m.bias is not None: + m.bias.data.zero_() + elif isinstance(m, nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/repvgg.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/repvgg.py new file mode 100644 index 0000000000..fa30508f13 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/repvgg.py @@ -0,0 +1,234 @@ +""" +@article{ding2101repvgg, + title={RepVGG: Making VGG-style ConvNets Great Again}, + author={Ding, Xiaohan and Zhang, Xiangyu and Ma, Ningning and Han, + Jungong and Ding, Guiguang and Sun, Jian}, + journal={arXiv preprint 
arXiv:2101.03697}} +RepVGG Backbone from paper RepVGG: Making VGG-style ConvNets Great Again +Code from https://github.com/DingXiaoH/RepVGG +""" + +import numpy as np +import torch +import torch.nn as nn + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import RepVGGConvModule + +optional_groupwise_layers = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26] +g2_map = {layer: 2 for layer in optional_groupwise_layers} +g4_map = {layer: 4 for layer in optional_groupwise_layers} + +model_param = { + "RepVGG-A0": dict( + num_blocks=[2, 4, 14, 1], + width_multiplier=[0.75, 0.75, 0.75, 2.5], + override_groups_map=None, + ), + "RepVGG-A1": dict( + num_blocks=[2, 4, 14, 1], + width_multiplier=[1, 1, 1, 2.5], + override_groups_map=None, + ), + "RepVGG-A2": dict( + num_blocks=[2, 4, 14, 1], + width_multiplier=[1.5, 1.5, 1.5, 2.75], + override_groups_map=None, + ), + "RepVGG-B0": dict( + num_blocks=[4, 6, 16, 1], + width_multiplier=[1, 1, 1, 2.5], + override_groups_map=None, + ), + "RepVGG-B1": dict( + num_blocks=[4, 6, 16, 1], + width_multiplier=[2, 2, 2, 4], + override_groups_map=None, + ), + "RepVGG-B1g2": dict( + num_blocks=[4, 6, 16, 1], + width_multiplier=[2, 2, 2, 4], + override_groups_map=g2_map, + ), + "RepVGG-B1g4": dict( + num_blocks=[4, 6, 16, 1], + width_multiplier=[2, 2, 2, 4], + override_groups_map=g4_map, + ), + "RepVGG-B2": dict( + num_blocks=[4, 6, 16, 1], + width_multiplier=[2.5, 2.5, 2.5, 5], + override_groups_map=None, + ), + "RepVGG-B2g2": dict( + num_blocks=[4, 6, 16, 1], + width_multiplier=[2.5, 2.5, 2.5, 5], + override_groups_map=g2_map, + ), + "RepVGG-B2g4": dict( + num_blocks=[4, 6, 16, 1], + width_multiplier=[2.5, 2.5, 2.5, 5], + override_groups_map=g4_map, + ), + "RepVGG-B3": dict( + num_blocks=[4, 6, 16, 1], + width_multiplier=[3, 3, 3, 5], + override_groups_map=None, + ), + "RepVGG-B3g2": dict( + num_blocks=[4, 6, 16, 1], + width_multiplier=[3, 3, 3, 5], + override_groups_map=g2_map, + ), + "RepVGG-B3g4": dict( + num_blocks=[4, 6, 16, 1], + width_multiplier=[3, 3, 3, 5], + override_groups_map=g4_map, + ), +} + + +def conv_bn(in_channels, out_channels, kernel_size, stride, padding, groups=1): + result = nn.Sequential() + result.add_module( + "conv", + nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + groups=groups, + bias=False, + ), + ) + result.add_module("bn", nn.BatchNorm2d(num_features=out_channels)) + return result + + +class RepVGG(nn.Module): + def __init__( + self, + arch, + out_stages=(1, 2, 3, 4), + activation="ReLU", + deploy=False, + last_channel=None, + ): + super(RepVGG, self).__init__() + # TODO: Update code to Xiaohan's repo + model_name = "RepVGG-" + arch + assert model_name in model_param + assert set(out_stages).issubset((1, 2, 3, 4)) + num_blocks = model_param[model_name]["num_blocks"] + width_multiplier = model_param[model_name]["width_multiplier"] + assert len(width_multiplier) == 4 + self.out_stages = out_stages + self.activation = activation + self.deploy = deploy + self.override_groups_map = ( + model_param[model_name]["override_groups_map"] or dict() + ) + + assert 0 not in self.override_groups_map + + self.in_planes = min(64, int(64 * width_multiplier[0])) + + self.stage0 = RepVGGConvModule( + in_channels=3, + out_channels=self.in_planes, + kernel_size=3, + stride=2, + padding=1, + activation=activation, + deploy=self.deploy, + ) + self.cur_layer_idx = 1 + self.stage1 = self._make_stage( + int(64 * width_multiplier[0]), 
num_blocks[0], stride=2 + ) + self.stage2 = self._make_stage( + int(128 * width_multiplier[1]), num_blocks[1], stride=2 + ) + self.stage3 = self._make_stage( + int(256 * width_multiplier[2]), num_blocks[2], stride=2 + ) + out_planes = last_channel if last_channel else int(512 * width_multiplier[3]) + self.stage4 = self._make_stage(out_planes, num_blocks[3], stride=2) + + def _make_stage(self, planes, num_blocks, stride): + strides = [stride] + [1] * (num_blocks - 1) + blocks = [] + for stride in strides: + cur_groups = self.override_groups_map.get(self.cur_layer_idx, 1) + blocks.append( + RepVGGConvModule( + in_channels=self.in_planes, + out_channels=planes, + kernel_size=3, + stride=stride, + padding=1, + groups=cur_groups, + activation=self.activation, + deploy=self.deploy, + ) + ) + self.in_planes = planes + self.cur_layer_idx += 1 + return nn.Sequential(*blocks) + + def forward(self, x): + x = self.stage0(x) + output = [] + for i in range(1, 5): + stage = getattr(self, "stage{}".format(i)) + x = stage(x) + if i in self.out_stages: + output.append(x) + return tuple(output) + + +def repvgg_model_convert(model, deploy_model, save_path=None): + """ + Examples: + >>> train_model = RepVGG(arch='A0', deploy=False) + >>> deploy_model = RepVGG(arch='A0', deploy=True) + >>> deploy_model = repvgg_model_convert( + >>> train_model, deploy_model, save_path='repvgg_deploy.pth') + """ + converted_weights = {} + for name, module in model.named_modules(): + if hasattr(module, "repvgg_convert"): + kernel, bias = module.repvgg_convert() + converted_weights[name + ".rbr_reparam.weight"] = kernel + converted_weights[name + ".rbr_reparam.bias"] = bias + elif isinstance(module, torch.nn.Linear): + converted_weights[name + ".weight"] = module.weight.detach().cpu().numpy() + converted_weights[name + ".bias"] = module.bias.detach().cpu().numpy() + del model + + for name, param in deploy_model.named_parameters(): + print("deploy param: ", name, param.size(), np.mean(converted_weights[name])) + param.data = torch.from_numpy(converted_weights[name]).float() + + if save_path is not None: + torch.save(deploy_model.state_dict(), save_path) + + return deploy_model + + +def repvgg_det_model_convert(model, deploy_model): + converted_weights = {} + deploy_model.load_state_dict(model.state_dict(), strict=False) + for name, module in model.backbone.named_modules(): + if hasattr(module, "repvgg_convert"): + kernel, bias = module.repvgg_convert() + converted_weights[name + ".rbr_reparam.weight"] = kernel + converted_weights[name + ".rbr_reparam.bias"] = bias + elif isinstance(module, torch.nn.Linear): + converted_weights[name + ".weight"] = module.weight.detach().cpu().numpy() + converted_weights[name + ".bias"] = module.bias.detach().cpu().numpy() + del model + for name, param in deploy_model.backbone.named_parameters(): + print("deploy param: ", name, param.size(), np.mean(converted_weights[name])) + param.data = torch.from_numpy(converted_weights[name]).float() + return deploy_model diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/resnet.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/resnet.py new file mode 100644 index 0000000000..cbd84f7546 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/resnet.py @@ -0,0 +1,196 @@ +from __future__ import absolute_import, division, print_function + +import torch.nn as nn +import torch.utils.model_zoo as model_zoo + +from 
opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.activation import act_layers + +model_urls = { + "resnet18": "https://download.pytorch.org/models/resnet18-5c106cde.pth", + "resnet34": "https://download.pytorch.org/models/resnet34-333f7ec4.pth", + "resnet50": "https://download.pytorch.org/models/resnet50-19c8e357.pth", + "resnet101": "https://download.pytorch.org/models/resnet101-5d3b4d8f.pth", + "resnet152": "https://download.pytorch.org/models/resnet152-b121ed2d.pth", +} + + +def conv3x3(in_planes, out_planes, stride=1): + """3x3 convolution with padding""" + return nn.Conv2d( + in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False + ) + + +class BasicBlock(nn.Module): + expansion = 1 + + def __init__(self, inplanes, planes, stride=1, downsample=None, activation="ReLU"): + super(BasicBlock, self).__init__() + self.conv1 = conv3x3(inplanes, planes, stride) + self.bn1 = nn.BatchNorm2d(planes) + self.act = act_layers(activation) + self.conv2 = conv3x3(planes, planes) + self.bn2 = nn.BatchNorm2d(planes) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.act(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.act(out) + + return out + + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, stride=1, downsample=None, activation="ReLU"): + super(Bottleneck, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) + self.bn1 = nn.BatchNorm2d(planes) + self.conv2 = nn.Conv2d( + planes, planes, kernel_size=3, stride=stride, padding=1, bias=False + ) + self.bn2 = nn.BatchNorm2d(planes) + self.conv3 = nn.Conv2d( + planes, planes * self.expansion, kernel_size=1, bias=False + ) + self.bn3 = nn.BatchNorm2d(planes * self.expansion) + self.act = act_layers(activation) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.act(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.act(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.act(out) + + return out + + +def fill_fc_weights(layers): + for m in layers.modules(): + if isinstance(m, nn.Conv2d): + nn.init.normal_(m.weight, std=0.001) + # torch.nn.init.kaiming_normal_(m.weight.data, nonlinearity='relu') + # torch.nn.init.xavier_normal_(m.weight.data) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + +class ResNet(nn.Module): + resnet_spec = { + 18: (BasicBlock, [2, 2, 2, 2]), + 34: (BasicBlock, [3, 4, 6, 3]), + 50: (Bottleneck, [3, 4, 6, 3]), + 101: (Bottleneck, [3, 4, 23, 3]), + 152: (Bottleneck, [3, 8, 36, 3]), + } + + def __init__( + self, depth, out_stages=(1, 2, 3, 4), activation="ReLU", pretrain=True + ): + super(ResNet, self).__init__() + if depth not in self.resnet_spec: + raise KeyError("invalid resnet depth {}".format(depth)) + assert set(out_stages).issubset((1, 2, 3, 4)) + self.activation = activation + block, layers = self.resnet_spec[depth] + self.depth = depth + self.inplanes = 64 + self.out_stages = out_stages + + self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False) + self.bn1 = nn.BatchNorm2d(64) + self.act = act_layers(self.activation) + self.maxpool = nn.MaxPool2d(kernel_size=3, 
stride=2, padding=1) + self.layer1 = self._make_layer(block, 64, layers[0]) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2) + self.layer3 = self._make_layer(block, 256, layers[2], stride=2) + self.layer4 = self._make_layer(block, 512, layers[3], stride=2) + self.init_weights(pretrain=pretrain) + + def _make_layer(self, block, planes, blocks, stride=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d( + self.inplanes, + planes * block.expansion, + kernel_size=1, + stride=stride, + bias=False, + ), + nn.BatchNorm2d(planes * block.expansion), + ) + + layers = [] + layers.append( + block(self.inplanes, planes, stride, downsample, activation=self.activation) + ) + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes, activation=self.activation)) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.act(x) + x = self.maxpool(x) + output = [] + for i in range(1, 5): + res_layer = getattr(self, "layer{}".format(i)) + x = res_layer(x) + if i in self.out_stages: + output.append(x) + + return tuple(output) + + def init_weights(self, pretrain=True): + if pretrain: + url = model_urls["resnet{}".format(self.depth)] + pretrained_state_dict = model_zoo.load_url(url) + print("=> loading pretrained model {}".format(url)) + self.load_state_dict(pretrained_state_dict, strict=False) + else: + for m in self.modules(): + if self.activation == "LeakyReLU": + nonlinearity = "leaky_relu" + else: + nonlinearity = "relu" + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_( + m.weight, mode="fan_out", nonlinearity=nonlinearity + ) + elif isinstance(m, nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/shufflenetv2.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/shufflenetv2.py new file mode 100644 index 0000000000..013f22a8c1 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/backbone/shufflenetv2.py @@ -0,0 +1,207 @@ +import torch +import torch.nn as nn +import torch.utils.model_zoo as model_zoo + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.activation import act_layers + +model_urls = { + "shufflenetv2_0.5x": "https://download.pytorch.org/models/shufflenetv2_x0.5-f707e7126e.pth", # noqa: E501 + "shufflenetv2_1.0x": "https://download.pytorch.org/models/shufflenetv2_x1-5666bf0f80.pth", # noqa: E501 + "shufflenetv2_1.5x": None, + "shufflenetv2_2.0x": None, +} + + +def channel_shuffle(x, groups): + # type: (torch.Tensor, int) -> torch.Tensor + batchsize, num_channels, height, width = x.data.size() + channels_per_group = num_channels // groups + + # reshape + x = x.view(batchsize, groups, channels_per_group, height, width) + + x = torch.transpose(x, 1, 2).contiguous() + + # flatten + x = x.view(batchsize, -1, height, width) + + return x + + +class ShuffleV2Block(nn.Module): + def __init__(self, inp, oup, stride, activation="ReLU"): + super(ShuffleV2Block, self).__init__() + + if not (1 <= stride <= 3): + raise ValueError("illegal stride value") + self.stride = stride + + branch_features = oup // 2 + assert (self.stride != 1) or (inp == branch_features << 1) + + if self.stride > 1: + self.branch1 = nn.Sequential( + self.depthwise_conv( + inp, inp, kernel_size=3, stride=self.stride, 
padding=1 + ), + nn.BatchNorm2d(inp), + nn.Conv2d( + inp, branch_features, kernel_size=1, stride=1, padding=0, bias=False + ), + nn.BatchNorm2d(branch_features), + act_layers(activation), + ) + else: + self.branch1 = nn.Sequential() + + self.branch2 = nn.Sequential( + nn.Conv2d( + inp if (self.stride > 1) else branch_features, + branch_features, + kernel_size=1, + stride=1, + padding=0, + bias=False, + ), + nn.BatchNorm2d(branch_features), + act_layers(activation), + self.depthwise_conv( + branch_features, + branch_features, + kernel_size=3, + stride=self.stride, + padding=1, + ), + nn.BatchNorm2d(branch_features), + nn.Conv2d( + branch_features, + branch_features, + kernel_size=1, + stride=1, + padding=0, + bias=False, + ), + nn.BatchNorm2d(branch_features), + act_layers(activation), + ) + + @staticmethod + def depthwise_conv(i, o, kernel_size, stride=1, padding=0, bias=False): + return nn.Conv2d(i, o, kernel_size, stride, padding, bias=bias, groups=i) + + def forward(self, x): + if self.stride == 1: + x1, x2 = x.chunk(2, dim=1) + out = torch.cat((x1, self.branch2(x2)), dim=1) + else: + out = torch.cat((self.branch1(x), self.branch2(x)), dim=1) + + out = channel_shuffle(out, 2) + + return out + + +class ShuffleNetV2(nn.Module): + def __init__( + self, + model_size="1.5x", + out_stages=(2, 3, 4), + with_last_conv=False, + kernal_size=3, + activation="ReLU", + pretrain=True, + ): + super(ShuffleNetV2, self).__init__() + # out_stages can only be a subset of (2, 3, 4) + assert set(out_stages).issubset((2, 3, 4)) + + print("model size is ", model_size) + + self.stage_repeats = [4, 8, 4] + self.model_size = model_size + self.out_stages = out_stages + self.with_last_conv = with_last_conv + self.kernal_size = kernal_size + self.activation = activation + if model_size == "0.5x": + self._stage_out_channels = [24, 48, 96, 192, 1024] + elif model_size == "1.0x": + self._stage_out_channels = [24, 116, 232, 464, 1024] + elif model_size == "1.5x": + self._stage_out_channels = [24, 176, 352, 704, 1024] + elif model_size == "2.0x": + self._stage_out_channels = [24, 244, 488, 976, 2048] + else: + raise NotImplementedError + + # building first layer + input_channels = 3 + output_channels = self._stage_out_channels[0] + self.conv1 = nn.Sequential( + nn.Conv2d(input_channels, output_channels, 3, 2, 1, bias=False), + nn.BatchNorm2d(output_channels), + act_layers(activation), + ) + input_channels = output_channels + + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + + stage_names = ["stage{}".format(i) for i in [2, 3, 4]] + for name, repeats, output_channels in zip( + stage_names, self.stage_repeats, self._stage_out_channels[1:] + ): + seq = [ + ShuffleV2Block( + input_channels, output_channels, 2, activation=activation + ) + ] + for i in range(repeats - 1): + seq.append( + ShuffleV2Block( + output_channels, output_channels, 1, activation=activation + ) + ) + setattr(self, name, nn.Sequential(*seq)) + input_channels = output_channels + output_channels = self._stage_out_channels[-1] + if self.with_last_conv: + conv5 = nn.Sequential( + nn.Conv2d(input_channels, output_channels, 1, 1, 0, bias=False), + nn.BatchNorm2d(output_channels), + act_layers(activation), + ) + self.stage4.add_module("conv5", conv5) + self._initialize_weights(pretrain) + + def forward(self, x): + x = self.conv1(x) + x = self.maxpool(x) + output = [] + for i in range(2, 5): + stage = getattr(self, "stage{}".format(i)) + x = stage(x) + if i in self.out_stages: + output.append(x) + return tuple(output) + + def 
_initialize_weights(self, pretrain=True): + print("init weights...") + for name, m in self.named_modules(): + if isinstance(m, nn.Conv2d): + if "first" in name: + nn.init.normal_(m.weight, 0, 0.01) + else: + nn.init.normal_(m.weight, 0, 1.0 / m.weight.shape[1]) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + if m.bias is not None: + nn.init.constant_(m.bias, 0.0001) + nn.init.constant_(m.running_mean, 0) + if pretrain: + url = model_urls["shufflenetv2_{}".format(self.model_size)] + if url is not None: + pretrained_state_dict = model_zoo.load_url(url) + print("=> loading pretrained model {}".format(url)) + self.load_state_dict(pretrained_state_dict, strict=False) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/__init__.py new file mode 100644 index 0000000000..233fd18103 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/__init__.py @@ -0,0 +1,35 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import copy + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.fpn.fpn import FPN +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.fpn.ghost_pan import GhostPAN +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.fpn.pan import PAN +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.fpn.tan import TAN + + +def build_fpn(cfg): + fpn_cfg = copy.deepcopy(cfg) + name = fpn_cfg.pop("name") + if name == "FPN": + return FPN(**fpn_cfg) + elif name == "PAN": + return PAN(**fpn_cfg) + elif name == "TAN": + return TAN(**fpn_cfg) + elif name == "GhostPAN": + return GhostPAN(**fpn_cfg) + else: + raise NotImplementedError diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/fpn.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/fpn.py new file mode 100644 index 0000000000..4549c7409e --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/fpn.py @@ -0,0 +1,100 @@ +# Modification 2020 RangiLyu +# Copyright 2018-2019 Open-MMLab. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
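As a quick orientation for reviewers, the backbone classes and the build_fpn factory introduced above compose as follows; a minimal sketch, assuming the import paths added in this patch and purely illustrative channel/size values (not taken from a shipped config):

    import torch
    from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone.shufflenetv2 import ShuffleNetV2
    from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.fpn import build_fpn

    # ShuffleNetV2 1.0x emits 116/232/464 channels at strides 8/16/32 (stages 2-4).
    backbone = ShuffleNetV2(model_size="1.0x", out_stages=(2, 3, 4), pretrain=False)
    # build_fpn dispatches on the "name" key and forwards the rest as kwargs.
    neck = build_fpn(dict(
        name="GhostPAN",
        in_channels=[116, 232, 464],
        out_channels=96,
        kernel_size=5,
        activation="LeakyReLU",
    ))
    feats = backbone(torch.randn(1, 3, 320, 320))
    outs = neck(feats)
    print([o.shape for o in outs])  # three maps with 96 channels each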
+ +import torch.nn as nn +import torch.nn.functional as F + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import ConvModule +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.init_weights import xavier_init + + +class FPN(nn.Module): + def __init__( + self, + in_channels, + out_channels, + num_outs, + start_level=0, + end_level=-1, + conv_cfg=None, + norm_cfg=None, + activation=None, + ): + super(FPN, self).__init__() + assert isinstance(in_channels, list) + self.in_channels = in_channels + self.out_channels = out_channels + self.num_ins = len(in_channels) + self.num_outs = num_outs + self.fp16_enabled = False + + if end_level == -1: + self.backbone_end_level = self.num_ins + assert num_outs >= self.num_ins - start_level + else: + # if end_level < inputs, no extra level is allowed + self.backbone_end_level = end_level + assert end_level <= len(in_channels) + assert num_outs == end_level - start_level + self.start_level = start_level + self.end_level = end_level + self.lateral_convs = nn.ModuleList() + + for i in range(self.start_level, self.backbone_end_level): + l_conv = ConvModule( + in_channels[i], + out_channels, + 1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + activation=activation, + inplace=False, + ) + + self.lateral_convs.append(l_conv) + self.init_weights() + + # default init_weights for conv(msra) and norm in ConvModule + def init_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + xavier_init(m, distribution="uniform") + + def forward(self, inputs): + assert len(inputs) == len(self.in_channels) + + # build laterals + laterals = [ + lateral_conv(inputs[i + self.start_level]) + for i, lateral_conv in enumerate(self.lateral_convs) + ] + + # build top-down path + used_backbone_levels = len(laterals) + for i in range(used_backbone_levels - 1, 0, -1): + laterals[i - 1] += F.interpolate( + laterals[i], scale_factor=2, mode="bilinear" + ) + + # build outputs + outs = [ + # self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels) + laterals[i] + for i in range(used_backbone_levels) + ] + return tuple(outs) + + +# if __name__ == '__main__': diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/ghost_pan.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/ghost_pan.py new file mode 100644 index 0000000000..76e043179c --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/ghost_pan.py @@ -0,0 +1,244 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import torch +import torch.nn as nn + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.backbone.ghostnet import GhostBottleneck +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import ConvModule, DepthwiseConvModule + + +class GhostBlocks(nn.Module): + """Stack of GhostBottleneck used in GhostPAN. 
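The FPN defined above only builds 1x1 lateral convolutions and fuses the levels top-down by upsample-and-add, returning one map per input level. A minimal sketch of that contract, with illustrative shapes:

    import torch
    from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.fpn.fpn import FPN

    fpn = FPN(in_channels=[128, 256, 512], out_channels=96, num_outs=3)
    feats = (
        torch.randn(1, 128, 40, 40),  # stride 8
        torch.randn(1, 256, 20, 20),  # stride 16
        torch.randn(1, 512, 10, 10),  # stride 32
    )
    outs = fpn(feats)
    # spatial sizes are preserved; every level now has 96 channels
    print([o.shape for o in outs])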
+ + Args: + in_channels (int): Number of input channels. + out_channels (int): Number of output channels. + expand (int): Expand ratio of GhostBottleneck. Default: 1. + kernel_size (int): Kernel size of depthwise convolution. Default: 5. + num_blocks (int): Number of GhostBottlecneck blocks. Default: 1. + use_res (bool): Whether to use residual connection. Default: False. + activation (str): Name of activation function. Default: LeakyReLU. + """ + + def __init__( + self, + in_channels, + out_channels, + expand=1, + kernel_size=5, + num_blocks=1, + use_res=False, + activation="LeakyReLU", + ): + super(GhostBlocks, self).__init__() + self.use_res = use_res + if use_res: + self.reduce_conv = ConvModule( + in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0, + activation=activation, + ) + blocks = [] + for _ in range(num_blocks): + blocks.append( + GhostBottleneck( + in_channels, + int(out_channels * expand), + out_channels, + dw_kernel_size=kernel_size, + activation=activation, + ) + ) + self.blocks = nn.Sequential(*blocks) + + def forward(self, x): + out = self.blocks(x) + if self.use_res: + out = out + self.reduce_conv(x) + return out + + +class GhostPAN(nn.Module): + """Path Aggregation Network with Ghost block. + + Args: + in_channels (List[int]): Number of input channels per scale. + out_channels (int): Number of output channels (used at each scale) + num_csp_blocks (int): Number of bottlenecks in CSPLayer. Default: 3 + use_depthwise (bool): Whether to depthwise separable convolution in + blocks. Default: False + kernel_size (int): Kernel size of depthwise convolution. Default: 5. + expand (int): Expand ratio of GhostBottleneck. Default: 1. + num_blocks (int): Number of GhostBottlecneck blocks. Default: 1. + use_res (bool): Whether to use residual connection. Default: False. + num_extra_level (int): Number of extra conv layers for more feature levels. + Default: 0. + upsample_cfg (dict): Config dict for interpolate layer. + Default: `dict(scale_factor=2, mode='nearest')` + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='BN') + activation (str): Activation layer name. + Default: LeakyReLU. 
+ """ + + def __init__( + self, + in_channels, + out_channels, + use_depthwise=False, + kernel_size=5, + expand=1, + num_blocks=1, + use_res=False, + num_extra_level=0, + upsample_cfg=dict(scale_factor=2, mode="bilinear"), + norm_cfg=dict(type="BN"), + activation="LeakyReLU", + ): + super(GhostPAN, self).__init__() + assert num_extra_level >= 0 + assert num_blocks >= 1 + self.in_channels = in_channels + self.out_channels = out_channels + + conv = DepthwiseConvModule if use_depthwise else ConvModule + + # build top-down blocks + self.upsample = nn.Upsample(**upsample_cfg) + self.reduce_layers = nn.ModuleList() + for idx in range(len(in_channels)): + self.reduce_layers.append( + ConvModule( + in_channels[idx], + out_channels, + 1, + norm_cfg=norm_cfg, + activation=activation, + ) + ) + self.top_down_blocks = nn.ModuleList() + for idx in range(len(in_channels) - 1, 0, -1): + self.top_down_blocks.append( + GhostBlocks( + out_channels * 2, + out_channels, + expand, + kernel_size=kernel_size, + num_blocks=num_blocks, + use_res=use_res, + activation=activation, + ) + ) + + # build bottom-up blocks + self.downsamples = nn.ModuleList() + self.bottom_up_blocks = nn.ModuleList() + for idx in range(len(in_channels) - 1): + self.downsamples.append( + conv( + out_channels, + out_channels, + kernel_size, + stride=2, + padding=kernel_size // 2, + norm_cfg=norm_cfg, + activation=activation, + ) + ) + self.bottom_up_blocks.append( + GhostBlocks( + out_channels * 2, + out_channels, + expand, + kernel_size=kernel_size, + num_blocks=num_blocks, + use_res=use_res, + activation=activation, + ) + ) + + # extra layers + self.extra_lvl_in_conv = nn.ModuleList() + self.extra_lvl_out_conv = nn.ModuleList() + for i in range(num_extra_level): + self.extra_lvl_in_conv.append( + conv( + out_channels, + out_channels, + kernel_size, + stride=2, + padding=kernel_size // 2, + norm_cfg=norm_cfg, + activation=activation, + ) + ) + self.extra_lvl_out_conv.append( + conv( + out_channels, + out_channels, + kernel_size, + stride=2, + padding=kernel_size // 2, + norm_cfg=norm_cfg, + activation=activation, + ) + ) + + def forward(self, inputs): + """ + Args: + inputs (tuple[Tensor]): input features. + Returns: + tuple[Tensor]: multi level features. 
+ """ + assert len(inputs) == len(self.in_channels) + inputs = [ + reduce(input_x) for input_x, reduce in zip(inputs, self.reduce_layers) + ] + # top-down path + inner_outs = [inputs[-1]] + for idx in range(len(self.in_channels) - 1, 0, -1): + feat_heigh = inner_outs[0] + feat_low = inputs[idx - 1] + + inner_outs[0] = feat_heigh + + upsample_feat = self.upsample(feat_heigh) + + inner_out = self.top_down_blocks[len(self.in_channels) - 1 - idx]( + torch.cat([upsample_feat, feat_low], 1) + ) + inner_outs.insert(0, inner_out) + + # bottom-up path + outs = [inner_outs[0]] + for idx in range(len(self.in_channels) - 1): + feat_low = outs[-1] + feat_height = inner_outs[idx + 1] + downsample_feat = self.downsamples[idx](feat_low) + out = self.bottom_up_blocks[idx]( + torch.cat([downsample_feat, feat_height], 1) + ) + outs.append(out) + + # extra layers + for extra_in_layer, extra_out_layer in zip( + self.extra_lvl_in_conv, self.extra_lvl_out_conv + ): + outs.append(extra_in_layer(inputs[-1]) + extra_out_layer(outs[-1])) + + return tuple(outs) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/pan.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/pan.py new file mode 100644 index 0000000000..c12482f294 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/pan.py @@ -0,0 +1,94 @@ +# Modification 2020 RangiLyu +# Copyright 2018-2019 Open-MMLab. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import torch.nn.functional as F + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.fpn.fpn import FPN + + +class PAN(FPN): + """Path Aggregation Network for Instance Segmentation. + + This is an implementation of the `PAN in Path Aggregation Network + `_. + + Args: + in_channels (List[int]): Number of input channels per scale. + out_channels (int): Number of output channels (used at each scale) + num_outs (int): Number of output scales. + start_level (int): Index of the start input backbone level used to + build the feature pyramid. Default: 0. + end_level (int): Index of the end input backbone level (exclusive) to + build the feature pyramid. Default: -1, which means the last level. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Config dict for normalization layer. Default: None. + activation (str): Config dict for activation layer in ConvModule. + Default: None. 
+ """ + + def __init__( + self, + in_channels, + out_channels, + num_outs, + start_level=0, + end_level=-1, + conv_cfg=None, + norm_cfg=None, + activation=None, + ): + super(PAN, self).__init__( + in_channels, + out_channels, + num_outs, + start_level, + end_level, + conv_cfg, + norm_cfg, + activation, + ) + self.init_weights() + + def forward(self, inputs): + """Forward function.""" + assert len(inputs) == len(self.in_channels) + + # build laterals + laterals = [ + lateral_conv(inputs[i + self.start_level]) + for i, lateral_conv in enumerate(self.lateral_convs) + ] + + # build top-down path + used_backbone_levels = len(laterals) + for i in range(used_backbone_levels - 1, 0, -1): + laterals[i - 1] += F.interpolate( + laterals[i], scale_factor=2, mode="bilinear" + ) + + # build outputs + # part 1: from original levels + inter_outs = [laterals[i] for i in range(used_backbone_levels)] + + # part 2: add bottom-up path + for i in range(0, used_backbone_levels - 1): + inter_outs[i + 1] += F.interpolate( + inter_outs[i], scale_factor=0.5, mode="bilinear" + ) + + outs = [] + outs.append(inter_outs[0]) + outs.extend([inter_outs[i] for i in range(1, used_backbone_levels)]) + return tuple(outs) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/tan.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/tan.py new file mode 100644 index 0000000000..42efd128b9 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/fpn/tan.py @@ -0,0 +1,121 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import ConvModule +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.init_weights import normal_init +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.transformer import TransformerBlock + + +class TAN(nn.Module): + """ + Transformer Attention Network. + + :param in_channels: Number of input channels per scale. + :param out_channels: Number of output channel. + :param feature_hw: Size of feature map input to transformer. + :param num_heads: Number of attention heads. + :param num_encoders: Number of transformer encoder layers. + :param mlp_ratio: Hidden layer dimension expand ratio in MLP. + :param dropout_ratio: Probability of an element to be zeroed. + :param activation: Activation layer type. 
+ """ + + def __init__( + self, + in_channels, + out_channels, + feature_hw, + num_heads, + num_encoders, + mlp_ratio, + dropout_ratio, + activation="LeakyReLU", + ): + super(TAN, self).__init__() + assert isinstance(in_channels, list) + self.in_channels = in_channels + self.out_channels = out_channels + self.num_ins = len(in_channels) + assert self.num_ins == 3 + + self.lateral_convs = nn.ModuleList() + for i in range(self.num_ins): + l_conv = ConvModule( + in_channels[i], + out_channels, + 1, + norm_cfg=dict(type="BN"), + activation=activation, + inplace=False, + ) + self.lateral_convs.append(l_conv) + self.transformer = TransformerBlock( + out_channels * self.num_ins, + out_channels, + num_heads, + num_encoders, + mlp_ratio, + dropout_ratio, + activation=activation, + ) + self.pos_embed = nn.Parameter( + torch.zeros(feature_hw[0] * feature_hw[1], 1, out_channels) + ) + + self.init_weights() + + def init_weights(self): + torch.nn.init.trunc_normal_(self.pos_embed, std=0.02) + for m in self.modules(): + if isinstance(m, nn.Linear): + torch.nn.init.trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + normal_init(m, 0.01) + + def forward(self, inputs): + assert len(inputs) == len(self.in_channels) + + # build laterals + laterals = [ + lateral_conv(inputs[i]) for i, lateral_conv in enumerate(self.lateral_convs) + ] + + # transformer attention + mid_shape = laterals[1].shape[2:] + mid_lvl = torch.cat( + ( + F.interpolate(laterals[0], size=mid_shape, mode="bilinear"), + laterals[1], + F.interpolate(laterals[2], size=mid_shape, mode="bilinear"), + ), + dim=1, + ) + mid_lvl = self.transformer(mid_lvl, self.pos_embed) + + # build outputs + outs = [ + laterals[0] + F.interpolate(mid_lvl, size=laterals[0].shape[2:], mode="bilinear"), + laterals[1] + mid_lvl, + laterals[2] + F.interpolate(mid_lvl, size=laterals[2].shape[2:], mode="bilinear"), + ] + return tuple(outs) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/__init__.py new file mode 100644 index 0000000000..7a75675158 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/__init__.py @@ -0,0 +1,21 @@ +import copy + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head.gfl_head import GFLHead +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head.nanodet_head import NanoDetHead +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head.nanodet_plus_head import NanoDetPlusHead +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head.simple_conv_head import SimpleConvHead + + +def build_head(cfg): + head_cfg = copy.deepcopy(cfg) + name = head_cfg.pop("name") + if name == "GFLHead": + return GFLHead(**head_cfg) + elif name == "NanoDetHead": + return NanoDetHead(**head_cfg) + elif name == "NanoDetPlusHead": + return NanoDetPlusHead(**head_cfg) + elif name == "SimpleConvHead": + return SimpleConvHead(**head_cfg) + else: + raise NotImplementedError diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/__init__.py new file mode 
100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/assign_result.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/assign_result.py new file mode 100644 index 0000000000..ca32f0c8f7 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/assign_result.py @@ -0,0 +1,228 @@ +# Modification 2020 RangiLyu +# Copyright 2018-2019 Open-MMLab. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import torch + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util import util_mixins + + +class AssignResult(util_mixins.NiceRepr): + """ + Stores assignments between predicted and truth boxes. + + Attributes: + num_gts (int): the number of truth boxes considered when computing this + assignment + + gt_inds (LongTensor): for each predicted box indicates the 1-based + index of the assigned truth box. 0 means unassigned and -1 means + ignore. + + max_overlaps (FloatTensor): the iou between the predicted box and its + assigned truth box. + + labels (None | LongTensor): If specified, for each predicted box + indicates the category label of the assigned truth box. + + Example: + >>> # An assign result between 4 predicted boxes and 9 true boxes + >>> # where only two boxes were assigned. 
+ >>> num_gts = 9 + >>> max_overlaps = torch.LongTensor([0, .5, .9, 0]) + >>> gt_inds = torch.LongTensor([-1, 1, 2, 0]) + >>> labels = torch.LongTensor([0, 3, 4, 0]) + >>> self = AssignResult(num_gts, gt_inds, max_overlaps, labels) + >>> print(str(self)) # xdoctest: +IGNORE_WANT + + >>> # Force addition of gt labels (when adding gt as proposals) + >>> new_labels = torch.LongTensor([3, 4, 5]) + >>> self.add_gt_(new_labels) + >>> print(str(self)) # xdoctest: +IGNORE_WANT + + """ + + def __init__(self, num_gts, gt_inds, max_overlaps, labels=None): + self.num_gts = num_gts + self.gt_inds = gt_inds + self.max_overlaps = max_overlaps + self.labels = labels + # Interface for possible user-defined properties + self._extra_properties = {} + + @property + def num_preds(self): + """int: the number of predictions in this assignment""" + return len(self.gt_inds) + + def set_extra_property(self, key, value): + """Set user-defined new property.""" + assert key not in self.info + self._extra_properties[key] = value + + def get_extra_property(self, key): + """Get user-defined property.""" + return self._extra_properties.get(key, None) + + @property + def info(self): + """dict: a dictionary of info about the object""" + basic_info = { + "num_gts": self.num_gts, + "num_preds": self.num_preds, + "gt_inds": self.gt_inds, + "max_overlaps": self.max_overlaps, + "labels": self.labels, + } + basic_info.update(self._extra_properties) + return basic_info + + def __nice__(self): + """str: a "nice" summary string describing this assign result""" + parts = [] + parts.append(f"num_gts={self.num_gts!r}") + if self.gt_inds is None: + parts.append(f"gt_inds={self.gt_inds!r}") + else: + parts.append(f"gt_inds.shape={tuple(self.gt_inds.shape)!r}") + if self.max_overlaps is None: + parts.append(f"max_overlaps={self.max_overlaps!r}") + else: + parts.append("max_overlaps.shape=" f"{tuple(self.max_overlaps.shape)!r}") + if self.labels is None: + parts.append(f"labels={self.labels!r}") + else: + parts.append(f"labels.shape={tuple(self.labels.shape)!r}") + return ", ".join(parts) + + @classmethod + def random(cls, **kwargs): + """Create random AssignResult for tests or debugging. + + Args: + num_preds: number of predicted boxes + num_gts: number of true boxes + p_ignore (float): probability of a predicted box assinged to an + ignored truth + p_assigned (float): probability of a predicted box not being + assigned + p_use_label (float | bool): with labels or not + rng (None | int | numpy.random.RandomState): seed or state + + Returns: + :obj:`AssignResult`: Randomly generated assign results. 
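To make the gt_inds convention above concrete (-1 = ignore, 0 = background, k > 0 = 1-based index of the matched ground truth), a small hand-built instance with made-up values:

    import torch
    from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head.assigner.assign_result import AssignResult

    # 4 predictions, 2 ground-truth boxes:
    # pred 0 is ignored, pred 1 matches gt #1, pred 2 matches gt #2, pred 3 is background.
    result = AssignResult(
        num_gts=2,
        gt_inds=torch.LongTensor([-1, 1, 2, 0]),
        max_overlaps=torch.FloatTensor([0.0, 0.55, 0.90, 0.05]),
        labels=torch.LongTensor([0, 3, 4, 0]),
    )
    print(result.num_preds, result.info["num_gts"])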
+ + Example: + >>> from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head\ + .assigner.assign_result import AssignResult + >>> self = AssignResult.random() + >>> print(self.info) + """ + rng = kwargs.get("rng", None) + num_gts = kwargs.get("num_gts", None) + num_preds = kwargs.get("num_preds", None) + p_ignore = kwargs.get("p_ignore", 0.3) + p_assigned = kwargs.get("p_assigned", 0.7) + p_use_label = kwargs.get("p_use_label", 0.5) + num_classes = kwargs.get("p_use_label", 3) + + import numpy as np + + if rng is None: + rng = np.random.mtrand._rand + elif isinstance(rng, int): + rng = np.random.RandomState(rng) + else: + rng = rng + if num_gts is None: + num_gts = rng.randint(0, 8) + if num_preds is None: + num_preds = rng.randint(0, 16) + + if num_gts == 0: + max_overlaps = torch.zeros(num_preds, dtype=torch.float32) + gt_inds = torch.zeros(num_preds, dtype=torch.int64) + if p_use_label is True or p_use_label < rng.rand(): + labels = torch.zeros(num_preds, dtype=torch.int64) + else: + labels = None + else: + import numpy as np + + # Create an overlap for each predicted box + max_overlaps = torch.from_numpy(rng.rand(num_preds)) + + # Construct gt_inds for each predicted box + is_assigned = torch.from_numpy(rng.rand(num_preds) < p_assigned) + # maximum number of assignments constraints + n_assigned = min(num_preds, min(num_gts, is_assigned.sum())) + + assigned_idxs = np.where(is_assigned)[0] + rng.shuffle(assigned_idxs) + assigned_idxs = assigned_idxs[0:n_assigned] + assigned_idxs.sort() + + is_assigned[:] = 0 + is_assigned[assigned_idxs] = True + + is_ignore = torch.from_numpy(rng.rand(num_preds) < p_ignore) & is_assigned + + gt_inds = torch.zeros(num_preds, dtype=torch.int64) + + true_idxs = np.arange(num_gts) + rng.shuffle(true_idxs) + true_idxs = torch.from_numpy(true_idxs) + gt_inds[is_assigned] = true_idxs[:n_assigned] + + gt_inds = torch.from_numpy(rng.randint(1, num_gts + 1, size=num_preds)) + gt_inds[is_ignore] = -1 + gt_inds[~is_assigned] = 0 + max_overlaps[~is_assigned] = 0 + + if p_use_label is True or p_use_label < rng.rand(): + if num_classes == 0: + labels = torch.zeros(num_preds, dtype=torch.int64) + else: + labels = torch.from_numpy( + # remind that we set FG labels to [0, num_class-1] + # since mmdet v2.0 + # BG cat_id: num_class + rng.randint(0, num_classes, size=num_preds) + ) + labels[~is_assigned] = 0 + else: + labels = None + + self = cls(num_gts, gt_inds, max_overlaps, labels) + return self + + def add_gt_(self, gt_labels): + """Add ground truth as assigned results. + + Args: + gt_labels (torch.Tensor): Labels of gt boxes + """ + self_inds = torch.arange( + 1, len(gt_labels) + 1, dtype=torch.long, device=gt_labels.device + ) + self.gt_inds = torch.cat([self_inds, self.gt_inds]) + + self.max_overlaps = torch.cat( + [self.max_overlaps.new_ones(len(gt_labels)), self.max_overlaps] + ) + + if self.labels is not None: + self.labels = torch.cat([gt_labels, self.labels]) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/atss_assigner.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/atss_assigner.py new file mode 100644 index 0000000000..ab4c8cf86e --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/atss_assigner.py @@ -0,0 +1,174 @@ +# Modification 2020 RangiLyu +# Copyright 2018-2019 Open-MMLab. 
+ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import torch + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.loss.iou_loss import bbox_overlaps +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head.assigner.assign_result import AssignResult +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head.assigner.base_assigner import BaseAssigner + + +class ATSSAssigner(BaseAssigner): + """Assign a corresponding gt bbox or background to each bbox. + + Each proposals will be assigned with `0` or a positive integer + indicating the ground truth index. + + - 0: negative sample, no assigned gt + - positive integer: positive sample, index (1-based) of assigned gt + + Args: + topk (float): number of bbox selected in each level + """ + + def __init__(self, topk): + self.topk = topk + + # https://github.com/sfzhang15/ATSS/blob/master/atss_core/modeling/rpn/atss/loss.py + + def assign( + self, bboxes, num_level_bboxes, gt_bboxes, gt_bboxes_ignore=None, gt_labels=None + ): + """Assign gt to bboxes. + + The assignment is done in following steps + + 1. compute iou between all bbox (bbox of all pyramid levels) and gt + 2. compute center distance between all bbox and gt + 3. on each pyramid level, for each gt, select k bbox whose center + are closest to the gt center, so we total select k*l bbox as + candidates for each gt + 4. get corresponding iou for the these candidates, and compute the + mean and std, set mean + std as the iou threshold + 5. select these candidates whose iou are greater than or equal to + the threshold as postive + 6. limit the positive sample's center in gt + + + Args: + bboxes (Tensor): Bounding boxes to be assigned, shape(n, 4). + num_level_bboxes (List): num of bboxes in each level + gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4). + gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are + labelled as `ignored`, e.g., crowd boxes in COCO. + gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ). + + Returns: + :obj:`AssignResult`: The assign result. 
+ """ + INF = 100000000 + bboxes = bboxes[:, :4] + num_gt, num_bboxes = gt_bboxes.size(0), bboxes.size(0) + + # compute iou between all bbox and gt + overlaps = bbox_overlaps(bboxes, gt_bboxes) + + # assign 0 by default + assigned_gt_inds = overlaps.new_full((num_bboxes,), 0, dtype=torch.long) + + if num_gt == 0 or num_bboxes == 0: + # No ground truth or boxes, return empty assignment + max_overlaps = overlaps.new_zeros((num_bboxes,)) + if num_gt == 0: + # No truth, assign everything to background + assigned_gt_inds[:] = 0 + if gt_labels is None: + assigned_labels = None + else: + assigned_labels = overlaps.new_full((num_bboxes,), -1, dtype=torch.long) + return AssignResult( + num_gt, assigned_gt_inds, max_overlaps, labels=assigned_labels + ) + + # compute center distance between all bbox and gt + gt_cx = (gt_bboxes[:, 0] + gt_bboxes[:, 2]) / 2.0 + gt_cy = (gt_bboxes[:, 1] + gt_bboxes[:, 3]) / 2.0 + gt_points = torch.stack((gt_cx, gt_cy), dim=1) + + bboxes_cx = (bboxes[:, 0] + bboxes[:, 2]) / 2.0 + bboxes_cy = (bboxes[:, 1] + bboxes[:, 3]) / 2.0 + bboxes_points = torch.stack((bboxes_cx, bboxes_cy), dim=1) + + distances = ( + (bboxes_points[:, None, :] - gt_points[None, :, :]).pow(2).sum(-1).sqrt() + ) + + # Selecting candidates based on the center distance + candidate_idxs = [] + start_idx = 0 + for level, bboxes_per_level in enumerate(num_level_bboxes): + # on each pyramid level, for each gt, + # select k bbox whose center are closest to the gt center + end_idx = start_idx + bboxes_per_level + distances_per_level = distances[start_idx:end_idx, :] + selectable_k = min(self.topk, bboxes_per_level) + _, topk_idxs_per_level = distances_per_level.topk( + selectable_k, dim=0, largest=False + ) + candidate_idxs.append(topk_idxs_per_level + start_idx) + start_idx = end_idx + candidate_idxs = torch.cat(candidate_idxs, dim=0) + + # get corresponding iou for the these candidates, and compute the + # mean and std, set mean + std as the iou threshold + candidate_overlaps = overlaps[candidate_idxs, torch.arange(num_gt)] + overlaps_mean_per_gt = candidate_overlaps.mean(0) + overlaps_std_per_gt = candidate_overlaps.std(0) + overlaps_thr_per_gt = overlaps_mean_per_gt + overlaps_std_per_gt + + is_pos = candidate_overlaps >= overlaps_thr_per_gt[None, :] + + # limit the positive sample's center in gt + for gt_idx in range(num_gt): + candidate_idxs[:, gt_idx] += gt_idx * num_bboxes + ep_bboxes_cx = ( + bboxes_cx.view(1, -1).expand(num_gt, num_bboxes).contiguous().view(-1) + ) + ep_bboxes_cy = ( + bboxes_cy.view(1, -1).expand(num_gt, num_bboxes).contiguous().view(-1) + ) + candidate_idxs = candidate_idxs.view(-1) + + # calculate the left, top, right, bottom distance between positive + # bbox center and gt side + l_ = ep_bboxes_cx[candidate_idxs].view(-1, num_gt) - gt_bboxes[:, 0] + t_ = ep_bboxes_cy[candidate_idxs].view(-1, num_gt) - gt_bboxes[:, 1] + r_ = gt_bboxes[:, 2] - ep_bboxes_cx[candidate_idxs].view(-1, num_gt) + b_ = gt_bboxes[:, 3] - ep_bboxes_cy[candidate_idxs].view(-1, num_gt) + is_in_gts = torch.stack([l_, t_, r_, b_], dim=1).min(dim=1)[0] > 0.01 + is_pos = is_pos & is_in_gts + + # if an anchor box is assigned to multiple gts, + # the one with the highest IoU will be selected. 
+ overlaps_inf = torch.full_like(overlaps, -INF).t().contiguous().view(-1) + index = candidate_idxs.view(-1)[is_pos.view(-1)] + overlaps_inf[index] = overlaps.t().contiguous().view(-1)[index] + overlaps_inf = overlaps_inf.view(num_gt, -1).t() + + max_overlaps, argmax_overlaps = overlaps_inf.max(dim=1) + assigned_gt_inds[max_overlaps != -INF] = ( + argmax_overlaps[max_overlaps != -INF] + 1 + ) + + if gt_labels is not None: + assigned_labels = assigned_gt_inds.new_full((num_bboxes,), -1) + pos_inds = torch.nonzero(assigned_gt_inds > 0, as_tuple=False).squeeze() + if pos_inds.numel() > 0: + assigned_labels[pos_inds] = gt_labels[assigned_gt_inds[pos_inds] - 1] + else: + assigned_labels = None + return AssignResult( + num_gt, assigned_gt_inds, max_overlaps, labels=assigned_labels + ) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/base_assigner.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/base_assigner.py new file mode 100644 index 0000000000..8a9094faa5 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/base_assigner.py @@ -0,0 +1,7 @@ +from abc import ABCMeta, abstractmethod + + +class BaseAssigner(metaclass=ABCMeta): + @abstractmethod + def assign(self, bboxes, gt_bboxes, gt_bboxes_ignore=None, gt_labels=None): + pass diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/dsl_assigner.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/dsl_assigner.py new file mode 100644 index 0000000000..a75bf1fbc9 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/assigner/dsl_assigner.py @@ -0,0 +1,154 @@ +import torch +import torch.nn.functional as F + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.loss.iou_loss import bbox_overlaps +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head.assigner.assign_result import AssignResult +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head.assigner.base_assigner import BaseAssigner + + +class DynamicSoftLabelAssigner(BaseAssigner): + """Computes matching between predictions and ground truth with + dynamic soft label assignment. + + Args: + topk (int): Select top-k predictions to calculate dynamic k + best matchs for each gt. Default 13. + iou_factor (float): The scale factor of iou cost. Default 3.0. + """ + + def __init__(self, topk=13, iou_factor=3.0): + self.topk = topk + self.iou_factor = iou_factor + + def assign( + self, + pred_scores, + priors, + decoded_bboxes, + gt_bboxes, + gt_labels, + ): + """Assign gt to priors with dynamic soft label assignment. + Args: + pred_scores (Tensor): Classification scores of one image, + a 2D-Tensor with shape [num_priors, num_classes] + priors (Tensor): All priors of one image, a 2D-Tensor with shape + [num_priors, 4] in [cx, xy, stride_w, stride_y] format. + decoded_bboxes (Tensor): Predicted bboxes, a 2D-Tensor with shape + [num_priors, 4] in [tl_x, tl_y, br_x, br_y] format. + gt_bboxes (Tensor): Ground truth bboxes of one image, a 2D-Tensor + with shape [num_gts, 4] in [tl_x, tl_y, br_x, br_y] format. + gt_labels (Tensor): Ground truth labels of one image, a Tensor + with shape [num_gts]. + + Returns: + :obj:`AssignResult`: The assigned result. 
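The adaptive threshold used in the ATSS assignment above is simply the mean plus the standard deviation of the candidate IoUs collected per ground-truth box; a numeric sketch with made-up IoUs:

    import torch

    # IoUs of the k candidates selected per pyramid level for one ground-truth box
    candidate_overlaps = torch.tensor([0.12, 0.35, 0.42, 0.05, 0.61, 0.58])
    thr = candidate_overlaps.mean() + candidate_overlaps.std()
    is_pos = candidate_overlaps >= thr
    print(round(thr.item(), 3), is_pos.tolist())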
+ """ + INF = 100000000 + num_gt = gt_bboxes.size(0) + num_bboxes = decoded_bboxes.size(0) + + # assign 0 by default + assigned_gt_inds = decoded_bboxes.new_full((num_bboxes,), 0, dtype=torch.long) + + prior_center = priors[:, :2] + lt_ = prior_center[:, None] - gt_bboxes[:, :2] + rb_ = gt_bboxes[:, 2:] - prior_center[:, None] + + deltas = torch.cat([lt_, rb_], dim=-1) + is_in_gts = deltas.min(dim=-1).values > 0 + valid_mask = is_in_gts.sum(dim=1) > 0 + + valid_decoded_bbox = decoded_bboxes[valid_mask] + valid_pred_scores = pred_scores[valid_mask] + num_valid = valid_decoded_bbox.size(0) + + if num_gt == 0 or num_bboxes == 0 or num_valid == 0: + # No ground truth or boxes, return empty assignment + max_overlaps = decoded_bboxes.new_zeros((num_bboxes,)) + if num_gt == 0: + # No truth, assign everything to background + assigned_gt_inds[:] = 0 + if gt_labels is None: + assigned_labels = None + else: + assigned_labels = decoded_bboxes.new_full( + (num_bboxes,), -1, dtype=torch.long + ) + return AssignResult( + num_gt, assigned_gt_inds, max_overlaps, labels=assigned_labels + ) + + pairwise_ious = bbox_overlaps(valid_decoded_bbox, gt_bboxes) + iou_cost = -torch.log(pairwise_ious + 1e-7) + + gt_onehot_label = ( + F.one_hot(gt_labels.to(torch.int64), pred_scores.shape[-1]) + .float() + .unsqueeze(0) + .repeat(num_valid, 1, 1) + ) + valid_pred_scores = valid_pred_scores.unsqueeze(1).repeat(1, num_gt, 1) + + soft_label = gt_onehot_label * pairwise_ious[..., None] + scale_factor = soft_label - valid_pred_scores + + cls_cost = F.binary_cross_entropy( + valid_pred_scores, soft_label, reduction="none" + ) * scale_factor.abs().pow(2.0) + + cls_cost = cls_cost.sum(dim=-1) + + cost_matrix = cls_cost + iou_cost * self.iou_factor + + matched_pred_ious, matched_gt_inds = self.dynamic_k_matching( + cost_matrix, pairwise_ious, num_gt, valid_mask + ) + + # convert to AssignResult format + assigned_gt_inds[valid_mask] = matched_gt_inds + 1 + assigned_labels = assigned_gt_inds.new_full((num_bboxes,), -1) + assigned_labels[valid_mask] = gt_labels[matched_gt_inds].long() + max_overlaps = assigned_gt_inds.new_full( + (num_bboxes,), -INF, dtype=torch.float32 + ) + max_overlaps[valid_mask] = matched_pred_ious + return AssignResult( + num_gt, assigned_gt_inds, max_overlaps, labels=assigned_labels + ) + + def dynamic_k_matching(self, cost, pairwise_ious, num_gt, valid_mask): + """Use sum of topk pred iou as dynamic k. Refer from OTA and YOLOX. + + Args: + cost (Tensor): Cost matrix. + pairwise_ious (Tensor): Pairwise iou matrix. + num_gt (int): Number of gt. + valid_mask (Tensor): Mask for valid bboxes. 
+ """ + matching_matrix = torch.zeros_like(cost) + # select candidate topk ious for dynamic-k calculation + candidate_topk = min(self.topk, pairwise_ious.size(0)) + topk_ious, _ = torch.topk(pairwise_ious, candidate_topk, dim=0) + # calculate dynamic k for each gt + dynamic_ks = torch.clamp(topk_ious.sum(0).int(), min=1) + for gt_idx in range(num_gt): + _, pos_idx = torch.topk( + cost[:, gt_idx], k=dynamic_ks[gt_idx].item(), largest=False + ) + matching_matrix[:, gt_idx][pos_idx] = 1.0 + + del topk_ious, dynamic_ks, pos_idx + + prior_match_gt_mask = matching_matrix.sum(1) > 1 + if prior_match_gt_mask.sum() > 0: + cost_min, cost_argmin = torch.min(cost[prior_match_gt_mask, :], dim=1) + matching_matrix[prior_match_gt_mask, :] *= 0.0 + matching_matrix[prior_match_gt_mask, cost_argmin] = 1.0 + # get foreground mask inside box and center prior + fg_mask_inboxes = matching_matrix.sum(1) > 0.0 + valid_mask[valid_mask.clone()] = fg_mask_inboxes + + matched_gt_inds = matching_matrix[fg_mask_inboxes, :].argmax(1) + matched_pred_ious = (matching_matrix * pairwise_ious).sum(1)[fg_mask_inboxes] + return matched_pred_ious, matched_gt_inds diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/gfl_head.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/gfl_head.py new file mode 100644 index 0000000000..e26e083b37 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/gfl_head.py @@ -0,0 +1,700 @@ +import math + +import numpy as np +import torch +import torch.distributed as dist +import torch.nn as nn +import torch.nn.functional as F + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util import ( + bbox2distance, + distance2bbox, + images_to_levels, + multi_apply, +) + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform.warp import warp_boxes +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.loss.gfocal_loss\ + import DistributionFocalLoss, QualityFocalLoss +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.loss.iou_loss import GIoULoss, bbox_overlaps +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import ConvModule +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.init_weights import normal_init +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.nms import multiclass_nms +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.scale import Scale +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head.assigner.atss_assigner\ + import ATSSAssigner + + +def reduce_mean(tensor): + if not (dist.is_available() and dist.is_initialized()): + return tensor + tensor = tensor.clone() + dist.all_reduce(tensor.true_divide(dist.get_world_size()), op=dist.ReduceOp.SUM) + return tensor + + +class Integral(nn.Module): + """A fixed layer for calculating integral result from distribution. + This layer calculates the target location by :math: `sum{P(y_i) * y_i}`, + P(y_i) denotes the softmax vector that represents the discrete distribution + y_i denotes the discrete set, usually {0, 1, 2, ..., reg_max} + Args: + reg_max (int): The maximal value of the discrete set. Default: 16. You + may want to reset it according to your new dataset or related + settings. 
+ """ + + def __init__(self, reg_max=16): + super(Integral, self).__init__() + self.reg_max = reg_max + self.register_buffer( + "project", torch.linspace(0, self.reg_max, self.reg_max + 1) + ) + + def forward(self, x): + """Forward feature from the regression head to get integral result of + bounding box location. + Args: + x (Tensor): Features of the regression head, shape (N, 4*(n+1)), + n is self.reg_max. + Returns: + x (Tensor): Integral result of box locations, i.e., distance + offsets from the box center in four directions, shape (N, 4). + """ + shape = x.size() + x = F.softmax(x.reshape(*shape[:-1], 4, self.reg_max + 1), dim=-1) + x = F.linear(x, self.project.type_as(x)).reshape(*shape[:-1], 4) + return x + + +class GFLHead(nn.Module): + """Generalized Focal Loss: Learning Qualified and Distributed Bounding + Boxes for Dense Object Detection. + + GFL head structure is similar with ATSS, however GFL uses + 1) joint representation for classification and localization quality, and + 2) flexible General distribution for bounding box locations, + which are supervised by + Quality Focal Loss (QFL) and Distribution Focal Loss (DFL), respectively + + https://arxiv.org/abs/2006.04388 + + :param num_classes: Number of categories excluding the background category. + :param loss: Config of all loss functions. + :param input_channel: Number of channels in the input feature map. + :param feat_channels: Number of conv layers in cls and reg tower. Default: 4. + :param stacked_convs: Number of conv layers in cls and reg tower. Default: 4. + :param octave_base_scale: Scale factor of grid cells. + :param strides: Down sample strides of all level feature map + :param conv_cfg: Dictionary to construct and config conv layer. Default: None. + :param norm_cfg: Dictionary to construct and config norm layer. + :param reg_max: Max value of integral set :math: `{0, ..., reg_max}` + in QFL setting. Default: 16. 
+ :param kwargs: + """ + + def __init__( + self, + num_classes, + loss, + input_channel, + feat_channels=256, + stacked_convs=4, + octave_base_scale=4, + strides=[8, 16, 32], + conv_cfg=None, + norm_cfg=dict(type="GN", num_groups=32, requires_grad=True), + reg_max=16, + **kwargs + ): + super(GFLHead, self).__init__() + self.num_classes = num_classes + self.in_channels = input_channel + self.feat_channels = feat_channels + self.stacked_convs = stacked_convs + self.grid_cell_scale = octave_base_scale + self.strides = strides + self.reg_max = reg_max + + self.loss_cfg = loss + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.use_sigmoid = self.loss_cfg.loss_qfl.use_sigmoid + if self.use_sigmoid: + self.cls_out_channels = num_classes + else: + self.cls_out_channels = num_classes + 1 + + self.assigner = ATSSAssigner(topk=9) + self.distribution_project = Integral(self.reg_max) + + self.loss_qfl = QualityFocalLoss( + use_sigmoid=self.use_sigmoid, + beta=self.loss_cfg.loss_qfl.beta, + loss_weight=self.loss_cfg.loss_qfl.loss_weight, + ) + self.loss_dfl = DistributionFocalLoss( + loss_weight=self.loss_cfg.loss_dfl.loss_weight + ) + self.loss_bbox = GIoULoss(loss_weight=self.loss_cfg.loss_bbox.loss_weight) + self._init_layers() + self.init_weights() + + def _init_layers(self): + self.relu = nn.ReLU(inplace=True) + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + self.cls_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + ) + ) + self.reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + ) + ) + self.gfl_cls = nn.Conv2d( + self.feat_channels, self.cls_out_channels, 3, padding=1 + ) + self.gfl_reg = nn.Conv2d( + self.feat_channels, 4 * (self.reg_max + 1), 3, padding=1 + ) + self.scales = nn.ModuleList([Scale(1.0) for _ in self.strides]) + + def init_weights(self): + for m in self.cls_convs: + normal_init(m.conv, std=0.01) + for m in self.reg_convs: + normal_init(m.conv, std=0.01) + bias_cls = -4.595 + normal_init(self.gfl_cls, std=0.01, bias=bias_cls) + normal_init(self.gfl_reg, std=0.01) + + def forward(self, feats): + if torch.onnx.is_in_onnx_export(): + return self._forward_onnx(feats) + outputs = [] + for x, scale in zip(feats, self.scales): + cls_feat = x + reg_feat = x + for cls_conv in self.cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs: + reg_feat = reg_conv(reg_feat) + cls_score = self.gfl_cls(cls_feat) + bbox_pred = scale(self.gfl_reg(reg_feat)).float() + output = torch.cat([cls_score, bbox_pred], dim=1) + outputs.append(output.flatten(start_dim=2)) + outputs = torch.cat(outputs, dim=2).permute(0, 2, 1) + return outputs + + def loss(self, preds, gt_meta): + cls_scores, bbox_preds = preds.split( + [self.num_classes, 4 * (self.reg_max + 1)], dim=-1 + ) + device = cls_scores.device + gt_bboxes = gt_meta["gt_bboxes"] + gt_labels = gt_meta["gt_labels"] + input_height, input_width = gt_meta["img"].shape[2:] + gt_bboxes_ignore = None + + featmap_sizes = [ + (math.ceil(input_height / stride), math.ceil(input_width) / stride) + for stride in self.strides + ] + + cls_reg_targets = self.target_assign( + cls_scores, + bbox_preds, + featmap_sizes, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + device=device, + ) + if cls_reg_targets is None: + return None + + ( + 
cls_preds_list, + reg_preds_list, + grid_cells_list, + labels_list, + label_weights_list, + bbox_targets_list, + bbox_weights_list, + num_total_pos, + num_total_neg, + ) = cls_reg_targets + + num_total_samples = reduce_mean(torch.tensor(num_total_pos).to(device)).item() + num_total_samples = max(num_total_samples, 1.0) + + losses_qfl, losses_bbox, losses_dfl, avg_factor = multi_apply( + self.loss_single, + grid_cells_list, + cls_preds_list, + reg_preds_list, + labels_list, + label_weights_list, + bbox_targets_list, + self.strides, + num_total_samples=num_total_samples, + ) + + avg_factor = sum(avg_factor) + avg_factor = reduce_mean(avg_factor).item() + if avg_factor <= 0: + loss_qfl = torch.tensor(0, dtype=torch.float32, requires_grad=True).to( + device + ) + loss_bbox = torch.tensor(0, dtype=torch.float32, requires_grad=True).to( + device + ) + loss_dfl = torch.tensor(0, dtype=torch.float32, requires_grad=True).to( + device + ) + else: + losses_bbox = list(map(lambda x: x / avg_factor, losses_bbox)) + losses_dfl = list(map(lambda x: x / avg_factor, losses_dfl)) + + loss_qfl = sum(losses_qfl) + loss_bbox = sum(losses_bbox) + loss_dfl = sum(losses_dfl) + + loss = loss_qfl + loss_bbox + loss_dfl + loss_states = dict(loss_qfl=loss_qfl, loss_bbox=loss_bbox, loss_dfl=loss_dfl) + + return loss, loss_states + + def loss_single( + self, + grid_cells, + cls_score, + bbox_pred, + labels, + label_weights, + bbox_targets, + stride, + num_total_samples, + ): + grid_cells = grid_cells.reshape(-1, 4) + cls_score = cls_score.reshape(-1, self.cls_out_channels) + bbox_pred = bbox_pred.reshape(-1, 4 * (self.reg_max + 1)) + bbox_targets = bbox_targets.reshape(-1, 4) + labels = labels.reshape(-1) + label_weights = label_weights.reshape(-1) + + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + bg_class_ind = self.num_classes + pos_inds = torch.nonzero( + (labels >= 0) & (labels < bg_class_ind), as_tuple=False + ).squeeze(1) + + score = label_weights.new_zeros(labels.shape) + + if len(pos_inds) > 0: + pos_bbox_targets = bbox_targets[pos_inds] + pos_bbox_pred = bbox_pred[pos_inds] # (n, 4 * (reg_max + 1)) + pos_grid_cells = grid_cells[pos_inds] + pos_grid_cell_centers = self.grid_cells_to_center(pos_grid_cells) / stride + + weight_targets = cls_score.detach().sigmoid() + weight_targets = weight_targets.max(dim=1)[0][pos_inds] + pos_bbox_pred_corners = self.distribution_project(pos_bbox_pred) + pos_decode_bbox_pred = distance2bbox( + pos_grid_cell_centers, pos_bbox_pred_corners + ) + pos_decode_bbox_targets = pos_bbox_targets / stride + score[pos_inds] = bbox_overlaps( + pos_decode_bbox_pred.detach(), pos_decode_bbox_targets, is_aligned=True + ) + pred_corners = pos_bbox_pred.reshape(-1, self.reg_max + 1) + target_corners = bbox2distance( + pos_grid_cell_centers, pos_decode_bbox_targets, self.reg_max + ).reshape(-1) + + # regression loss + loss_bbox = self.loss_bbox( + pos_decode_bbox_pred, + pos_decode_bbox_targets, + weight=weight_targets, + avg_factor=1.0, + ) + + # dfl loss + loss_dfl = self.loss_dfl( + pred_corners, + target_corners, + weight=weight_targets[:, None].expand(-1, 4).reshape(-1), + avg_factor=4.0, + ) + else: + loss_bbox = bbox_pred.sum() * 0 + loss_dfl = bbox_pred.sum() * 0 + weight_targets = torch.tensor(0).to(cls_score.device) + + # qfl loss + loss_qfl = self.loss_qfl( + cls_score, + (labels, score), + weight=label_weights, + avg_factor=num_total_samples, + ) + + return loss_qfl, loss_bbox, loss_dfl, weight_targets.sum() + + def target_assign( + self, + cls_preds, + reg_preds, + 
featmap_sizes, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + device, + ): + """ + Assign target for a batch of images. + :param batch_size: num of images in one batch + :param featmap_sizes: A list of all grid cell boxes in all image + :param gt_bboxes_list: A list of ground truth boxes in all image + :param gt_bboxes_ignore_list: A list of all ignored boxes in all image + :param gt_labels_list: A list of all ground truth label in all image + :param device: pytorch device + :return: Assign results of all images. + """ + batch_size = cls_preds.shape[0] + # get grid cells of one image + multi_level_grid_cells = [ + self.get_grid_cells( + featmap_sizes[i], + self.grid_cell_scale, + stride, + dtype=torch.float32, + device=device, + ) + for i, stride in enumerate(self.strides) + ] + mlvl_grid_cells_list = [multi_level_grid_cells for i in range(batch_size)] + + # pixel cell number of multi-level feature maps + num_level_cells = [grid_cells.size(0) for grid_cells in mlvl_grid_cells_list[0]] + num_level_cells_list = [num_level_cells] * batch_size + # concat all level cells and to a single tensor + for i in range(batch_size): + mlvl_grid_cells_list[i] = torch.cat(mlvl_grid_cells_list[i]) + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(batch_size)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(batch_size)] + # target assign on all images, get list of tensors + # list length = batch size + # tensor first dim = num of all grid cell + ( + all_grid_cells, + all_labels, + all_label_weights, + all_bbox_targets, + all_bbox_weights, + pos_inds_list, + neg_inds_list, + ) = multi_apply( + self.target_assign_single_img, + mlvl_grid_cells_list, + num_level_cells_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + ) + # no valid cells + if any([labels is None for labels in all_labels]): + return None + # sampled cells of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + # merge list of targets tensors into one batch then split to multi levels + mlvl_cls_preds = images_to_levels([c for c in cls_preds], num_level_cells) + mlvl_reg_preds = images_to_levels([r for r in reg_preds], num_level_cells) + mlvl_grid_cells = images_to_levels(all_grid_cells, num_level_cells) + mlvl_labels = images_to_levels(all_labels, num_level_cells) + mlvl_label_weights = images_to_levels(all_label_weights, num_level_cells) + mlvl_bbox_targets = images_to_levels(all_bbox_targets, num_level_cells) + mlvl_bbox_weights = images_to_levels(all_bbox_weights, num_level_cells) + return ( + mlvl_cls_preds, + mlvl_reg_preds, + mlvl_grid_cells, + mlvl_labels, + mlvl_label_weights, + mlvl_bbox_targets, + mlvl_bbox_weights, + num_total_pos, + num_total_neg, + ) + + def target_assign_single_img( + self, grid_cells, num_level_cells, gt_bboxes, gt_bboxes_ignore, gt_labels + ): + """ + Using ATSS Assigner to assign target on one image. 
+ :param grid_cells: Grid cell boxes of all pixels on feature map + :param num_level_cells: numbers of grid cells on each level's feature map + :param gt_bboxes: Ground truth boxes + :param gt_bboxes_ignore: Ground truths which are ignored + :param gt_labels: Ground truth labels + :return: Assign results of a single image + """ + device = grid_cells.device + gt_bboxes = torch.from_numpy(gt_bboxes).to(device) + gt_labels = torch.from_numpy(gt_labels).to(device) + + assign_result = self.assigner.assign( + grid_cells, num_level_cells, gt_bboxes, gt_bboxes_ignore, gt_labels + ) + + pos_inds, neg_inds, pos_gt_bboxes, pos_assigned_gt_inds = self.sample( + assign_result, gt_bboxes + ) + + num_cells = grid_cells.shape[0] + bbox_targets = torch.zeros_like(grid_cells) + bbox_weights = torch.zeros_like(grid_cells) + labels = grid_cells.new_full((num_cells,), self.num_classes, dtype=torch.long) + label_weights = grid_cells.new_zeros(num_cells, dtype=torch.float) + + if len(pos_inds) > 0: + pos_bbox_targets = pos_gt_bboxes + bbox_targets[pos_inds, :] = pos_bbox_targets + bbox_weights[pos_inds, :] = 1.0 + if gt_labels is None: + # Only rpn gives gt_labels as None + # Foreground is the first class + labels[pos_inds] = 0 + else: + labels[pos_inds] = gt_labels[pos_assigned_gt_inds] + + label_weights[pos_inds] = 1.0 + if len(neg_inds) > 0: + label_weights[neg_inds] = 1.0 + + return ( + grid_cells, + labels, + label_weights, + bbox_targets, + bbox_weights, + pos_inds, + neg_inds, + ) + + def sample(self, assign_result, gt_bboxes): + pos_inds = ( + torch.nonzero(assign_result.gt_inds > 0, as_tuple=False) + .squeeze(-1) + .unique() + ) + neg_inds = ( + torch.nonzero(assign_result.gt_inds == 0, as_tuple=False) + .squeeze(-1) + .unique() + ) + pos_assigned_gt_inds = assign_result.gt_inds[pos_inds] - 1 + + if gt_bboxes.numel() == 0: + # hack for index error case + assert pos_assigned_gt_inds.numel() == 0 + pos_gt_bboxes = torch.empty_like(gt_bboxes).view(-1, 4) + else: + if len(gt_bboxes.shape) < 2: + gt_bboxes = gt_bboxes.view(-1, 4) + pos_gt_bboxes = gt_bboxes[pos_assigned_gt_inds, :] + return pos_inds, neg_inds, pos_gt_bboxes, pos_assigned_gt_inds + + def post_process(self, preds, meta): + cls_scores, bbox_preds = preds.split( + [self.num_classes, 4 * (self.reg_max + 1)], dim=-1 + ) + result_list = self.get_bboxes(cls_scores, bbox_preds, meta) + det_results = {} + warp_matrixes = ( + meta["warp_matrix"] + if isinstance(meta["warp_matrix"], list) + else meta["warp_matrix"] + ) + img_heights = ( + meta["img_info"]["height"].cpu().numpy() + if isinstance(meta["img_info"]["height"], torch.Tensor) + else meta["img_info"]["height"] + ) + img_widths = ( + meta["img_info"]["width"].cpu().numpy() + if isinstance(meta["img_info"]["width"], torch.Tensor) + else meta["img_info"]["width"] + ) + img_ids = ( + meta["img_info"]["id"].cpu().numpy() + if isinstance(meta["img_info"]["id"], torch.Tensor) + else meta["img_info"]["id"] + ) + + for result, img_width, img_height, img_id, warp_matrix in zip( + result_list, img_widths, img_heights, img_ids, warp_matrixes + ): + det_result = {} + det_bboxes, det_labels = result + det_bboxes = det_bboxes.detach().cpu().numpy() + det_bboxes[:, :4] = warp_boxes( + det_bboxes[:, :4], np.linalg.inv(warp_matrix), img_width, img_height + ) + classes = det_labels.detach().cpu().numpy() + for i in range(self.num_classes): + inds = classes == i + det_result[i] = np.concatenate( + [ + det_bboxes[inds, :4].astype(np.float32), + det_bboxes[inds, 4:5].astype(np.float32), + ], + axis=1, + ).tolist() + 
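# det_result maps each class index to a list of [x1, y1, x2, y2, score] rows,
# with the box coordinates already warped back to the original image frame
# via the inverse warp matrix applied above.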
det_results[img_id] = det_result + return det_results + + def get_bboxes(self, cls_preds, reg_preds, img_metas): + """Decode the outputs to bboxes. + Args: + cls_preds (Tensor): Shape (num_imgs, num_points, num_classes). + reg_preds (Tensor): Shape (num_imgs, num_points, 4 * (regmax + 1)). + img_metas (dict): Dict of image info. + + Returns: + results_list (list[tuple]): List of detection bboxes and labels. + """ + device = cls_preds.device + b = cls_preds.shape[0] + input_height, input_width = img_metas["img"].shape[2:] + input_shape = (input_height, input_width) + + featmap_sizes = [ + (math.ceil(input_height / stride), math.ceil(input_width) / stride) + for stride in self.strides + ] + # get grid cells of one image + mlvl_center_priors = [] + for i, stride in enumerate(self.strides): + y, x = self.get_single_level_center_point( + featmap_sizes[i], stride, torch.float32, device + ) + strides = x.new_full((x.shape[0],), stride) + proiors = torch.stack([x, y, strides, strides], dim=-1) + mlvl_center_priors.append(proiors.unsqueeze(0).repeat(b, 1, 1)) + + center_priors = torch.cat(mlvl_center_priors, dim=1) + dis_preds = self.distribution_project(reg_preds) * center_priors[..., 2, None] + bboxes = distance2bbox(center_priors[..., :2], dis_preds, max_shape=input_shape) + scores = cls_preds.sigmoid() + result_list = [] + for i in range(b): + # add a dummy background class at the end of all labels + # same with mmdetection2.0 + score, bbox = scores[i], bboxes[i] + padding = score.new_zeros(score.shape[0], 1) + score = torch.cat([score, padding], dim=1) + results = multiclass_nms( + bbox, + score, + score_thr=0.05, + nms_cfg=dict(type="nms", iou_threshold=0.6), + max_num=100, + ) + result_list.append(results) + return result_list + + def get_single_level_center_point( + self, featmap_size, stride, dtype, device, flatten=True + ): + """ + Generate pixel centers of a single stage feature map. + :param featmap_size: height and width of the feature map + :param stride: down sample stride of the feature map + :param dtype: data type of the tensors + :param device: device of the tensors + :param flatten: flatten the x and y tensors + :return: y and x of the center points + """ + h, w = featmap_size + x_range = (torch.arange(w, dtype=dtype, device=device) + 0.5) * stride + y_range = (torch.arange(h, dtype=dtype, device=device) + 0.5) * stride + y, x = torch.meshgrid(y_range, x_range) + if flatten: + y = y.flatten() + x = x.flatten() + return y, x + + def get_grid_cells(self, featmap_size, scale, stride, dtype, device): + """ + Generate grid cells of a feature map for target assignment. + :param featmap_size: Size of a single level feature map. + :param scale: Grid cell scale. + :param stride: Down sample stride of the feature map. + :param dtype: Data type of the tensors. + :param device: Device of the tensors. + :return: Grid_cells xyxy position. 
Size should be [feat_w * feat_h, 4] + """ + cell_size = stride * scale + y, x = self.get_single_level_center_point( + featmap_size, stride, dtype, device, flatten=True + ) + grid_cells = torch.stack( + [ + x - 0.5 * cell_size, + y - 0.5 * cell_size, + x + 0.5 * cell_size, + y + 0.5 * cell_size, + ], + dim=-1, + ) + return grid_cells + + def grid_cells_to_center(self, grid_cells): + """ + Get center location of each gird cell + :param grid_cells: grid cells of a feature map + :return: center points + """ + cells_cx = (grid_cells[:, 2] + grid_cells[:, 0]) / 2 + cells_cy = (grid_cells[:, 3] + grid_cells[:, 1]) / 2 + return torch.stack([cells_cx, cells_cy], dim=-1) + + def _forward_onnx(self, feats): + """only used for onnx export""" + outputs = [] + for x, scale in zip(feats, self.scales): + cls_feat = x + reg_feat = x + for cls_conv in self.cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs: + reg_feat = reg_conv(reg_feat) + cls_pred = self.gfl_cls(cls_feat) + reg_pred = scale(self.gfl_reg(reg_feat)) + cls_pred = cls_pred.sigmoid() + out = torch.cat([cls_pred, reg_pred], dim=1) + outputs.append(out.flatten(start_dim=2)) + return torch.cat(outputs, dim=2).permute(0, 2, 1) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_head.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_head.py new file mode 100755 index 0000000000..01eac4146e --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_head.py @@ -0,0 +1,185 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
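The GFL head above predicts each box edge as a discrete distribution over reg_max + 1 bins: Integral takes the softmax expectation of that distribution, and distance2bbox turns the four expected distances back into corner coordinates. A minimal standalone sketch of that decoding step, with illustrative shapes and values that are not part of the patch:

import torch
import torch.nn.functional as F

reg_max = 7
stride = 8.0
center = torch.tensor([64.0, 64.0])            # (cx, cy) of one prior
reg_logits = torch.randn(4, reg_max + 1)       # raw head output for (l, t, r, b)

# Integral: expectation of the per-edge softmax distribution over bins 0..reg_max
bins = torch.arange(reg_max + 1, dtype=torch.float32)
distances = (F.softmax(reg_logits, dim=-1) * bins).sum(dim=-1) * stride

# distance2bbox: distances from the center to the four edges -> (x1, y1, x2, y2)
x1y1 = center - distances[:2]                  # cx - left, cy - top
x2y2 = center + distances[2:]                  # cx + right, cy + bottom
box = torch.cat([x1y1, x2y2])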
+ +import torch +import torch.nn as nn + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import ConvModule, DepthwiseConvModule +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.init_weights import normal_init +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head.gfl_head import GFLHead + + +class NanoDetHead(GFLHead): + """ + Modified from GFL, use same loss functions but much lightweight convolution heads + """ + + def __init__( + self, + num_classes, + loss, + input_channel, + stacked_convs=2, + octave_base_scale=5, + conv_type="DWConv", + conv_cfg=None, + norm_cfg=dict(type="BN"), + reg_max=16, + share_cls_reg=False, + activation="LeakyReLU", + feat_channels=256, + strides=[8, 16, 32], + **kwargs + ): + self.share_cls_reg = share_cls_reg + self.activation = activation + self.ConvModule = ConvModule if conv_type == "Conv" else DepthwiseConvModule + super(NanoDetHead, self).__init__( + num_classes, + loss, + input_channel, + feat_channels, + stacked_convs, + octave_base_scale, + strides, + conv_cfg, + norm_cfg, + reg_max, + **kwargs + ) + + def _init_layers(self): + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for _ in self.strides: + cls_convs, reg_convs = self._buid_not_shared_head() + self.cls_convs.append(cls_convs) + self.reg_convs.append(reg_convs) + + self.gfl_cls = nn.ModuleList( + [ + nn.Conv2d( + self.feat_channels, + self.cls_out_channels + 4 * (self.reg_max + 1) + if self.share_cls_reg + else self.cls_out_channels, + 1, + padding=0, + ) + for _ in self.strides + ] + ) + # TODO: if + self.gfl_reg = nn.ModuleList( + [ + nn.Conv2d(self.feat_channels, 4 * (self.reg_max + 1), 1, padding=0) + for _ in self.strides + ] + ) + + def _buid_not_shared_head(self): + cls_convs = nn.ModuleList() + reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + cls_convs.append( + self.ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + norm_cfg=self.norm_cfg, + bias=self.norm_cfg is None, + activation=self.activation, + ) + ) + if not self.share_cls_reg: + reg_convs.append( + self.ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + norm_cfg=self.norm_cfg, + bias=self.norm_cfg is None, + activation=self.activation, + ) + ) + + return cls_convs, reg_convs + + def init_weights(self): + for m in self.cls_convs.modules(): + if isinstance(m, nn.Conv2d): + normal_init(m, std=0.01) + for m in self.reg_convs.modules(): + if isinstance(m, nn.Conv2d): + normal_init(m, std=0.01) + # init cls head with confidence = 0.01 + bias_cls = -4.595 + for i in range(len(self.strides)): + normal_init(self.gfl_cls[i], std=0.01, bias=bias_cls) + normal_init(self.gfl_reg[i], std=0.01) + print("Finish initialize NanoDet Head.") + + def forward(self, feats): + if torch.onnx.is_in_onnx_export(): + return self._forward_onnx(feats) + outputs = [] + for x, cls_convs, reg_convs, gfl_cls, gfl_reg in zip( + feats, self.cls_convs, self.reg_convs, self.gfl_cls, self.gfl_reg + ): + cls_feat = x + reg_feat = x + for cls_conv in cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in reg_convs: + reg_feat = reg_conv(reg_feat) + if self.share_cls_reg: + output = gfl_cls(cls_feat) + else: + cls_score = gfl_cls(cls_feat) + bbox_pred = gfl_reg(reg_feat) + output = torch.cat([cls_score, bbox_pred], dim=1) + outputs.append(output.flatten(start_dim=2)) + outputs = torch.cat(outputs, dim=2).permute(0, 2, 1) 
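# after the flatten / concat / permute above, outputs has shape
# (batch, total number of priors over all levels, cls_out_channels + 4 * (reg_max + 1))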
+ return outputs + + def _forward_onnx(self, feats): + """only used for onnx export""" + outputs = [] + for x, cls_convs, reg_convs, gfl_cls, gfl_reg in zip( + feats, self.cls_convs, self.reg_convs, self.gfl_cls, self.gfl_reg + ): + cls_feat = x + reg_feat = x + for cls_conv in cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in reg_convs: + reg_feat = reg_conv(reg_feat) + if self.share_cls_reg: + output = gfl_cls(cls_feat) + cls_pred, reg_pred = output.split( + [self.num_classes, 4 * (self.reg_max + 1)], dim=1 + ) + else: + cls_pred = gfl_cls(cls_feat) + reg_pred = gfl_reg(reg_feat) + + cls_pred = cls_pred.sigmoid() + out = torch.cat([cls_pred, reg_pred], dim=1) + outputs.append(out.flatten(start_dim=2)) + return torch.cat(outputs, dim=2).permute(0, 2, 1) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_plus_head.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_plus_head.py new file mode 100644 index 0000000000..5d853d5ecf --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/nanodet_plus_head.py @@ -0,0 +1,510 @@ +import math + +import numpy as np +import torch +import torch.nn as nn + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util\ + import bbox2distance, distance2bbox, multi_apply +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.transform.warp import warp_boxes +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.loss.gfocal_loss \ + import DistributionFocalLoss, QualityFocalLoss +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.loss.iou_loss import GIoULoss +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv \ + import ConvModule, DepthwiseConvModule +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.init_weights import normal_init +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.nms import multiclass_nms +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head.assigner.dsl_assigner \ + import DynamicSoftLabelAssigner +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.head.gfl_head import Integral, reduce_mean + + +class NanoDetPlusHead(nn.Module): + """Detection head used in NanoDet-Plus. + + Args: + num_classes (int): Number of categories excluding the background + category. + loss (dict): Loss config. + input_channel (int): Number of channels of the input feature. + feat_channels (int): Number of channels of the feature. + Default: 96. + stacked_convs (int): Number of conv layers in the stacked convs. + Default: 2. + kernel_size (int): Size of the convolving kernel. Default: 5. + strides (list[int]): Strides of input multi-level feature maps. + Default: [8, 16, 32]. + conv_type (str): Type of the convolution. + Default: "DWConv". + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN'). + reg_max (int): The maximal value of the discrete set. Default: 7. + activation (str): Type of activation function. Default: "LeakyReLU". + assigner_cfg (dict): Config dict of the assigner. Default: dict(topk=13). 
+ """ + + def __init__( + self, + num_classes, + loss, + input_channel, + feat_channels=96, + stacked_convs=2, + kernel_size=5, + strides=[8, 16, 32], + conv_type="DWConv", + norm_cfg=dict(type="BN"), + reg_max=7, + activation="LeakyReLU", + assigner_cfg=dict(topk=13), + **kwargs + ): + super(NanoDetPlusHead, self).__init__() + self.num_classes = num_classes + self.in_channels = input_channel + self.feat_channels = feat_channels + self.stacked_convs = stacked_convs + self.kernel_size = kernel_size + self.strides = strides + self.reg_max = reg_max + self.activation = activation + self.ConvModule = ConvModule if conv_type == "Conv" else DepthwiseConvModule + + self.loss_cfg = loss + self.norm_cfg = norm_cfg + + self.assigner = DynamicSoftLabelAssigner(**assigner_cfg) + self.distribution_project = Integral(self.reg_max) + + self.loss_qfl = QualityFocalLoss( + beta=self.loss_cfg.loss_qfl.beta, + loss_weight=self.loss_cfg.loss_qfl.loss_weight, + ) + self.loss_dfl = DistributionFocalLoss( + loss_weight=self.loss_cfg.loss_dfl.loss_weight + ) + self.loss_bbox = GIoULoss(loss_weight=self.loss_cfg.loss_bbox.loss_weight) + self._init_layers() + self.init_weights() + + def _init_layers(self): + self.cls_convs = nn.ModuleList() + for _ in self.strides: + cls_convs = self._buid_not_shared_head() + self.cls_convs.append(cls_convs) + + self.gfl_cls = nn.ModuleList( + [ + nn.Conv2d( + self.feat_channels, + self.num_classes + 4 * (self.reg_max + 1), + 1, + padding=0, + ) + for _ in self.strides + ] + ) + + def _buid_not_shared_head(self): + cls_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + cls_convs.append( + self.ConvModule( + chn, + self.feat_channels, + self.kernel_size, + stride=1, + padding=self.kernel_size // 2, + norm_cfg=self.norm_cfg, + bias=self.norm_cfg is None, + activation=self.activation, + ) + ) + return cls_convs + + def init_weights(self): + for m in self.cls_convs.modules(): + if isinstance(m, nn.Conv2d): + normal_init(m, std=0.01) + # init cls head with confidence = 0.01 + bias_cls = -4.595 + for i in range(len(self.strides)): + normal_init(self.gfl_cls[i], std=0.01, bias=bias_cls) + print("Finish initialize NanoDet-Plus Head.") + + def forward(self, feats): + if torch.onnx.is_in_onnx_export(): + return self._forward_onnx(feats) + outputs = [] + for feat, cls_convs, gfl_cls in zip( + feats, + self.cls_convs, + self.gfl_cls, + ): + for conv in cls_convs: + feat = conv(feat) + output = gfl_cls(feat) + outputs.append(output.flatten(start_dim=2)) + outputs = torch.cat(outputs, dim=2).permute(0, 2, 1) + return outputs + + def loss(self, preds, gt_meta, aux_preds=None): + """Compute losses. + Args: + preds (Tensor): Prediction output. + gt_meta (dict): Ground truth information. + aux_preds (tuple[Tensor], optional): Auxiliary head prediction output. + + Returns: + loss (Tensor): Loss tensor. + loss_states (dict): State dict of each loss. 
+ """ + gt_bboxes = gt_meta["gt_bboxes"] + gt_labels = gt_meta["gt_labels"] + device = preds.device + batch_size = preds.shape[0] + input_height, input_width = gt_meta["img"].shape[2:] + featmap_sizes = [ + (math.ceil(input_height / stride), math.ceil(input_width) / stride) + for stride in self.strides + ] + # get grid cells of one image + mlvl_center_priors = [ + self.get_single_level_center_priors( + batch_size, + featmap_sizes[i], + stride, + dtype=torch.float32, + device=device, + ) + for i, stride in enumerate(self.strides) + ] + center_priors = torch.cat(mlvl_center_priors, dim=1) + + cls_preds, reg_preds = preds.split( + [self.num_classes, 4 * (self.reg_max + 1)], dim=-1 + ) + dis_preds = self.distribution_project(reg_preds) * center_priors[..., 2, None] + decoded_bboxes = distance2bbox(center_priors[..., :2], dis_preds) + + if aux_preds is not None: + # use auxiliary head to assign + aux_cls_preds, aux_reg_preds = aux_preds.split( + [self.num_classes, 4 * (self.reg_max + 1)], dim=-1 + ) + aux_dis_preds = ( + self.distribution_project(aux_reg_preds) * center_priors[..., 2, None] + ) + aux_decoded_bboxes = distance2bbox(center_priors[..., :2], aux_dis_preds) + batch_assign_res = multi_apply( + self.target_assign_single_img, + aux_cls_preds.detach(), + center_priors, + aux_decoded_bboxes.detach(), + gt_bboxes, + gt_labels, + ) + else: + # use self prediction to assign + batch_assign_res = multi_apply( + self.target_assign_single_img, + cls_preds.detach(), + center_priors, + decoded_bboxes.detach(), + gt_bboxes, + gt_labels, + ) + + loss, loss_states = self._get_loss_from_assign( + cls_preds, reg_preds, decoded_bboxes, batch_assign_res + ) + + if aux_preds is not None: + aux_loss, aux_loss_states = self._get_loss_from_assign( + aux_cls_preds, aux_reg_preds, aux_decoded_bboxes, batch_assign_res + ) + loss = loss + aux_loss + for k, v in aux_loss_states.items(): + loss_states["aux_" + k] = v + return loss, loss_states + + def _get_loss_from_assign(self, cls_preds, reg_preds, decoded_bboxes, assign): + device = cls_preds.device + labels, label_scores, bbox_targets, dist_targets, num_pos = assign + num_total_samples = max( + reduce_mean(torch.tensor(sum(num_pos)).to(device)).item(), 1.0 + ) + + labels = torch.cat(labels, dim=0) + label_scores = torch.cat(label_scores, dim=0) + bbox_targets = torch.cat(bbox_targets, dim=0) + cls_preds = cls_preds.reshape(-1, self.num_classes) + reg_preds = reg_preds.reshape(-1, 4 * (self.reg_max + 1)) + decoded_bboxes = decoded_bboxes.reshape(-1, 4) + loss_qfl = self.loss_qfl( + cls_preds, (labels, label_scores), avg_factor=num_total_samples + ) + + pos_inds = torch.nonzero( + (labels >= 0) & (labels < self.num_classes), as_tuple=False + ).squeeze(1) + + if len(pos_inds) > 0: + weight_targets = cls_preds[pos_inds].detach().sigmoid().max(dim=1)[0] + bbox_avg_factor = max(reduce_mean(weight_targets.sum()).item(), 1.0) + + loss_bbox = self.loss_bbox( + decoded_bboxes[pos_inds], + bbox_targets[pos_inds], + weight=weight_targets, + avg_factor=bbox_avg_factor, + ) + + dist_targets = torch.cat(dist_targets, dim=0) + loss_dfl = self.loss_dfl( + reg_preds[pos_inds].reshape(-1, self.reg_max + 1), + dist_targets[pos_inds].reshape(-1), + weight=weight_targets[:, None].expand(-1, 4).reshape(-1), + avg_factor=4.0 * bbox_avg_factor, + ) + else: + loss_bbox = reg_preds.sum() * 0 + loss_dfl = reg_preds.sum() * 0 + + loss = loss_qfl + loss_bbox + loss_dfl + loss_states = dict(loss_qfl=loss_qfl, loss_bbox=loss_bbox, loss_dfl=loss_dfl) + return loss, loss_states + + 
@torch.no_grad() + def target_assign_single_img( + self, cls_preds, center_priors, decoded_bboxes, gt_bboxes, gt_labels + ): + """Compute classification, regression, and objectness targets for + priors in a single image. + Args: + cls_preds (Tensor): Classification predictions of one image, + a 2D-Tensor with shape [num_priors, num_classes] + center_priors (Tensor): All priors of one image, a 2D-Tensor with + shape [num_priors, 4] in [cx, xy, stride_w, stride_y] format. + decoded_bboxes (Tensor): Decoded bboxes predictions of one image, + a 2D-Tensor with shape [num_priors, 4] in [tl_x, tl_y, + br_x, br_y] format. + gt_bboxes (Tensor): Ground truth bboxes of one image, a 2D-Tensor + with shape [num_gts, 4] in [tl_x, tl_y, br_x, br_y] format. + gt_labels (Tensor): Ground truth labels of one image, a Tensor + with shape [num_gts]. + """ + + num_priors = center_priors.size(0) + device = center_priors.device + gt_bboxes = torch.from_numpy(gt_bboxes).to(device) + gt_labels = torch.from_numpy(gt_labels).to(device) + num_gts = gt_labels.size(0) + gt_bboxes = gt_bboxes.to(decoded_bboxes.dtype) + + bbox_targets = torch.zeros_like(center_priors) + dist_targets = torch.zeros_like(center_priors) + labels = center_priors.new_full( + (num_priors,), self.num_classes, dtype=torch.long + ) + label_scores = center_priors.new_zeros(labels.shape, dtype=torch.float) + # No target + if num_gts == 0: + return labels, label_scores, bbox_targets, dist_targets, 0 + + assign_result = self.assigner.assign( + cls_preds.sigmoid(), center_priors, decoded_bboxes, gt_bboxes, gt_labels + ) + pos_inds, neg_inds, pos_gt_bboxes, pos_assigned_gt_inds = self.sample( + assign_result, gt_bboxes + ) + num_pos_per_img = pos_inds.size(0) + pos_ious = assign_result.max_overlaps[pos_inds] + + if len(pos_inds) > 0: + bbox_targets[pos_inds, :] = pos_gt_bboxes + dist_targets[pos_inds, :] = bbox2distance(center_priors[pos_inds, :2], + pos_gt_bboxes) / center_priors[pos_inds, None, 2] + dist_targets = dist_targets.clamp(min=0, max=self.reg_max - 0.1) + labels[pos_inds] = gt_labels[pos_assigned_gt_inds] + label_scores[pos_inds] = pos_ious + return ( + labels, + label_scores, + bbox_targets, + dist_targets, + num_pos_per_img, + ) + + def sample(self, assign_result, gt_bboxes): + """Sample positive and negative bboxes.""" + pos_inds = ( + torch.nonzero(assign_result.gt_inds > 0, as_tuple=False) + .squeeze(-1) + .unique() + ) + neg_inds = ( + torch.nonzero(assign_result.gt_inds == 0, as_tuple=False) + .squeeze(-1) + .unique() + ) + pos_assigned_gt_inds = assign_result.gt_inds[pos_inds] - 1 + + if gt_bboxes.numel() == 0: + # hack for index error case + assert pos_assigned_gt_inds.numel() == 0 + pos_gt_bboxes = torch.empty_like(gt_bboxes).view(-1, 4) + else: + if len(gt_bboxes.shape) < 2: + gt_bboxes = gt_bboxes.view(-1, 4) + pos_gt_bboxes = gt_bboxes[pos_assigned_gt_inds, :] + return pos_inds, neg_inds, pos_gt_bboxes, pos_assigned_gt_inds + + def post_process(self, preds, meta): + """Prediction results post processing. Decode bboxes and rescale + to original image size. + Args: + preds (Tensor): Prediction output. + meta (dict): Meta info. 
+ """ + cls_scores, bbox_preds = preds.split( + [self.num_classes, 4 * (self.reg_max + 1)], dim=-1 + ) + result_list = self.get_bboxes(cls_scores, bbox_preds, meta) + det_results = {} + warp_matrixes = ( + meta["warp_matrix"] + if isinstance(meta["warp_matrix"], list) + else meta["warp_matrix"] + ) + img_heights = ( + meta["img_info"]["height"].cpu().numpy() + if isinstance(meta["img_info"]["height"], torch.Tensor) + else meta["img_info"]["height"] + ) + img_widths = ( + meta["img_info"]["width"].cpu().numpy() + if isinstance(meta["img_info"]["width"], torch.Tensor) + else meta["img_info"]["width"] + ) + img_ids = ( + meta["img_info"]["id"].cpu().numpy() + if isinstance(meta["img_info"]["id"], torch.Tensor) + else meta["img_info"]["id"] + ) + + for result, img_width, img_height, img_id, warp_matrix in zip( + result_list, img_widths, img_heights, img_ids, warp_matrixes + ): + det_result = {} + det_bboxes, det_labels = result + det_bboxes = det_bboxes.detach().cpu().numpy() + det_bboxes[:, :4] = warp_boxes( + det_bboxes[:, :4], np.linalg.inv(warp_matrix), img_width, img_height + ) + classes = det_labels.detach().cpu().numpy() + for i in range(self.num_classes): + inds = classes == i + det_result[i] = np.concatenate( + [ + det_bboxes[inds, :4].astype(np.float32), + det_bboxes[inds, 4:5].astype(np.float32), + ], + axis=1, + ).tolist() + det_results[img_id] = det_result + return det_results + + def get_bboxes(self, cls_preds, reg_preds, img_metas): + """Decode the outputs to bboxes. + Args: + cls_preds (Tensor): Shape (num_imgs, num_points, num_classes). + reg_preds (Tensor): Shape (num_imgs, num_points, 4 * (regmax + 1)). + img_metas (dict): Dict of image info. + + Returns: + results_list (list[tuple]): List of detection bboxes and labels. + """ + device = cls_preds.device + b = cls_preds.shape[0] + input_height, input_width = img_metas["img"].shape[2:] + input_shape = (input_height, input_width) + + featmap_sizes = [ + (math.ceil(input_height / stride), math.ceil(input_width) / stride) + for stride in self.strides + ] + # get grid cells of one image + mlvl_center_priors = [ + self.get_single_level_center_priors( + b, + featmap_sizes[i], + stride, + dtype=torch.float32, + device=device, + ) + for i, stride in enumerate(self.strides) + ] + center_priors = torch.cat(mlvl_center_priors, dim=1) + dis_preds = self.distribution_project(reg_preds) * center_priors[..., 2, None] + bboxes = distance2bbox(center_priors[..., :2], dis_preds, max_shape=input_shape) + scores = cls_preds.sigmoid() + result_list = [] + for i in range(b): + # add a dummy background class at the end of all labels + # same with mmdetection2.0 + score, bbox = scores[i], bboxes[i] + padding = score.new_zeros(score.shape[0], 1) + score = torch.cat([score, padding], dim=1) + results = multiclass_nms( + bbox, + score, + score_thr=0.05, + nms_cfg=dict(type="nms", iou_threshold=0.6), + max_num=100, + ) + result_list.append(results) + return result_list + + def get_single_level_center_priors( + self, batch_size, featmap_size, stride, dtype, device + ): + """Generate centers of a single stage feature map. + Args: + batch_size (int): Number of images in one batch. + featmap_size (tuple[int]): height and width of the feature map + stride (int): down sample stride of the feature map + dtype (obj:`torch.dtype`): data type of the tensors + device (obj:`torch.device`): device of the tensors + Return: + priors (Tensor): center priors of a single level feature map. 
+ """ + h, w = featmap_size + x_range = (torch.arange(w, dtype=dtype, device=device)) * stride + y_range = (torch.arange(h, dtype=dtype, device=device)) * stride + y, x = torch.meshgrid(y_range, x_range) + y = y.flatten() + x = x.flatten() + strides = x.new_full((x.shape[0],), stride) + proiors = torch.stack([x, y, strides, strides], dim=-1) + return proiors.unsqueeze(0).repeat(batch_size, 1, 1) + + def _forward_onnx(self, feats): + """only used for onnx export""" + outputs = [] + for feat, cls_convs, gfl_cls in zip( + feats, + self.cls_convs, + self.gfl_cls, + ): + for conv in cls_convs: + feat = conv(feat) + output = gfl_cls(feat) + cls_pred, reg_pred = output.split( + [self.num_classes, 4 * (self.reg_max + 1)], dim=1 + ) + cls_pred = cls_pred.sigmoid() + out = torch.cat([cls_pred, reg_pred], dim=1) + outputs.append(out.flatten(start_dim=2)) + return torch.cat(outputs, dim=2).permute(0, 2, 1) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/simple_conv_head.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/simple_conv_head.py new file mode 100644 index 0000000000..b3d4d95ff7 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/head/simple_conv_head.py @@ -0,0 +1,100 @@ +import torch +import torch.nn as nn + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import ConvModule +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.init_weights import normal_init +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.scale import Scale + + +class SimpleConvHead(nn.Module): + def __init__( + self, + num_classes, + input_channel, + feat_channels=256, + stacked_convs=4, + strides=[8, 16, 32], + conv_cfg=None, + norm_cfg=dict(type="GN", num_groups=32, requires_grad=True), + activation="LeakyReLU", + reg_max=16, + **kwargs + ): + super(SimpleConvHead, self).__init__() + self.num_classes = num_classes + self.in_channels = input_channel + self.feat_channels = feat_channels + self.stacked_convs = stacked_convs + self.strides = strides + self.reg_max = reg_max + + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.activation = activation + self.cls_out_channels = num_classes + + self._init_layers() + self.init_weights() + + def _init_layers(self): + self.relu = nn.ReLU(inplace=True) + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + self.cls_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + activation=self.activation, + ) + ) + self.reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + activation=self.activation, + ) + ) + self.gfl_cls = nn.Conv2d( + self.feat_channels, self.cls_out_channels, 3, padding=1 + ) + self.gfl_reg = nn.Conv2d( + self.feat_channels, 4 * (self.reg_max + 1), 3, padding=1 + ) + self.scales = nn.ModuleList([Scale(1.0) for _ in self.strides]) + + def init_weights(self): + for m in self.cls_convs: + normal_init(m.conv, std=0.01) + for m in self.reg_convs: + normal_init(m.conv, std=0.01) + bias_cls = -4.595 + normal_init(self.gfl_cls, std=0.01, bias=bias_cls) + normal_init(self.gfl_reg, std=0.01) + + def forward(self, feats): + outputs = [] + for x, scale in zip(feats, 
self.scales): + cls_feat = x + reg_feat = x + for cls_conv in self.cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs: + reg_feat = reg_conv(reg_feat) + cls_score = self.gfl_cls(cls_feat) + bbox_pred = scale(self.gfl_reg(reg_feat)).float() + output = torch.cat([cls_score, bbox_pred], dim=1) + outputs.append(output.flatten(start_dim=2)) + outputs = torch.cat(outputs, dim=2).permute(0, 2, 1) + return outputs diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/gfocal_loss.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/gfocal_loss.py new file mode 100644 index 0000000000..af0b4251c2 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/gfocal_loss.py @@ -0,0 +1,178 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.loss.utils import weighted_loss + + +@weighted_loss +def quality_focal_loss(pred, target, beta=2.0): + r"""Quality Focal Loss (QFL) is from `Generalized Focal Loss: Learning + Qualified and Distributed Bounding Boxes for Dense Object Detection + `_. + + Args: + pred (torch.Tensor): Predicted joint representation of classification + and quality (IoU) estimation with shape (N, C), C is the number of + classes. + target (tuple([torch.Tensor])): Target category label with shape (N,) + and target quality label with shape (N,). + beta (float): The beta parameter for calculating the modulating factor. + Defaults to 2.0. + + Returns: + torch.Tensor: Loss tensor with shape (N,). + """ + assert ( + len(target) == 2 + ), """target for QFL must be a tuple of two elements, + including category label and quality label, respectively""" + # label denotes the category id, score denotes the quality score + label, score = target + + # negatives are supervised by 0 quality score + pred_sigmoid = pred.sigmoid() + scale_factor = pred_sigmoid + zerolabel = scale_factor.new_zeros(pred.shape) + loss = F.binary_cross_entropy_with_logits( + pred, zerolabel, reduction="none" + ) * scale_factor.pow(beta) + + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + bg_class_ind = pred.size(1) + pos = torch.nonzero((label >= 0) & (label < bg_class_ind), as_tuple=False).squeeze( + 1 + ) + pos_label = label[pos].long() + # positives are supervised by bbox quality (IoU) score + scale_factor = score[pos] - pred_sigmoid[pos, pos_label] + loss[pos, pos_label] = F.binary_cross_entropy_with_logits( + pred[pos, pos_label], score[pos], reduction="none" + ) * scale_factor.abs().pow(beta) + + loss = loss.sum(dim=1, keepdim=False) + return loss + + +@weighted_loss +def distribution_focal_loss(pred, label): + r"""Distribution Focal Loss (DFL) is from `Generalized Focal Loss: Learning + Qualified and Distributed Bounding Boxes for Dense Object Detection + `_. + + Args: + pred (torch.Tensor): Predicted general distribution of bounding boxes + (before softmax) with shape (N, n+1), n is the max value of the + integral set `{0, ..., n}` in paper. + label (torch.Tensor): Target distance label for bounding boxes with + shape (N,). + + Returns: + torch.Tensor: Loss tensor with shape (N,). 
+ """ + dis_left = label.long() + dis_right = dis_left + 1 + weight_left = dis_right.float() - label + weight_right = label - dis_left.float() + loss = F.cross_entropy(pred, dis_left, reduction="none") * weight_left + \ + F.cross_entropy(pred, dis_right, reduction="none") * weight_right + return loss + + +class QualityFocalLoss(nn.Module): + r"""Quality Focal Loss (QFL) is a variant of `Generalized Focal Loss: + Learning Qualified and Distributed Bounding Boxes for Dense Object + Detection `_. + + Args: + use_sigmoid (bool): Whether sigmoid operation is conducted in QFL. + Defaults to True. + beta (float): The beta parameter for calculating the modulating factor. + Defaults to 2.0. + reduction (str): Options are "none", "mean" and "sum". + loss_weight (float): Loss weight of current loss. + """ + + def __init__(self, use_sigmoid=True, beta=2.0, reduction="mean", loss_weight=1.0): + super(QualityFocalLoss, self).__init__() + assert use_sigmoid is True, "Only sigmoid in QFL supported now." + self.use_sigmoid = use_sigmoid + self.beta = beta + self.reduction = reduction + self.loss_weight = loss_weight + + def forward( + self, pred, target, weight=None, avg_factor=None, reduction_override=None + ): + """Forward function. + + Args: + pred (torch.Tensor): Predicted joint representation of + classification and quality (IoU) estimation with shape (N, C), + C is the number of classes. + target (tuple([torch.Tensor])): Target category label with shape + (N,) and target quality label with shape (N,). + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Defaults to None. + """ + assert reduction_override in (None, "none", "mean", "sum") + reduction = reduction_override if reduction_override else self.reduction + if self.use_sigmoid: + loss_cls = self.loss_weight * quality_focal_loss( + pred, + target, + weight, + beta=self.beta, + reduction=reduction, + avg_factor=avg_factor, + ) + else: + raise NotImplementedError + return loss_cls + + +class DistributionFocalLoss(nn.Module): + r"""Distribution Focal Loss (DFL) is a variant of `Generalized Focal Loss: + Learning Qualified and Distributed Bounding Boxes for Dense Object + Detection `_. + + Args: + reduction (str): Options are `'none'`, `'mean'` and `'sum'`. + loss_weight (float): Loss weight of current loss. + """ + + def __init__(self, reduction="mean", loss_weight=1.0): + super(DistributionFocalLoss, self).__init__() + self.reduction = reduction + self.loss_weight = loss_weight + + def forward( + self, pred, target, weight=None, avg_factor=None, reduction_override=None + ): + """Forward function. + + Args: + pred (torch.Tensor): Predicted general distribution of bounding + boxes (before softmax) with shape (N, n+1), n is the max value + of the integral set `{0, ..., n}` in paper. + target (torch.Tensor): Target distance label for bounding boxes + with shape (N,). + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Defaults to None. 
+ """ + assert reduction_override in (None, "none", "mean", "sum") + reduction = reduction_override if reduction_override else self.reduction + loss_cls = self.loss_weight * distribution_focal_loss( + pred, target, weight, reduction=reduction, avg_factor=avg_factor + ) + return loss_cls diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/iou_loss.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/iou_loss.py new file mode 100644 index 0000000000..7ee9d324a3 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/iou_loss.py @@ -0,0 +1,544 @@ +# Modification 2020 RangiLyu +# Copyright 2018-2019 Open-MMLab. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import math + +import torch +import torch.nn as nn + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.loss.utils import weighted_loss + + +def bbox_overlaps(bboxes1, bboxes2, mode="iou", is_aligned=False, eps=1e-6): + """Calculate overlap between two set of bboxes. + + If ``is_aligned `` is ``False``, then calculate the overlaps between each + bbox of bboxes1 and bboxes2, otherwise the overlaps between each aligned + pair of bboxes1 and bboxes2. + + Args: + bboxes1 (Tensor): shape (B, m, 4) in format or empty. + bboxes2 (Tensor): shape (B, n, 4) in format or empty. + B indicates the batch dim, in shape (B1, B2, ..., Bn). + If ``is_aligned `` is ``True``, then m and n must be equal. + mode (str): "iou" (intersection over union) or "iof" (intersection over + foreground). + is_aligned (bool, optional): If True, then m and n must be equal. + Default False. + eps (float, optional): A value added to the denominator for numerical + stability. Default 1e-6. + + Returns: + Tensor: shape (m, n) if ``is_aligned `` is False else shape (m,) + + Example: + >>> bboxes1 = torch.FloatTensor([ + >>> [0, 0, 10, 10], + >>> [10, 10, 20, 20], + >>> [32, 32, 38, 42], + >>> ]) + >>> bboxes2 = torch.FloatTensor([ + >>> [0, 0, 10, 20], + >>> [0, 10, 10, 19], + >>> [10, 10, 20, 20], + >>> ]) + >>> bbox_overlaps(bboxes1, bboxes2) + tensor([[0.5000, 0.0000, 0.0000], + [0.0000, 0.0000, 1.0000], + [0.0000, 0.0000, 0.0000]]) + >>> bbox_overlaps(bboxes1, bboxes2, mode='giou', eps=1e-7) + tensor([[0.5000, 0.0000, -0.5000], + [-0.2500, -0.0500, 1.0000], + [-0.8371, -0.8766, -0.8214]]) + + Example: + >>> empty = torch.FloatTensor([]) + >>> nonempty = torch.FloatTensor([ + >>> [0, 0, 10, 9], + >>> ]) + >>> assert tuple(bbox_overlaps(empty, nonempty).shape) == (0, 1) + >>> assert tuple(bbox_overlaps(nonempty, empty).shape) == (1, 0) + >>> assert tuple(bbox_overlaps(empty, empty).shape) == (0, 0) + """ + + assert mode in ["iou", "iof", "giou"], f"Unsupported mode {mode}" + # Either the boxes are empty or the length of boxes's last dimenstion is 4 + assert bboxes1.size(-1) == 4 or bboxes1.size(0) == 0 + assert bboxes2.size(-1) == 4 or bboxes2.size(0) == 0 + + # Batch dim must be the same + # Batch dim: (B1, B2, ... 
Bn) + assert bboxes1.shape[:-2] == bboxes2.shape[:-2] + batch_shape = bboxes1.shape[:-2] + + rows = bboxes1.size(-2) + cols = bboxes2.size(-2) + if is_aligned: + assert rows == cols + + if rows * cols == 0: + if is_aligned: + return bboxes1.new(batch_shape + (rows,)) + else: + return bboxes1.new(batch_shape + (rows, cols)) + + area1 = (bboxes1[..., 2] - bboxes1[..., 0]) * (bboxes1[..., 3] - bboxes1[..., 1]) + area2 = (bboxes2[..., 2] - bboxes2[..., 0]) * (bboxes2[..., 3] - bboxes2[..., 1]) + + if is_aligned: + lt = torch.max(bboxes1[..., :2], bboxes2[..., :2]) # [B, rows, 2] + rb = torch.min(bboxes1[..., 2:], bboxes2[..., 2:]) # [B, rows, 2] + + wh = (rb - lt).clamp(min=0) # [B, rows, 2] + overlap = wh[..., 0] * wh[..., 1] + + if mode in ["iou", "giou"]: + union = area1 + area2 - overlap + else: + union = area1 + if mode == "giou": + enclosed_lt = torch.min(bboxes1[..., :2], bboxes2[..., :2]) + enclosed_rb = torch.max(bboxes1[..., 2:], bboxes2[..., 2:]) + else: + lt = torch.max( + bboxes1[..., :, None, :2], bboxes2[..., None, :, :2] + ) # [B, rows, cols, 2] + rb = torch.min( + bboxes1[..., :, None, 2:], bboxes2[..., None, :, 2:] + ) # [B, rows, cols, 2] + + wh = (rb - lt).clamp(min=0) # [B, rows, cols, 2] + overlap = wh[..., 0] * wh[..., 1] + + if mode in ["iou", "giou"]: + union = area1[..., None] + area2[..., None, :] - overlap + else: + union = area1[..., None] + if mode == "giou": + enclosed_lt = torch.min( + bboxes1[..., :, None, :2], bboxes2[..., None, :, :2] + ) + enclosed_rb = torch.max( + bboxes1[..., :, None, 2:], bboxes2[..., None, :, 2:] + ) + + eps = union.new_tensor([eps]) + union = torch.max(union, eps) + ious = overlap / union + if mode in ["iou", "iof"]: + return ious + # calculate gious + enclose_wh = (enclosed_rb - enclosed_lt).clamp(min=0) + enclose_area = enclose_wh[..., 0] * enclose_wh[..., 1] + enclose_area = torch.max(enclose_area, eps) + gious = ious - (enclose_area - union) / enclose_area + return gious + + +@weighted_loss +def iou_loss(pred, target, eps=1e-6): + """IoU loss. + + Computing the IoU loss between a set of predicted bboxes and target bboxes. + The loss is calculated as negative log of IoU. + + Args: + pred (torch.Tensor): Predicted bboxes of format (x1, y1, x2, y2), + shape (n, 4). + target (torch.Tensor): Corresponding gt bboxes, shape (n, 4). + eps (float): Eps to avoid log(0). + + Return: + torch.Tensor: Loss tensor. + """ + ious = bbox_overlaps(pred, target, is_aligned=True).clamp(min=eps) + loss = -ious.log() + return loss + + +@weighted_loss +def bounded_iou_loss(pred, target, beta=0.2, eps=1e-3): + """BIoULoss. + + This is an implementation of paper + `Improving Object Localization with Fitness NMS and Bounded IoU Loss. + `_. + + Args: + pred (torch.Tensor): Predicted bboxes. + target (torch.Tensor): Target bboxes. + beta (float): beta parameter in smoothl1. + eps (float): eps to avoid NaN. 
+ """ + pred_ctrx = (pred[:, 0] + pred[:, 2]) * 0.5 + pred_ctry = (pred[:, 1] + pred[:, 3]) * 0.5 + pred_w = pred[:, 2] - pred[:, 0] + pred_h = pred[:, 3] - pred[:, 1] + with torch.no_grad(): + target_ctrx = (target[:, 0] + target[:, 2]) * 0.5 + target_ctry = (target[:, 1] + target[:, 3]) * 0.5 + target_w = target[:, 2] - target[:, 0] + target_h = target[:, 3] - target[:, 1] + + dx = target_ctrx - pred_ctrx + dy = target_ctry - pred_ctry + + loss_dx = 1 - torch.max( + (target_w - 2 * dx.abs()) / (target_w + 2 * dx.abs() + eps), + torch.zeros_like(dx), + ) + loss_dy = 1 - torch.max( + (target_h - 2 * dy.abs()) / (target_h + 2 * dy.abs() + eps), + torch.zeros_like(dy), + ) + loss_dw = 1 - torch.min(target_w / (pred_w + eps), pred_w / (target_w + eps)) + loss_dh = 1 - torch.min(target_h / (pred_h + eps), pred_h / (target_h + eps)) + loss_comb = torch.stack([loss_dx, loss_dy, loss_dw, loss_dh], dim=-1).view( + loss_dx.size(0), -1 + ) + + loss = torch.where( + loss_comb < beta, 0.5 * loss_comb * loss_comb / beta, loss_comb - 0.5 * beta + ).sum(dim=-1) + return loss + + +@weighted_loss +def giou_loss(pred, target, eps=1e-7): + r"""`Generalized Intersection over Union: A Metric and A Loss for Bounding + Box Regression `_. + + Args: + pred (torch.Tensor): Predicted bboxes of format (x1, y1, x2, y2), + shape (n, 4). + target (torch.Tensor): Corresponding gt bboxes, shape (n, 4). + eps (float): Eps to avoid log(0). + + Return: + Tensor: Loss tensor. + """ + gious = bbox_overlaps(pred, target, mode="giou", is_aligned=True, eps=eps) + loss = 1 - gious + return loss + + +@weighted_loss +def diou_loss(pred, target, eps=1e-7): + r"""`Implementation of Distance-IoU Loss: Faster and Better + Learning for Bounding Box Regression, https://arxiv.org/abs/1911.08287`_. + + Code is modified from https://github.com/Zzh-tju/DIoU. + + Args: + pred (Tensor): Predicted bboxes of format (x1, y1, x2, y2), + shape (n, 4). + target (Tensor): Corresponding gt bboxes, shape (n, 4). + eps (float): Eps to avoid log(0). + Return: + Tensor: Loss tensor. + """ + # overlap + lt = torch.max(pred[:, :2], target[:, :2]) + rb = torch.min(pred[:, 2:], target[:, 2:]) + wh = (rb - lt).clamp(min=0) + overlap = wh[:, 0] * wh[:, 1] + + # union + ap = (pred[:, 2] - pred[:, 0]) * (pred[:, 3] - pred[:, 1]) + ag = (target[:, 2] - target[:, 0]) * (target[:, 3] - target[:, 1]) + union = ap + ag - overlap + eps + + # IoU + ious = overlap / union + + # enclose area + enclose_x1y1 = torch.min(pred[:, :2], target[:, :2]) + enclose_x2y2 = torch.max(pred[:, 2:], target[:, 2:]) + enclose_wh = (enclose_x2y2 - enclose_x1y1).clamp(min=0) + + cw = enclose_wh[:, 0] + ch = enclose_wh[:, 1] + + c2 = cw ** 2 + ch ** 2 + eps + + b1_x1, b1_y1 = pred[:, 0], pred[:, 1] + b1_x2, b1_y2 = pred[:, 2], pred[:, 3] + b2_x1, b2_y1 = target[:, 0], target[:, 1] + b2_x2, b2_y2 = target[:, 2], target[:, 3] + + left = ((b2_x1 + b2_x2) - (b1_x1 + b1_x2)) ** 2 / 4 + right = ((b2_y1 + b2_y2) - (b1_y1 + b1_y2)) ** 2 / 4 + rho2 = left + right + + # DIoU + dious = ious - rho2 / c2 + loss = 1 - dious + return loss + + +@weighted_loss +def ciou_loss(pred, target, eps=1e-7): + r"""`Implementation of paper `Enhancing Geometric Factors into + Model Learning and Inference for Object Detection and Instance + Segmentation `_. + + Code is modified from https://github.com/Zzh-tju/CIoU. + + Args: + pred (Tensor): Predicted bboxes of format (x1, y1, x2, y2), + shape (n, 4). + target (Tensor): Corresponding gt bboxes, shape (n, 4). + eps (float): Eps to avoid log(0). 
+ Return: + Tensor: Loss tensor. + """ + # overlap + lt = torch.max(pred[:, :2], target[:, :2]) + rb = torch.min(pred[:, 2:], target[:, 2:]) + wh = (rb - lt).clamp(min=0) + overlap = wh[:, 0] * wh[:, 1] + + # union + ap = (pred[:, 2] - pred[:, 0]) * (pred[:, 3] - pred[:, 1]) + ag = (target[:, 2] - target[:, 0]) * (target[:, 3] - target[:, 1]) + union = ap + ag - overlap + eps + + # IoU + ious = overlap / union + + # enclose area + enclose_x1y1 = torch.min(pred[:, :2], target[:, :2]) + enclose_x2y2 = torch.max(pred[:, 2:], target[:, 2:]) + enclose_wh = (enclose_x2y2 - enclose_x1y1).clamp(min=0) + + cw = enclose_wh[:, 0] + ch = enclose_wh[:, 1] + + c2 = cw ** 2 + ch ** 2 + eps + + b1_x1, b1_y1 = pred[:, 0], pred[:, 1] + b1_x2, b1_y2 = pred[:, 2], pred[:, 3] + b2_x1, b2_y1 = target[:, 0], target[:, 1] + b2_x2, b2_y2 = target[:, 2], target[:, 3] + + w1, h1 = b1_x2 - b1_x1, b1_y2 - b1_y1 + eps + w2, h2 = b2_x2 - b2_x1, b2_y2 - b2_y1 + eps + + left = ((b2_x1 + b2_x2) - (b1_x1 + b1_x2)) ** 2 / 4 + right = ((b2_y1 + b2_y2) - (b1_y1 + b1_y2)) ** 2 / 4 + rho2 = left + right + + factor = 4 / math.pi ** 2 + v = factor * torch.pow(torch.atan(w2 / h2) - torch.atan(w1 / h1), 2) + + # CIoU + cious = ious - (rho2 / c2 + v ** 2 / (1 - ious + v)) + loss = 1 - cious + return loss + + +class IoULoss(nn.Module): + """IoULoss. + + Computing the IoU loss between a set of predicted bboxes and target bboxes. + + Args: + eps (float): Eps to avoid log(0). + reduction (str): Options are "none", "mean" and "sum". + loss_weight (float): Weight of loss. + """ + + def __init__(self, eps=1e-6, reduction="mean", loss_weight=1.0): + super(IoULoss, self).__init__() + self.eps = eps + self.reduction = reduction + self.loss_weight = loss_weight + + def forward( + self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs, + ): + """Forward function. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning target of the prediction. + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Defaults to None. Options are "none", "mean" and "sum". 
+ """ + assert reduction_override in (None, "none", "mean", "sum") + reduction = reduction_override if reduction_override else self.reduction + if (weight is not None) and (not torch.any(weight > 0)) and (reduction != "none"): + if pred.dim() == weight.dim() + 1: + weight = weight.unsqueeze(1) + return (pred * weight).sum() # 0 + loss = self.loss_weight * iou_loss( + pred, + target, + weight, + eps=self.eps, + reduction=reduction, + avg_factor=avg_factor, + **kwargs, + ) + return loss + + +class BoundedIoULoss(nn.Module): + def __init__(self, beta=0.2, eps=1e-3, reduction="mean", loss_weight=1.0): + super(BoundedIoULoss, self).__init__() + self.beta = beta + self.eps = eps + self.reduction = reduction + self.loss_weight = loss_weight + + def forward( + self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs, + ): + if weight is not None and not torch.any(weight > 0): + if pred.dim() == weight.dim() + 1: + weight = weight.unsqueeze(1) + return (pred * weight).sum() # 0 + assert reduction_override in (None, "none", "mean", "sum") + reduction = reduction_override if reduction_override else self.reduction + loss = self.loss_weight * bounded_iou_loss( + pred, + target, + weight, + beta=self.beta, + eps=self.eps, + reduction=reduction, + avg_factor=avg_factor, + **kwargs, + ) + return loss + + +class GIoULoss(nn.Module): + def __init__(self, eps=1e-6, reduction="mean", loss_weight=1.0): + super(GIoULoss, self).__init__() + self.eps = eps + self.reduction = reduction + self.loss_weight = loss_weight + + def forward( + self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs, + ): + if weight is not None and not torch.any(weight > 0): + if pred.dim() == weight.dim() + 1: + weight = weight.unsqueeze(1) + return (pred * weight).sum() # 0 + assert reduction_override in (None, "none", "mean", "sum") + reduction = reduction_override if reduction_override else self.reduction + loss = self.loss_weight * giou_loss( + pred, + target, + weight, + eps=self.eps, + reduction=reduction, + avg_factor=avg_factor, + **kwargs, + ) + return loss + + +class DIoULoss(nn.Module): + def __init__(self, eps=1e-6, reduction="mean", loss_weight=1.0): + super(DIoULoss, self).__init__() + self.eps = eps + self.reduction = reduction + self.loss_weight = loss_weight + + def forward( + self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs, + ): + if weight is not None and not torch.any(weight > 0): + if pred.dim() == weight.dim() + 1: + weight = weight.unsqueeze(1) + return (pred * weight).sum() # 0 + assert reduction_override in (None, "none", "mean", "sum") + reduction = reduction_override if reduction_override else self.reduction + loss = self.loss_weight * diou_loss( + pred, + target, + weight, + eps=self.eps, + reduction=reduction, + avg_factor=avg_factor, + **kwargs, + ) + return loss + + +class CIoULoss(nn.Module): + def __init__(self, eps=1e-6, reduction="mean", loss_weight=1.0): + super(CIoULoss, self).__init__() + self.eps = eps + self.reduction = reduction + self.loss_weight = loss_weight + + def forward( + self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs, + ): + if weight is not None and not torch.any(weight > 0): + if pred.dim() == weight.dim() + 1: + weight = weight.unsqueeze(1) + return (pred * weight).sum() # 0 + assert reduction_override in (None, "none", "mean", "sum") + reduction = reduction_override if reduction_override else self.reduction + 
loss = self.loss_weight * ciou_loss( + pred, + target, + weight, + eps=self.eps, + reduction=reduction, + avg_factor=avg_factor, + **kwargs, + ) + return loss diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/utils.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/utils.py new file mode 100644 index 0000000000..f8bae7d5f7 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/loss/utils.py @@ -0,0 +1,93 @@ +import functools + +import torch.nn.functional as F + + +def reduce_loss(loss, reduction): + """Reduce loss as specified. + + Args: + loss (Tensor): Elementwise loss tensor. + reduction (str): Options are "none", "mean" and "sum". + + Return: + Tensor: Reduced loss tensor. + """ + reduction_enum = F._Reduction.get_enum(reduction) + # none: 0, elementwise_mean:1, sum: 2 + if reduction_enum == 0: + return loss + elif reduction_enum == 1: + return loss.mean() + elif reduction_enum == 2: + return loss.sum() + + +def weight_reduce_loss(loss, weight=None, reduction="mean", avg_factor=None): + """Apply element-wise weight and reduce loss. + + Args: + loss (Tensor): Element-wise loss. + weight (Tensor): Element-wise weights. + reduction (str): Same as built-in losses of PyTorch. + avg_factor (float): Avarage factor when computing the mean of losses. + + Returns: + Tensor: Processed loss values. + """ + # if weight is specified, apply element-wise weight + if weight is not None: + loss = loss * weight + + # if avg_factor is not specified, just reduce the loss + if avg_factor is None: + loss = reduce_loss(loss, reduction) + else: + # if reduction is mean, then average the loss by avg_factor + if reduction == "mean": + loss = loss.sum() / avg_factor + # if reduction is 'none', then do nothing, otherwise raise an error + elif reduction != "none": + raise ValueError('avg_factor can not be used with reduction="sum"') + return loss + + +def weighted_loss(loss_func): + """Create a weighted version of a given loss function. + + To use this decorator, the loss function must have the signature like + `loss_func(pred, target, **kwargs)`. The function only needs to compute + element-wise loss without any reduction. This decorator will add weight + and reduction arguments to the function. The decorated function will have + the signature like `loss_func(pred, target, weight=None, reduction='mean', + avg_factor=None, **kwargs)`. + + :Example: + + >>> import torch + >>> @weighted_loss + >>> def l1_loss(pred, target): + >>> return (pred - target).abs() + + >>> pred = torch.Tensor([0, 2, 3]) + >>> target = torch.Tensor([1, 1, 1]) + >>> weight = torch.Tensor([1, 0, 1]) + + >>> l1_loss(pred, target) + tensor(1.3333) + >>> l1_loss(pred, target, weight) + tensor(1.) 
+ >>> l1_loss(pred, target, reduction='none') + tensor([1., 1., 2.]) + >>> l1_loss(pred, target, weight, avg_factor=2) + tensor(1.5000) + """ + + @functools.wraps(loss_func) + def wrapper(pred, target, weight=None, reduction="mean", avg_factor=None, **kwargs): + # get element-wise loss + loss = loss_func(pred, target, **kwargs) + loss = weight_reduce_loss(loss, weight, reduction, avg_factor) + return loss + + return wrapper diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/activation.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/activation.py new file mode 100644 index 0000000000..8047fc81ce --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/activation.py @@ -0,0 +1,41 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import torch.nn as nn + +activations = { + "ReLU": nn.ReLU, + "LeakyReLU": nn.LeakyReLU, + "ReLU6": nn.ReLU6, + "SELU": nn.SELU, + "ELU": nn.ELU, + "GELU": nn.GELU, + "PReLU": nn.PReLU, + "SiLU": nn.SiLU, + "HardSwish": nn.Hardswish, + "Hardswish": nn.Hardswish, + None: nn.Identity, +} + + +def act_layers(name): + assert name in activations.keys() + if name == "LeakyReLU": + return nn.LeakyReLU(negative_slope=0.1, inplace=True) + elif name == "GELU": + return nn.GELU() + elif name == "PReLU": + return nn.PReLU() + else: + return activations[name](inplace=True) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/conv.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/conv.py new file mode 100644 index 0000000000..693e6fd0fe --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/conv.py @@ -0,0 +1,393 @@ +""" +ConvModule refers from MMDetection +RepVGGConvModule refers from RepVGG: Making VGG-style ConvNets Great Again +""" +import warnings + +import numpy as np +import torch +import torch.nn as nn + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.activation import act_layers +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.init_weights\ + import constant_init, kaiming_init +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.norm import build_norm_layer + + +class ConvModule(nn.Module): + """A conv block that contains conv/norm/activation layers. + + Args: + in_channels (int): Same as nn.Conv2d. + out_channels (int): Same as nn.Conv2d. + kernel_size (int or tuple[int]): Same as nn.Conv2d. + stride (int or tuple[int]): Same as nn.Conv2d. + padding (int or tuple[int]): Same as nn.Conv2d. + dilation (int or tuple[int]): Same as nn.Conv2d. 
+ groups (int): Same as nn.Conv2d. + bias (bool or str): If specified as `auto`, it will be decided by the + norm_cfg. Bias will be set as True if norm_cfg is None, otherwise + False. + conv_cfg (dict): Config dict for convolution layer. + norm_cfg (dict): Config dict for normalization layer. + activation (str): activation layer, "ReLU" by default. + inplace (bool): Whether to use inplace mode for activation. + order (tuple[str]): The order of conv/norm/activation layers. It is a + sequence of "conv", "norm" and "act". Examples are + ("conv", "norm", "act") and ("act", "conv", "norm"). + """ + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding=0, + dilation=1, + groups=1, + bias="auto", + conv_cfg=None, + norm_cfg=None, + activation="ReLU", + inplace=True, + order=("conv", "norm", "act"), + ): + super(ConvModule, self).__init__() + assert conv_cfg is None or isinstance(conv_cfg, dict) + assert norm_cfg is None or isinstance(norm_cfg, dict) + assert activation is None or isinstance(activation, str) + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.activation = activation + self.inplace = inplace + self.order = order + assert isinstance(self.order, tuple) and len(self.order) == 3 + assert set(order) == {"conv", "norm", "act"} + + self.with_norm = norm_cfg is not None + # if the conv layer is before a norm layer, bias is unnecessary. + if bias == "auto": + bias = False if self.with_norm else True + self.with_bias = bias + + if self.with_norm and self.with_bias: + warnings.warn("ConvModule has norm and bias at the same time") + + # build convolution layer + self.conv = nn.Conv2d( # + in_channels, + out_channels, + kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups, + bias=bias, + ) + # export the attributes of self.conv to a higher level for convenience + self.in_channels = self.conv.in_channels + self.out_channels = self.conv.out_channels + self.kernel_size = self.conv.kernel_size + self.stride = self.conv.stride + self.padding = self.conv.padding + self.dilation = self.conv.dilation + self.transposed = self.conv.transposed + self.output_padding = self.conv.output_padding + self.groups = self.conv.groups + + # build normalization layers + if self.with_norm: + # norm layer is after conv layer + if order.index("norm") > order.index("conv"): + norm_channels = out_channels + else: + norm_channels = in_channels + self.norm_name, norm = build_norm_layer(norm_cfg, norm_channels) + self.add_module(self.norm_name, norm) + else: + self.norm_name = None + + # build activation layer + if self.activation: + self.act = act_layers(self.activation) + + # Use msra init by default + self.init_weights() + + @property + def norm(self): + if self.norm_name: + return getattr(self, self.norm_name) + else: + return None + + def init_weights(self): + if self.activation == "LeakyReLU": + nonlinearity = "leaky_relu" + else: + nonlinearity = "relu" + kaiming_init(self.conv, nonlinearity=nonlinearity) + if self.with_norm: + constant_init(self.norm, 1, bias=0) + + def forward(self, x, norm=True): + for layer in self.order: + if layer == "conv": + x = self.conv(x) + elif layer == "norm" and norm and self.with_norm: + x = self.norm(x) + elif layer == "act" and self.activation: + x = self.act(x) + return x + + +class DepthwiseConvModule(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding=0, + dilation=1, + bias="auto", + norm_cfg=dict(type="BN"), + activation="ReLU", + inplace=True, + 
order=("depthwise", "dwnorm", "act", "pointwise", "pwnorm", "act"), + ): + super(DepthwiseConvModule, self).__init__() + assert activation is None or isinstance(activation, str) + self.activation = activation + self.inplace = inplace + self.order = order + assert isinstance(self.order, tuple) and len(self.order) == 6 + assert set(order) == { + "depthwise", + "dwnorm", + "act", + "pointwise", + "pwnorm", + "act", + } + + self.with_norm = norm_cfg is not None + # if the conv layer is before a norm layer, bias is unnecessary. + if bias == "auto": + bias = False if self.with_norm else True + self.with_bias = bias + + if self.with_norm and self.with_bias: + warnings.warn("ConvModule has norm and bias at the same time") + + # build convolution layer + self.depthwise = nn.Conv2d( + in_channels, + in_channels, + kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + groups=in_channels, + bias=bias, + ) + self.pointwise = nn.Conv2d( + in_channels, out_channels, kernel_size=1, stride=1, padding=0, bias=bias + ) + + # export the attributes of self.conv to a higher level for convenience + self.in_channels = self.depthwise.in_channels + self.out_channels = self.pointwise.out_channels + self.kernel_size = self.depthwise.kernel_size + self.stride = self.depthwise.stride + self.padding = self.depthwise.padding + self.dilation = self.depthwise.dilation + self.transposed = self.depthwise.transposed + self.output_padding = self.depthwise.output_padding + + # build normalization layers + if self.with_norm: + # norm layer is after conv layer + _, self.dwnorm = build_norm_layer(norm_cfg, in_channels) + _, self.pwnorm = build_norm_layer(norm_cfg, out_channels) + + # build activation layer + if self.activation: + self.act = act_layers(self.activation) + + # Use msra init by default + self.init_weights() + + def init_weights(self): + if self.activation == "LeakyReLU": + nonlinearity = "leaky_relu" + else: + nonlinearity = "relu" + kaiming_init(self.depthwise, nonlinearity=nonlinearity) + kaiming_init(self.pointwise, nonlinearity=nonlinearity) + if self.with_norm: + constant_init(self.dwnorm, 1, bias=0) + constant_init(self.pwnorm, 1, bias=0) + + def forward(self, x, norm=True): + for layer_name in self.order: + if layer_name != "act": + layer = self.__getattr__(layer_name) + x = layer(x) + elif layer_name == "act" and self.activation: + x = self.act(x) + return x + + +class RepVGGConvModule(nn.Module): + """ + RepVGG Conv Block from paper RepVGG: Making VGG-style ConvNets Great Again + https://arxiv.org/abs/2101.03697 + https://github.com/DingXiaoH/RepVGG + """ + + def __init__( + self, + in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1, + dilation=1, + groups=1, + activation="ReLU", + padding_mode="zeros", + deploy=False, + **kwargs + ): + super(RepVGGConvModule, self).__init__() + assert activation is None or isinstance(activation, str) + self.activation = activation + + self.deploy = deploy + self.groups = groups + self.in_channels = in_channels + + assert kernel_size == 3 + assert padding == 1 + + padding_11 = padding - kernel_size // 2 + + # build activation layer + if self.activation: + self.act = act_layers(self.activation) + + if deploy: + self.rbr_reparam = nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups, + bias=True, + padding_mode=padding_mode, + ) + + else: + self.rbr_identity = ( + nn.BatchNorm2d(num_features=in_channels) + if out_channels == 
in_channels and stride == 1 + else None + ) + + self.rbr_dense = nn.Sequential( + nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + groups=groups, + bias=False, + ), + nn.BatchNorm2d(num_features=out_channels), + ) + + self.rbr_1x1 = nn.Sequential( + nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=1, + stride=stride, + padding=padding_11, + groups=groups, + bias=False, + ), + nn.BatchNorm2d(num_features=out_channels), + ) + print("RepVGG Block, identity = ", self.rbr_identity) + + def forward(self, inputs): + if hasattr(self, "rbr_reparam"): + return self.act(self.rbr_reparam(inputs)) + + if self.rbr_identity is None: + id_out = 0 + else: + id_out = self.rbr_identity(inputs) + + return self.act(self.rbr_dense(inputs) + self.rbr_1x1(inputs) + id_out) + + # This func derives the equivalent kernel and bias in a DIFFERENTIABLE way. + # You can get the equivalent kernel and bias at any time and do whatever you want, + # for example, apply some penalties or constraints during training, just like you + # do to the other models. May be useful for quantization or pruning. + def get_equivalent_kernel_bias(self): + kernel3x3, bias3x3 = self._fuse_bn_tensor(self.rbr_dense) + kernel1x1, bias1x1 = self._fuse_bn_tensor(self.rbr_1x1) + kernelid, biasid = self._fuse_bn_tensor(self.rbr_identity) + return ( + kernel3x3 + self._pad_1x1_to_3x3_tensor(kernel1x1) + kernelid, + bias3x3 + bias1x1 + biasid, + ) + + def _pad_1x1_to_3x3_tensor(self, kernel1x1): + if kernel1x1 is None: + return 0 + else: + return nn.functional.pad(kernel1x1, [1, 1, 1, 1]) + + def _fuse_bn_tensor(self, branch): + if branch is None: + return 0, 0 + if isinstance(branch, nn.Sequential): + kernel = branch[0].weight + running_mean = branch[1].running_mean + running_var = branch[1].running_var + gamma = branch[1].weight + beta = branch[1].bias + eps = branch[1].eps + else: + assert isinstance(branch, nn.BatchNorm2d) + if not hasattr(self, "id_tensor"): + input_dim = self.in_channels // self.groups + kernel_value = np.zeros( + (self.in_channels, input_dim, 3, 3), dtype=np.float32 + ) + for i in range(self.in_channels): + kernel_value[i, i % input_dim, 1, 1] = 1 + self.id_tensor = torch.from_numpy(kernel_value).to(branch.weight.device) + kernel = self.id_tensor + running_mean = branch.running_mean + running_var = branch.running_var + gamma = branch.weight + beta = branch.bias + eps = branch.eps + std = (running_var + eps).sqrt() + t = (gamma / std).reshape(-1, 1, 1, 1) + return kernel * t, beta - running_mean * gamma / std + + def repvgg_convert(self): + kernel, bias = self.get_equivalent_kernel_bias() + return ( + kernel.detach().cpu().numpy(), + bias.detach().cpu().numpy(), + ) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/init_weights.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/init_weights.py new file mode 100644 index 0000000000..27da85c922 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/init_weights.py @@ -0,0 +1,43 @@ +# Modification 2020 RangiLyu +# Copyright 2018-2019 Open-MMLab. 
+ +import torch.nn as nn + + +def kaiming_init( + module, a=0, mode="fan_out", nonlinearity="relu", bias=0, distribution="normal" +): + assert distribution in ["uniform", "normal"] + if distribution == "uniform": + nn.init.kaiming_uniform_( + module.weight, a=a, mode=mode, nonlinearity=nonlinearity + ) + else: + nn.init.kaiming_normal_( + module.weight, a=a, mode=mode, nonlinearity=nonlinearity + ) + if hasattr(module, "bias") and module.bias is not None: + nn.init.constant_(module.bias, bias) + + +def xavier_init(module, gain=1, bias=0, distribution="normal"): + assert distribution in ["uniform", "normal"] + if distribution == "uniform": + nn.init.xavier_uniform_(module.weight, gain=gain) + else: + nn.init.xavier_normal_(module.weight, gain=gain) + if hasattr(module, "bias") and module.bias is not None: + nn.init.constant_(module.bias, bias) + + +def normal_init(module, mean=0, std=1, bias=0): + nn.init.normal_(module.weight, mean, std) + if hasattr(module, "bias") and module.bias is not None: + nn.init.constant_(module.bias, bias) + + +def constant_init(module, val, bias=0): + if hasattr(module, "weight") and module.weight is not None: + nn.init.constant_(module.weight, val) + if hasattr(module, "bias") and module.bias is not None: + nn.init.constant_(module.bias, bias) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/nms.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/nms.py new file mode 100644 index 0000000000..e5fa3e216c --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/nms.py @@ -0,0 +1,122 @@ +import torch +from torchvision.ops import nms + + +def multiclass_nms( + multi_bboxes, multi_scores, score_thr, nms_cfg, max_num=-1, score_factors=None +): + """NMS for multi-class bboxes. + + Args: + multi_bboxes (Tensor): shape (n, #class*4) or (n, 4) + multi_scores (Tensor): shape (n, #class), where the last column + contains scores of the background class, but this will be ignored. + score_thr (float): bbox threshold, bboxes with scores lower than it + will not be considered. + nms_thr (float): NMS IoU threshold + max_num (int): if there are more than max_num bboxes after NMS, + only top max_num will be kept. + score_factors (Tensor): The factors multiplied to scores before + applying NMS + + Returns: + tuple: (bboxes, labels), tensors of shape (k, 5) and (k, 1). Labels \ + are 0-based. 
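+
+        Example (illustrative sketch; one foreground class plus background,
+        values are assumed):
+
+            >>> multi_bboxes = torch.tensor([[0., 0., 10., 10.]])
+            >>> multi_scores = torch.tensor([[0.9, 0.1]])  # [class_0, background]
+            >>> dets, labels = multiclass_nms(
+            ...     multi_bboxes, multi_scores, score_thr=0.05,
+            ...     nms_cfg=dict(type="nms", iou_threshold=0.6), max_num=100)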
+ """ + num_classes = multi_scores.size(1) - 1 + # exclude background category + if multi_bboxes.shape[1] > 4: + bboxes = multi_bboxes.view(multi_scores.size(0), -1, 4) + else: + bboxes = multi_bboxes[:, None].expand(multi_scores.size(0), num_classes, 4) + scores = multi_scores[:, :-1] + + # filter out boxes with low scores + valid_mask = scores > score_thr + + # We use masked_select for ONNX exporting purpose, + # which is equivalent to bboxes = bboxes[valid_mask] + # we have to use this ugly code + bboxes = torch.masked_select( + bboxes, torch.stack((valid_mask, valid_mask, valid_mask, valid_mask), -1) + ).view(-1, 4) + if score_factors is not None: + scores = scores * score_factors[:, None] + scores = torch.masked_select(scores, valid_mask) + labels = valid_mask.nonzero(as_tuple=False)[:, 1] + + if bboxes.numel() == 0: + bboxes = multi_bboxes.new_zeros((0, 5)) + labels = multi_bboxes.new_zeros((0,), dtype=torch.long) + + if torch.onnx.is_in_onnx_export(): + raise RuntimeError( + "[ONNX Error] Can not record NMS " + "as it has not been executed this time" + ) + return bboxes, labels + + dets, keep = batched_nms(bboxes, scores, labels, nms_cfg) + + if max_num > 0: + dets = dets[:max_num] + keep = keep[:max_num] + + return dets, labels[keep] + + +def batched_nms(boxes, scores, idxs, nms_cfg, class_agnostic=False): + """Performs non-maximum suppression in a batched fashion. + Modified from https://github.com/pytorch/vision/blob + /505cd6957711af790211896d32b40291bea1bc21/torchvision/ops/boxes.py#L39. + In order to perform NMS independently per class, we add an offset to all + the boxes. The offset is dependent only on the class idx, and is large + enough so that boxes from different classes do not overlap. + Arguments: + boxes (torch.Tensor): boxes in shape (N, 4). + scores (torch.Tensor): scores in shape (N, ). + idxs (torch.Tensor): each index value correspond to a bbox cluster, + and NMS will not be applied between elements of different idxs, + shape (N, ). + nms_cfg (dict): specify nms type and other parameters like iou_thr. + Possible keys includes the following. + - iou_thr (float): IoU threshold used for NMS. + - split_thr (float): threshold number of boxes. In some cases the + number of boxes is large (e.g., 200k). To avoid OOM during + training, the users could set `split_thr` to a small value. + If the number of boxes is greater than the threshold, it will + perform NMS on each group of boxes separately and sequentially. + Defaults to 10000. + class_agnostic (bool): if true, nms is class agnostic, + i.e. IoU thresholding happens over all boxes, + regardless of the predicted class. + Returns: + tuple: kept dets and indice. 
+ """ + nms_cfg_ = nms_cfg.copy() + class_agnostic = nms_cfg_.pop("class_agnostic", class_agnostic) + if class_agnostic: + boxes_for_nms = boxes + else: + max_coordinate = boxes.max() + offsets = idxs.to(boxes) * (max_coordinate + 1) + boxes_for_nms = boxes + offsets[:, None] + nms_cfg_.pop("type", "nms") + split_thr = nms_cfg_.pop("split_thr", 10000) + if len(boxes_for_nms) < split_thr: + keep = nms(boxes_for_nms, scores, **nms_cfg_) + boxes = boxes[keep] + scores = scores[keep] + else: + total_mask = scores.new_zeros(scores.size(), dtype=torch.bool) + for id in torch.unique(idxs): + mask = (idxs == id).nonzero(as_tuple=False).view(-1) + keep = nms(boxes_for_nms[mask], scores[mask], **nms_cfg_) + total_mask[mask[keep]] = True + + keep = total_mask.nonzero(as_tuple=False).view(-1) + keep = keep[scores[keep].argsort(descending=True)] + boxes = boxes[keep] + scores = scores[keep] + + return torch.cat([boxes, scores[:, None]], -1), keep diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/norm.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/norm.py new file mode 100644 index 0000000000..b9dd8f43e0 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/norm.py @@ -0,0 +1,55 @@ +import torch.nn as nn + +norm_cfg = { + # format: layer_type: (abbreviation, module) + "BN": ("bn", nn.BatchNorm2d), + "SyncBN": ("bn", nn.SyncBatchNorm), + "GN": ("gn", nn.GroupNorm), + # and potentially 'SN' +} + + +def build_norm_layer(cfg, num_features, postfix=""): + """Build normalization layer + + Args: + cfg (dict): cfg should contain: + type (str): identify norm layer type. + layer args: args needed to instantiate a norm layer. + requires_grad (bool): [optional] whether stop gradient updates + num_features (int): number of channels from input. + postfix (int, str): appended into norm abbreviation to + create named layer. 
+ + Returns: + name (str): abbreviation + postfix + layer (nn.Module): created norm layer + """ + assert isinstance(cfg, dict) and "type" in cfg + cfg_ = cfg.copy() + + layer_type = cfg_.pop("type") + if layer_type not in norm_cfg: + raise KeyError("Unrecognized norm type {}".format(layer_type)) + else: + abbr, norm_layer = norm_cfg[layer_type] + if norm_layer is None: + raise NotImplementedError + + assert isinstance(postfix, (int, str)) + name = abbr + str(postfix) + + requires_grad = cfg_.pop("requires_grad", True) + cfg_.setdefault("eps", 1e-5) + if layer_type != "GN": + layer = norm_layer(num_features, **cfg_) + if layer_type == "SyncBN" and hasattr(layer, "_specify_ddp_gpu_num"): + layer._specify_ddp_gpu_num(1) + else: + assert "num_groups" in cfg_ + layer = norm_layer(num_channels=num_features, **cfg_) + + for param in layer.parameters(): + param.requires_grad = requires_grad + + return name, layer diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/scale.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/scale.py new file mode 100644 index 0000000000..2461af8a6f --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/scale.py @@ -0,0 +1,15 @@ +import torch +import torch.nn as nn + + +class Scale(nn.Module): + """ + A learnable scale parameter + """ + + def __init__(self, scale=1.0): + super(Scale, self).__init__() + self.scale = nn.Parameter(torch.tensor(scale, dtype=torch.float)) + + def forward(self, x): + return x * self.scale diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/transformer.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/transformer.py new file mode 100644 index 0000000000..24e2de458b --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/module/transformer.py @@ -0,0 +1,138 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import torch.nn as nn + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.activation import act_layers +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.module.conv import ConvModule + + +class MLP(nn.Module): + def __init__( + self, in_dim, hidden_dim=None, out_dim=None, drop=0.0, activation="GELU" + ): + super(MLP, self).__init__() + out_dim = out_dim or in_dim + hidden_dim = hidden_dim or in_dim + self.fc1 = nn.Linear(in_dim, hidden_dim) + self.act = act_layers(activation) + self.fc2 = nn.Linear(hidden_dim, out_dim) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class TransformerEncoder(nn.Module): + """ + Encoder layer of transformer + :param dim: feature dimension + :param num_heads: number of attention heads + :param mlp_ratio: hidden layer dimension expand ratio in MLP + :param dropout_ratio: probability of an element to be zeroed + :param activation: activation layer type + :param kv_bias: add bias on key and values + """ + + def __init__( + self, + dim, + num_heads, + mlp_ratio, + dropout_ratio=0.0, + activation="GELU", + kv_bias=False, + ): + super(TransformerEncoder, self).__init__() + self.norm1 = nn.LayerNorm(dim) + + # embed_dim must be divisible by num_heads + assert dim // num_heads * num_heads == dim + self.attn = nn.MultiheadAttention( + embed_dim=dim, + num_heads=num_heads, + dropout=dropout_ratio, + add_bias_kv=kv_bias, + ) + self.norm2 = nn.LayerNorm(dim) + self.mlp = MLP( + in_dim=dim, + hidden_dim=int(dim * mlp_ratio), + drop=dropout_ratio, + activation=activation, + ) + + def forward(self, x): + _x = self.norm1(x) + x = x + self.attn(_x, _x, _x)[0] + x = x + self.mlp(self.norm2(x)) + return x + + +class TransformerBlock(nn.Module): + """ + Block of transformer encoder layers. Used in vision task. 
+ :param in_channels: input channels + :param out_channels: output channels + :param num_heads: number of attention heads + :param num_encoders: number of transformer encoder layers + :param mlp_ratio: hidden layer dimension expand ratio in MLP + :param dropout_ratio: probability of an element to be zeroed + :param activation: activation layer type + :param kv_bias: add bias on key and values + """ + + def __init__( + self, + in_channels, + out_channels, + num_heads, + num_encoders=1, + mlp_ratio=1, + dropout_ratio=0.0, + kv_bias=False, + activation="GELU", + ): + super(TransformerBlock, self).__init__() + + # out_channels must be divisible by num_heads + assert out_channels // num_heads * num_heads == out_channels + + self.conv = ( + nn.Identity() + if in_channels == out_channels + else ConvModule(in_channels, out_channels, 1) + ) + self.linear = nn.Linear(out_channels, out_channels) + encoders = [ + TransformerEncoder( + out_channels, num_heads, mlp_ratio, dropout_ratio, activation, kv_bias + ) + for _ in range(num_encoders) + ] + self.encoders = nn.Sequential(*encoders) + + def forward(self, x, pos_embed): + b, _, h, w = x.shape + x = self.conv(x) + x = x.flatten(2).permute(2, 0, 1) + x = x + pos_embed + x = self.encoders(x) + x = x.permute(1, 2, 0).reshape(b, -1, h, w) + return x diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/weight_averager/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/weight_averager/__init__.py new file mode 100644 index 0000000000..170b589cc9 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/weight_averager/__init__.py @@ -0,0 +1,26 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import copy + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.weight_averager.ema import ExpMovingAverager + + +def build_weight_averager(cfg, device="cpu"): + cfg = copy.deepcopy(cfg) + name = cfg.pop("name") + if name == "ExpMovingAverager": + return ExpMovingAverager(**cfg, device=device) + else: + raise NotImplementedError(f"{name} is not implemented") diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/weight_averager/ema.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/weight_averager/ema.py new file mode 100644 index 0000000000..0906c7c6d7 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/model/weight_averager/ema.py @@ -0,0 +1,80 @@ +# Copyright 2021 RangiLyu. All rights reserved. +# ===================================================================== +# Modified from: https://github.com/facebookresearch/d2go +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +# Licensed under the Apache License, Version 2.0 (the "License") +import itertools +import math +from typing import Any, Dict, Optional + +import torch +import torch.nn as nn + + +class ExpMovingAverager(object): + """Exponential Moving Average. + + Args: + decay (float): EMA decay factor, should be in [0, 1]. A decay of 0 corresponds + to always using the latest value (no EMA) and a decay of 1 corresponds to + not updating weights after initialization. Default to 0.9998. + device (str): If not None, move EMA state to device. + """ + + def __init__(self, decay: float = 0.9998, device: Optional[str] = None): + if decay < 0 or decay > 1.0: + raise ValueError(f"Decay should be in [0, 1], {decay} was given.") + self.decay = decay + self.state = {} + self.device = device + + def load_from(self, model: nn.Module) -> None: + """Load state from the model.""" + self.state.clear() + for name, val in self._get_model_state_iterator(model): + val = val.detach().clone() + self.state[name] = val.to(self.device) if self.device else val + + def has_inited(self) -> bool: + return len(self.state) > 0 + + def apply_to(self, model: nn.Module) -> None: + """Apply EMA state to the model.""" + with torch.no_grad(): + for name, val in self._get_model_state_iterator(model): + assert ( + name in self.state + ), f"Name {name} not exist, available names are {self.state.keys()}" + val.copy_(self.state[name]) + + def state_dict(self) -> Dict[str, Any]: + return self.state + + def load_state_dict(self, state_dict: Dict[str, Any]) -> None: + self.state.clear() + for name, val in state_dict.items(): + self.state[name] = val.to(self.device) if self.device else val + + def to(self, device: torch.device) -> None: + """moves EMA state to device.""" + for name, val in self.state.items(): + self.state[name] = val.to(device) + + def _get_model_state_iterator(self, model: nn.Module): + param_iter = model.named_parameters() + # pyre-fixme[16]: `nn.Module` has no attribute `named_buffers`. + buffer_iter = model.named_buffers() + return itertools.chain(param_iter, buffer_iter) + + def calculate_dacay(self, iteration: int) -> float: + decay = (self.decay) * math.exp(-(1 + iteration) / 2000) + (1 - self.decay) + return decay + + def update(self, model: nn.Module, iteration: int) -> None: + decay = self.calculate_dacay(iteration) + with torch.no_grad(): + for name, val in self._get_model_state_iterator(model): + ema_val = self.state[name] + if self.device: + val = val.to(self.device) + ema_val.copy_(ema_val * (1 - decay) + val * decay) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/trainer/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/trainer/__init__.py new file mode 100644 index 0000000000..bc1f10f77d --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/trainer/__init__.py @@ -0,0 +1,16 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.trainer.task import TrainingTask + +__all__ = ["TrainingTask"] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/trainer/task.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/trainer/task.py new file mode 100644 index 0000000000..d2939d22e1 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/trainer/task.py @@ -0,0 +1,362 @@ +# Modifications Copyright 2021 - present, OpenDR European Project +# +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import copy +import os +import warnings +from typing import Any, Dict, List + +import torch +import torch.distributed as dist +from pytorch_lightning import LightningModule + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.batch_process import stack_batch_img +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util\ + import convert_avg_params, gather_results, mkdir +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.check_point import save_model_state +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.weight_averager import build_weight_averager + + +class TrainingTask(LightningModule): + """ + Pytorch Lightning module of a general training task. + Including training, evaluating and testing. + Args: + cfg: Training configurations + evaluator: Evaluator for evaluating the model performance. 
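+
+    Example (illustrative sketch; `cfg`, `model`, `evaluator` and the
+    dataloaders are assumed to be built elsewhere from a NanoDet config,
+    and `pl` is assumed to be `import pytorch_lightning as pl`):
+
+        >>> task = TrainingTask(cfg, model, evaluator)
+        >>> trainer = pl.Trainer(max_epochs=cfg.schedule.total_epochs)
+        >>> trainer.fit(task, train_dataloader, val_dataloader)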
+ """ + + def __init__(self, cfg, model, evaluator=None): + super(TrainingTask, self).__init__() + self.cfg = cfg + self.model = model + self.evaluator = evaluator + self.save_flag = -10 + self.log_style = "NanoDet" + self.weight_averager = None + if "weight_averager" in self.cfg.model: + self.weight_averager = build_weight_averager( + self.cfg.model.weight_averager, device=self.device + ) + self.avg_model = copy.deepcopy(self.model) + + def _preprocess_batch_input(self, batch): + batch_imgs = batch["img"] + if isinstance(batch_imgs, list): + batch_imgs = [img.to(self.device) for img in batch_imgs] + batch_img_tensor = stack_batch_img(batch_imgs, divisible=32) + batch["img"] = batch_img_tensor + return batch + + def forward(self, x): + x = self.model(x) + return x + + @torch.no_grad() + def predict(self, batch, batch_idx=None, dataloader_idx=None): + batch = self._preprocess_batch_input(batch) + preds = self.forward(batch["img"]) + results = self.model.head.post_process(preds, batch) + return results + + def save_current_model(self, path, logger): + save_model_state(path=path, model=self.model, weight_averager=self.weight_averager, logger=logger) + + def training_step(self, batch, batch_idx): + batch = self._preprocess_batch_input(batch) + preds, loss, loss_states = self.model.forward_train(batch) + + # log train losses + if self.global_step % self.cfg.log.interval == 0: + lr = self.optimizers().param_groups[0]["lr"] + log_msg = "Train|Epoch{}/{}|Iter{}({})| lr:{:.2e}| ".format( + self.current_epoch + 1, + self.cfg.schedule.total_epochs, + self.global_step, + batch_idx, + lr, + ) + self.scalar_summary("Train_loss/lr", "Train", lr, self.global_step) + for loss_name in loss_states: + log_msg += "{}:{:.4f}| ".format( + loss_name, loss_states[loss_name].mean().item() + ) + self.scalar_summary( + "Train_loss/" + loss_name, + "Train", + loss_states[loss_name].mean().item(), + self.global_step, + ) + if self.logger: + self.logger.info(log_msg) + + return loss + + def training_epoch_end(self, outputs: List[Any]) -> None: + # save models in schedule epoches + if self.current_epoch % self.cfg.schedule.val_intervals == 0: + checkpoint_save_path = os.path.join(self.cfg.save_dir, "checkpoints") + mkdir(checkpoint_save_path) + print("===" * 10) + print("checkpoint_save_path: {} \n epoch: {}".format(checkpoint_save_path, self.current_epoch)) + print("===" * 10) + self.trainer.save_checkpoint( + os.path.join(checkpoint_save_path, "model_iter_{}.ckpt".format(self.current_epoch)) + ) + + self.lr_scheduler.step() + + def validation_step(self, batch, batch_idx): + batch = self._preprocess_batch_input(batch) + if self.weight_averager is not None: + preds, loss, loss_states = self.avg_model.forward_train(batch) + else: + preds, loss, loss_states = self.model.forward_train(batch) + + if batch_idx % self.cfg.log.interval == 0: + lr = self.optimizers().param_groups[0]["lr"] + log_msg = "Val|Epoch{}/{}|Iter{}({})| lr:{:.2e}| ".format( + self.current_epoch + 1, + self.cfg.schedule.total_epochs, + self.global_step, + batch_idx, + lr, + ) + for loss_name in loss_states: + log_msg += "{}:{:.4f}| ".format( + loss_name, loss_states[loss_name].mean().item() + ) + if self.logger: + self.logger.info(log_msg) + + dets = self.model.head.post_process(preds, batch) + return dets + + def validation_epoch_end(self, validation_step_outputs): + """ + Called at the end of the validation epoch with the + outputs of all validation steps.Evaluating results + and save best model. 
+ Args: + validation_step_outputs: A list of val outputs + + """ + results = {} + for res in validation_step_outputs: + results.update(res) + all_results = ( + gather_results(results, self.device) + if dist.is_available() and dist.is_initialized() + else results + ) + if all_results: + eval_results = self.evaluator.evaluate( + all_results, self.cfg.save_dir) + metric = eval_results[self.cfg.evaluator.save_key] + # save best model + if metric > self.save_flag: + self.save_flag = metric + best_save_path = os.path.join(self.cfg.save_dir, "model_best") + mkdir(best_save_path) + self.trainer.save_checkpoint( + os.path.join(best_save_path, "model_best.ckpt") + ) + self.save_current_model(os.path.join(best_save_path, "nanodet_model_best.pth"), logger=self.logger) + txt_path = os.path.join(best_save_path, "eval_results.txt") + with open(txt_path, "a") as f: + f.write("Epoch:{}\n".format(self.current_epoch + 1)) + for k, v in eval_results.items(): + f.write("{}: {}\n".format(k, v)) + else: + warnings.warn( + "Warning! Save_key is not in eval results! Only save model last!" + ) + if self.logger: + self.logger.log_metrics(eval_results, self.current_epoch + 1) + else: + # self.logger.info("Skip val on rank {}".format(self.local_rank)) + if self.logger: + self.logger.info("Skip val ") + + def test_step(self, batch, batch_idx): + dets = self.predict(batch, batch_idx) + return dets + + def test_epoch_end(self, test_step_outputs): + results = {} + for res in test_step_outputs: + results.update(res) + all_results = ( + gather_results(results, self.device) + if dist.is_available() and dist.is_initialized() + else results + ) + if all_results: + if self.cfg.test_mode == "val": + eval_results = self.evaluator.evaluate( + all_results, self.cfg.save_dir) + txt_path = os.path.join(self.cfg.save_dir, "eval_results.txt") + with open(txt_path, "a") as f: + for k, v in eval_results.items(): + f.write("{}: {}\n".format(k, v)) + + else: + if self.logger: + self.logger.info("Skip test on rank {}".format(self.local_rank)) + + def configure_optimizers(self): + """ + Prepare optimizer and learning-rate scheduler + to use in optimization. + + Returns: + optimizer + """ + + optimizer_cfg = copy.deepcopy(self.cfg.schedule.optimizer) + name = optimizer_cfg.pop("name") + build_optimizer = getattr(torch.optim, name) + optimizer = build_optimizer(params=self.parameters(), **optimizer_cfg) + + schedule_cfg = copy.deepcopy(self.cfg.schedule.lr_schedule) + name = schedule_cfg.pop("name") + build_scheduler = getattr(torch.optim.lr_scheduler, name) + self.lr_scheduler = build_scheduler(optimizer=optimizer, **schedule_cfg) + + return optimizer + + def optimizer_step( + self, + epoch=None, + batch_idx=None, + optimizer=None, + optimizer_idx=None, + optimizer_closure=None, + on_tpu=None, + using_native_amp=None, + using_lbfgs=None, + ): + """ + Performs a single optimization step (parameter update). + Args: + epoch: Current epoch + batch_idx: Index of current batch + optimizer: A PyTorch optimizer + optimizer_idx: If you used multiple optimizers this indexes into that list. 
+ optimizer_closure: closure for all optimizers + on_tpu: true if TPU backward is required + using_native_amp: True if using native amp + using_lbfgs: True if the matching optimizer is lbfgs + """ + # warm up lr + if self.trainer.global_step <= self.cfg.schedule.warmup.steps: + if self.cfg.schedule.warmup.name == "constant": + warmup_lr = ( + self.cfg.schedule.optimizer.lr * self.cfg.schedule.warmup.ratio + ) + elif self.cfg.schedule.warmup.name == "linear": + k = (1 - self.trainer.global_step / self.cfg.schedule.warmup.steps) * ( + 1 - self.cfg.schedule.warmup.ratio + ) + warmup_lr = self.cfg.schedule.optimizer.lr * (1 - k) + elif self.cfg.schedule.warmup.name == "exp": + k = self.cfg.schedule.warmup.ratio ** ( + 1 - self.trainer.global_step / self.cfg.schedule.warmup.steps + ) + warmup_lr = self.cfg.schedule.optimizer.lr * k + else: + raise Exception("Unsupported warm up type!") + for pg in optimizer.param_groups: + pg["lr"] = warmup_lr + + # update params + optimizer.step(closure=optimizer_closure) + optimizer.zero_grad() + + def get_progress_bar_dict(self): + # don't show the version number + items = super().get_progress_bar_dict() + items.pop("v_num", None) + items.pop("loss", None) + return items + + def scalar_summary(self, tag, phase, value, step): + """ + Write Tensorboard scalar summary log. + Args: + tag: Name for the tag + phase: 'Train' or 'Val' + value: Value to record + step: Step value to record + + """ + # if self.local_rank < 1: + if self.logger: + self.logger.experiment.add_scalars(tag, {phase: value}, step) + + def info(self, string): + if self.logger: + self.logger.info(string) + + # ------------Hooks----------------- + def on_train_start(self) -> None: + if self.current_epoch > 0: + self.lr_scheduler.last_epoch = self.current_epoch - 1 + + def on_pretrain_routine_end(self) -> None: + if "weight_averager" in self.cfg.model: + if self.logger: + self.logger.info("Weight Averaging is enabled") + if self.weight_averager and self.weight_averager.has_inited(): + self.weight_averager.to(self.weight_averager.device) + return + self.weight_averager = build_weight_averager( + self.cfg.model.weight_averager, device=self.device + ) + self.weight_averager.load_from(self.model) + + def on_epoch_start(self): + self.model.set_epoch(self.current_epoch) + + def on_train_batch_end(self, outputs, batch, batch_idx, dataloader_idx) -> None: + if self.weight_averager: + self.weight_averager.update(self.model, self.global_step) + + def on_validation_epoch_start(self): + if self.weight_averager: + self.weight_averager.apply_to(self.avg_model) + + def on_test_epoch_start(self) -> None: + if self.weight_averager: + self.on_load_checkpoint({"state_dict": self.state_dict()}) + self.weight_averager.apply_to(self.model) + + def on_load_checkpoint(self, checkpointed_state: Dict[str, Any]) -> None: + if self.weight_averager: + avg_params = convert_avg_params(checkpointed_state) + if len(avg_params) != len(self.model.state_dict()): + if self.logger: + self.logger.info( + "Weight averaging is enabled but average state does not" + "match the model" + ) + else: + self.weight_averager = build_weight_averager( + self.cfg.model.weight_averager, device=self.device + ) + self.weight_averager.load_state_dict(avg_params) + if self.logger: + self.logger.info("Loaded average state from checkpoint.") diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/__init__.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/__init__.py new file mode 100644 index 
0000000000..10c0ee2b88 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/__init__.py @@ -0,0 +1,41 @@ +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.box_transform \ + import bbox2distance, distance2bbox +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.check_point import ( + convert_avg_params, + load_model_weight, + save_model, +) +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.config import cfg, load_config +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.logger \ + import AverageMeter, Logger, MovingAverage, NanoDetLightningLogger +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.misc \ + import images_to_levels, multi_apply, unmap +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.path import collect_files, mkdir +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.rank_filter import rank_filter +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.scatter_gather \ + import gather_results, scatter_kwargs +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.util_mixins import NiceRepr + + +__all__ = [ + "distance2bbox", + "bbox2distance", + "load_model_weight", + "save_model", + "cfg", + "load_config", + "AverageMeter", + "Logger", + "MovingAverage", + "images_to_levels", + "multi_apply", + "unmap", + "mkdir", + "rank_filter", + "gather_results", + "scatter_kwargs", + "NiceRepr", + "collect_files", + "NanoDetLightningLogger", + "convert_avg_params", +] diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/box_transform.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/box_transform.py new file mode 100644 index 0000000000..4b82a8c19f --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/box_transform.py @@ -0,0 +1,49 @@ +import torch + + +def distance2bbox(points, distance, max_shape=None): + """Decode distance prediction to bounding box. + + Args: + points (Tensor): Shape (n, 2), [x, y]. + distance (Tensor): Distance from the given point to 4 + boundaries (left, top, right, bottom). + max_shape (tuple): Shape of the image. + + Returns: + Tensor: Decoded bboxes. + """ + x1 = points[..., 0] - distance[..., 0] + y1 = points[..., 1] - distance[..., 1] + x2 = points[..., 0] + distance[..., 2] + y2 = points[..., 1] + distance[..., 3] + if max_shape is not None: + x1 = x1.clamp(min=0, max=max_shape[1]) + y1 = y1.clamp(min=0, max=max_shape[0]) + x2 = x2.clamp(min=0, max=max_shape[1]) + y2 = y2.clamp(min=0, max=max_shape[0]) + return torch.stack([x1, y1, x2, y2], -1) + + +def bbox2distance(points, bbox, max_dis=None, eps=0.1): + """Decode bounding box based on distances. + + Args: + points (Tensor): Shape (n, 2), [x, y]. + bbox (Tensor): Shape (n, 4), "xyxy" format + max_dis (float): Upper bound of the distance. + eps (float): a small value to ensure target < max_dis, instead <= + + Returns: + Tensor: Decoded distances. 
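+
+    Example (illustrative; values are assumed):
+        >>> points = torch.tensor([[10., 10.]])
+        >>> bbox = torch.tensor([[4., 6., 16., 22.]])
+        >>> bbox2distance(points, bbox)
+        tensor([[ 6.,  4.,  6., 12.]])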
+ """ + left = points[:, 0] - bbox[:, 0] + top = points[:, 1] - bbox[:, 1] + right = bbox[:, 2] - points[:, 0] + bottom = bbox[:, 3] - points[:, 1] + if max_dis is not None: + left = left.clamp(min=0, max=max_dis - eps) + top = top.clamp(min=0, max=max_dis - eps) + right = right.clamp(min=0, max=max_dis - eps) + bottom = bottom.clamp(min=0, max=max_dis - eps) + return torch.stack([left, top, right, bottom], -1) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/check_point.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/check_point.py new file mode 100644 index 0000000000..2ac516167a --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/check_point.py @@ -0,0 +1,100 @@ +# Modifications Copyright 2021 - present, OpenDR European Project +# +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any, Dict +import torch + + +def load_model_weight(model, checkpoint, logger=None): + state_dict = checkpoint["state_dict"].copy() + for k in checkpoint["state_dict"]: + # convert average model weights + if k.startswith("avg_model."): + v = state_dict.pop(k) + state_dict[k[4:]] = v + # strip prefix of state_dict + if list(state_dict.keys())[0].startswith("module."): + state_dict = {k[7:]: v for k, v in state_dict.items()} + if list(state_dict.keys())[0].startswith("model."): + state_dict = {k[6:]: v for k, v in state_dict.items()} + + model_state_dict = ( + model.module.state_dict() if hasattr(model, "module") else model.state_dict() + ) + + # check loaded parameters and created model parameters + for k in state_dict: + if k in model_state_dict: + if state_dict[k].shape != model_state_dict[k].shape: + if logger: + logger.log( + "Skip loading parameter {}, required shape{}, " + "loaded shape{}.".format( + k, model_state_dict[k].shape, state_dict[k].shape + ) + ) + state_dict[k] = model_state_dict[k] + else: + if logger: + logger.log("Drop parameter {}.".format(k)) + for k in model_state_dict: + if not (k in state_dict): + if logger: + logger.log("No param {}.".format(k)) + state_dict[k] = model_state_dict[k] + model.load_state_dict(state_dict, strict=False) + return model + + +# @rank_zero_only +# @rank_filter +def save_model(model, path, epoch, iter, optimizer=None): + model_state_dict = ( + model.module.state_dict() if hasattr(model, "module") else model.state_dict() + ) + data = {"epoch": epoch, "state_dict": model_state_dict, "iter": iter} + if optimizer is not None: + data["optimizer"] = optimizer.state_dict() + + torch.save(data, path) + + +# @rank_zero_only +# @rank_filter +def save_model_state(path, model, weight_averager=None, logger=None): + if logger: + logger.info("Saving model to {}".format(path)) + state_dict = ( + weight_averager.state_dict() + if weight_averager + else model.state_dict() + ) + torch.save({"state_dict": state_dict}, path) + + +def convert_avg_params(checkpoint: Dict[str, Any]) -> Dict[str, Any]: + """Converts average state dict to 
the format that can be loaded to a model. + Args: + checkpoint: model. + Returns: + Converted average state dict. + """ + state_dict = checkpoint["state_dict"] + avg_weights = {} + for k, v in state_dict.items(): + if "avg_model" in k: + avg_weights[k[10:]] = v + return avg_weights diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/config.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/config.py new file mode 100644 index 0000000000..1b8b3e055c --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/config.py @@ -0,0 +1,39 @@ +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.yacs import CfgNode + +cfg = CfgNode(new_allowed=True) +cfg.save_dir = "./" +# common params for NETWORK +cfg.model = CfgNode(new_allowed=True) +cfg.model.arch = CfgNode(new_allowed=True) +cfg.model.arch.backbone = CfgNode(new_allowed=True) +cfg.model.arch.fpn = CfgNode(new_allowed=True) +cfg.model.arch.head = CfgNode(new_allowed=True) + +# DATASET related params +cfg.data = CfgNode(new_allowed=True) +cfg.data.train = CfgNode(new_allowed=True) +cfg.data.val = CfgNode(new_allowed=True) +cfg.device = CfgNode(new_allowed=True) +# train +cfg.schedule = CfgNode(new_allowed=True) + +# logger +cfg.log = CfgNode() +cfg.log.interval = 50 + +# testing +cfg.test = CfgNode() +# size of images for each device + + +def load_config(cfg, args_cfg): + cfg.defrost() + cfg.merge_from_file(args_cfg) + cfg.freeze() + + +if __name__ == "__main__": + import sys + + with open(sys.argv[1], "w") as f: + print(cfg, file=f) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/logger.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/logger.py new file mode 100644 index 0000000000..b883d8f336 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/logger.py @@ -0,0 +1,216 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import os +import time + +import numpy as np +from pytorch_lightning.loggers import LightningLoggerBase +from pytorch_lightning.loggers.base import rank_zero_experiment +from pytorch_lightning.utilities import rank_zero_only +from pytorch_lightning.utilities.cloud_io import get_filesystem + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.path import mkdir + + +class Logger: + def __init__(self, local_rank, save_dir="./", use_tensorboard=True): + # mkdir(local_rank, save_dir) + mkdir(save_dir) + self.rank = local_rank + fmt = ("[%(name)s] [%(asctime)s] %(levelname)s: %(message)s") + logging.basicConfig( + level=logging.INFO, + filename=os.path.join(save_dir, "logs.txt"), + filemode="w", + ) + self.log_dir = os.path.join(save_dir, "logs") + console = logging.StreamHandler() + console.setLevel(logging.INFO) + formatter = logging.Formatter(fmt, datefmt="%m-%d %H:%M:%S") + console.setFormatter(formatter) + logging.getLogger().addHandler(console) + if use_tensorboard: + try: + from torch.utils.tensorboard import SummaryWriter + except ImportError: + raise ImportError( + 'Please run "pip install future tensorboard" to install ' + "the dependencies to use torch.utils.tensorboard " + "(applicable to PyTorch 1.1 or higher)" + ) from None + if self.rank < 1: + logging.info( + "Using Tensorboard, logs will be saved in {}".format(self.log_dir) + ) + self.writer = SummaryWriter(log_dir=self.log_dir) + + def log(self, string): + if self.rank < 1: + logging.info(string) + + def scalar_summary(self, tag, phase, value, step): + if self.rank < 1: + self.writer.add_scalars(tag, {phase: value}, step) + + +class MovingAverage(object): + def __init__(self, val, window_size=50): + self.window_size = window_size + self.reset() + self.push(val) + + def reset(self): + self.queue = [] + + def push(self, val): + self.queue.append(val) + if len(self.queue) > self.window_size: + self.queue.pop(0) + + def avg(self): + return np.mean(self.queue) + + +class AverageMeter(object): + """Computes and stores the average and current value""" + + def __init__(self, val): + self.reset() + self.update(val) + + def reset(self): + self.val = 0 + self.avg = 0 + self.sum = 0 + self.count = 0 + + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + if self.count > 0: + self.avg = self.sum / self.count + + +class NanoDetLightningLogger(LightningLoggerBase): + def __init__(self, save_dir="./", **kwargs): + super().__init__() + self._name = "NanoDet" + self._version = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) + self.log_dir = os.path.join(save_dir, f"logs-{self._version}") + + self._fs = get_filesystem(save_dir) + self._fs.makedirs(self.log_dir, exist_ok=True) + self._init_logger() + + self._experiment = None + self._kwargs = kwargs + + @property + def name(self): + return self._name + + @property + @rank_zero_experiment + def experiment(self): + r""" + Actual tensorboard object. To use TensorBoard features in your + :class:`~pytorch_lightning.core.lightning.LightningModule` do the following. 
+ + Example:: + + self.logger.experiment.some_tensorboard_function() + + """ + if self._experiment is not None: + return self._experiment + + assert rank_zero_only.rank == 0, "tried to init log dirs in non global_rank=0" + + try: + from torch.utils.tensorboard import SummaryWriter + except ImportError: + raise ImportError( + 'Please run "pip install future tensorboard" to install ' + "the dependencies to use torch.utils.tensorboard " + "(applicable to PyTorch 1.1 or higher)" + ) from None + + self._experiment = SummaryWriter(log_dir=self.log_dir, **self._kwargs) + return self._experiment + + @property + def version(self): + return self._version + + @rank_zero_only + def _init_logger(self): + self.logger = logging.getLogger(name=self.name) + self.logger.setLevel(logging.INFO) + + # create file handler + fh = logging.FileHandler(os.path.join(self.log_dir, "logs.txt")) + fh.setLevel(logging.INFO) + # set file formatter + f_fmt = "[%(name)s][%(asctime)s]%(levelname)s: %(message)s" + file_formatter = logging.Formatter(f_fmt, datefmt="%m-%d %H:%M:%S") + fh.setFormatter(file_formatter) + + # create console handler + ch = logging.StreamHandler() + ch.setLevel(logging.INFO) + # set console formatter + + c_fmt = ("[%(name)s] [%(asctime)s] %(levelname)s: %(message)s") + console_formatter = logging.Formatter(c_fmt, datefmt="%m-%d %H:%M:%S") + ch.setFormatter(console_formatter) + + # add the handlers to the logger + self.logger.addHandler(fh) + self.logger.addHandler(ch) + + @rank_zero_only + def info(self, string): + self.logger.info(string) + + @rank_zero_only + def log(self, string): + self.logger.info(string) + + @rank_zero_only + def dump_cfg(self, cfg_node): + with open(os.path.join(self.log_dir, "train_cfg.yml"), "w") as f: + cfg_node.dump(stream=f) + + @rank_zero_only + def log_hyperparams(self, params): + self.logger.info(f"hyperparams: {params}") + + @rank_zero_only + def log_metrics(self, metrics, step): + self.logger.info(f"Val_metrics: {metrics}") + for k, v in metrics.items(): + self.experiment.add_scalars("Val_metrics/" + k, {"Val": v}, step) + + @rank_zero_only + def save(self): + super().save() + + @rank_zero_only + def finalize(self, status): + self.experiment.flush() + self.experiment.close() + self.save() diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/misc.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/misc.py new file mode 100644 index 0000000000..961b77bc28 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/misc.py @@ -0,0 +1,52 @@ +# Modification 2020 RangiLyu +# Copyright 2018-2019 Open-MMLab. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from functools import partial + +import torch + + +def multi_apply(func, *args, **kwargs): + pfunc = partial(func, **kwargs) if kwargs else func + map_results = map(pfunc, *args) + return tuple(map(list, zip(*map_results))) + + +def images_to_levels(target, num_level_anchors): + """Convert targets by image to targets by feature level. 
+ + [target_img0, target_img1] -> [target_level0, target_level1, ...] + """ + target = torch.stack(target, 0) + level_targets = [] + start = 0 + for n in num_level_anchors: + end = start + n + level_targets.append(target[:, start:end].squeeze(0)) + start = end + return level_targets + + +def unmap(data, count, inds, fill=0): + """Unmap a subset of item (data) back to the original set of items (of + size count)""" + if data.dim() == 1: + ret = data.new_full((count,), fill) + ret[inds.type(torch.bool)] = data + else: + new_size = (count,) + data.size()[1:] + ret = data.new_full(new_size, fill) + ret[inds.type(torch.bool), :] = data + return ret diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/path.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/path.py new file mode 100644 index 0000000000..b0887d41a6 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/path.py @@ -0,0 +1,34 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +# from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.rank_filter import rank_filter + + +# @rank_filter +def mkdir(path): + if not os.path.exists(path): + os.makedirs(path) + + +def collect_files(path, exts): + file_paths = [] + for maindir, subdir, filename_list in os.walk(path): + for filename in filename_list: + file_path = os.path.join(maindir, filename) + ext = os.path.splitext(file_path)[1] + if ext in exts: + file_paths.append(file_path) + return file_paths diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/rank_filter.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/rank_filter.py new file mode 100644 index 0000000000..2316b2f983 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/rank_filter.py @@ -0,0 +1,23 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
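Editorial note (not part of the patch): path.py above contributes two small filesystem helpers that the learner and logger rely on. A usage sketch, with the directory names chosen purely for illustration:

from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.path import mkdir, collect_files

mkdir("./temp/logs")  # creates the directory only if it does not already exist
# recursively walk a folder and keep files whose extension is in the given list
annotations = collect_files("./temp/test_data", exts=[".xml"])
print(len(annotations), "annotation files found")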
+ + +def rank_filter(func): + def func_filter(local_rank=-1, *args, **kwargs): + if local_rank < 1: + return func(*args, **kwargs) + else: + pass + + return func_filter diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/scatter_gather.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/scatter_gather.py new file mode 100644 index 0000000000..8e28560d69 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/scatter_gather.py @@ -0,0 +1,97 @@ +# Copyright 2021 RangiLyu. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pickle + +import torch +import torch.distributed as dist +from torch.autograd import Variable +from torch.nn.parallel._functions import Scatter + + +def list_scatter(input, target_gpus, chunk_sizes): + ret = [] + for idx, size in enumerate(chunk_sizes): + ret.append(input[:size]) + del input[:size] + return tuple(ret) + + +def scatter(inputs, target_gpus, dim=0, chunk_sizes=None): + """ + Slices variables into approximately equal chunks and + distributes them across given GPUs. Duplicates + references to objects that are not variables. Does not + support Tensors. + """ + + def scatter_map(obj): + if isinstance(obj, Variable): + return Scatter.apply(target_gpus, chunk_sizes, dim, obj) + assert not torch.is_tensor(obj), "Tensors not supported in scatter." 
+ if isinstance(obj, list): + return list_scatter(obj, target_gpus, chunk_sizes) + if isinstance(obj, tuple): + return list(zip(*map(scatter_map, obj))) + if isinstance(obj, dict): + return list(map(type(obj), zip(*map(scatter_map, obj.items())))) + return [obj for targets in target_gpus] + + return scatter_map(inputs) + + +def scatter_kwargs(inputs, kwargs, target_gpus, dim=0, chunk_sizes=None): + r"""Scatter with support for kwargs dictionary""" + inputs = scatter(inputs, target_gpus, dim, chunk_sizes) if inputs else [] + kwargs = scatter(kwargs, target_gpus, dim, chunk_sizes) if kwargs else [] + if len(inputs) < len(kwargs): + inputs.extend([() for _ in range(len(kwargs) - len(inputs))]) + elif len(kwargs) < len(inputs): + kwargs.extend([{} for _ in range(len(inputs) - len(kwargs))]) + inputs = tuple(inputs) + kwargs = tuple(kwargs) + return inputs, kwargs + + +def gather_results(result_part, device): + rank = -1 + world_size = 1 + if dist.is_available() and dist.is_initialized(): + rank = dist.get_rank() + world_size = dist.get_world_size() + + # dump result part to tensor with pickle + part_tensor = torch.tensor( + bytearray(pickle.dumps(result_part)), dtype=torch.uint8, device=device + ) + + # gather all result part tensor shape + shape_tensor = torch.tensor(part_tensor.shape, device=device) + shape_list = [shape_tensor.clone() for _ in range(world_size)] + dist.all_gather(shape_list, shape_tensor) + + # padding result part tensor to max length + shape_max = torch.tensor(shape_list).max() + part_send = torch.zeros(shape_max, dtype=torch.uint8, device=device) + part_send[: shape_tensor[0]] = part_tensor + part_recv_list = [part_tensor.new_zeros(shape_max) for _ in range(world_size)] + + # gather all result dict + dist.all_gather(part_recv_list, part_send) + + if rank < 1: + all_res = {} + for recv, shape in zip(part_recv_list, shape_list): + all_res.update(pickle.loads(recv[: shape[0]].cpu().numpy().tobytes())) + return all_res diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/util_mixins.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/util_mixins.py new file mode 100644 index 0000000000..278aa037f8 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/util_mixins.py @@ -0,0 +1,105 @@ +"""This module defines the :class:`NiceRepr` mixin class, which defines a +``__repr__`` and ``__str__`` method that only depend on a custom ``__nice__`` +method, which you must define. This means you only have to overload one +function instead of two. Furthermore, if the object defines a ``__len__`` +method, then the ``__nice__`` method defaults to something sensible, otherwise +it is treated as abstract and raises ``NotImplementedError``. + +To use simply have your object inherit from :class:`NiceRepr` +(multi-inheritance should be ok). + +This code was copied from the ubelt library: https://github.com/Erotemic/ubelt + +Example: + >>> # Objects that define __nice__ have a default __str__ and __repr__ + >>> class Student(NiceRepr): + ... def __init__(self, name): + ... self.name = name + ... def __nice__(self): + ... return self.name + >>> s1 = Student('Alice') + >>> s2 = Student('Bob') + >>> print(f's1 = {s1}') + >>> print(f's2 = {s2}') + s1 = + s2 = + +Example: + >>> # Objects that define __len__ have a default __nice__ + >>> class Group(NiceRepr): + ... def __init__(self, data): + ... self.data = data + ... def __len__(self): + ... 
return len(self.data) + >>> g = Group([1, 2, 3]) + >>> print(f'g = {g}') + g = +""" +import warnings + + +class NiceRepr(object): + """Inherit from this class and define ``__nice__`` to "nicely" print your + objects. + + Defines ``__str__`` and ``__repr__`` in terms of ``__nice__`` function + Classes that inherit from :class:`NiceRepr` should redefine ``__nice__``. + If the inheriting class has a ``__len__``, method then the default + ``__nice__`` method will return its length. + + Example: + >>> class Foo(NiceRepr): + ... def __nice__(self): + ... return 'info' + >>> foo = Foo() + >>> assert str(foo) == '' + >>> assert repr(foo).startswith('>> class Bar(NiceRepr): + ... pass + >>> bar = Bar() + >>> import pytest + >>> with pytest.warns(None) as record: + >>> assert 'object at' in str(bar) + >>> assert 'object at' in repr(bar) + + Example: + >>> class Baz(NiceRepr): + ... def __len__(self): + ... return 5 + >>> baz = Baz() + >>> assert str(baz) == '' + """ + + def __nice__(self): + """str: a "nice" summary string describing this module""" + if hasattr(self, "__len__"): + # It is a common pattern for objects to use __len__ in __nice__ + # As a convenience we define a default __nice__ for these objects + return str(len(self)) + else: + # In all other cases force the subclass to overload __nice__ + raise NotImplementedError( + f"Define the __nice__ method for {self.__class__!r}" + ) + + def __repr__(self): + """str: the string of the module""" + try: + nice = self.__nice__() + classname = self.__class__.__name__ + return f"<{classname}({nice}) at {hex(id(self))}>" + except NotImplementedError as ex: + warnings.warn(str(ex), category=RuntimeWarning) + return object.__repr__(self) + + def __str__(self): + """str: the string of the module""" + try: + classname = self.__class__.__name__ + nice = self.__nice__() + return f"<{classname}({nice})>" + except NotImplementedError as ex: + warnings.warn(str(ex), category=RuntimeWarning) + return object.__repr__(self) diff --git a/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/yacs.py b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/yacs.py new file mode 100644 index 0000000000..c38294b663 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/algorithm/nanodet/util/yacs.py @@ -0,0 +1,510 @@ +# Copyright (c) 2018-present, Facebook, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +############################################################################## +"""YACS -- Yet Another Configuration System is designed to be a simple +configuration management system for academic and industrial research +projects. + +See README.md for usage and examples. 
+""" + +import copy +import io +import logging +import os +from ast import literal_eval + +import yaml +import importlib.util + +# Filename extensions for loading configs from files +_YAML_EXTS = {"", ".yaml", ".yml"} +_PY_EXTS = {".py"} + +_FILE_TYPES = (io.IOBase,) + +# CfgNodes can only contain a limited set of valid types +_VALID_TYPES = {tuple, list, str, int, float, bool, type(None)} + + +logger = logging.getLogger(__name__) + + +class CfgNode(dict): + """ + CfgNode represents an internal node in the configuration tree. It's a simple + dict-like container that allows for attribute-based access to keys. + """ + + IMMUTABLE = "__immutable__" + DEPRECATED_KEYS = "__deprecated_keys__" + RENAMED_KEYS = "__renamed_keys__" + NEW_ALLOWED = "__new_allowed__" + + def __init__(self, init_dict=None, key_list=None, new_allowed=False): + """ + Args: + init_dict (dict): the possibly-nested dictionary to initailize the + CfgNode. + key_list (list[str]): a list of names which index this CfgNode from + the root. + Currently only used for logging purposes. + new_allowed (bool): whether adding new key is allowed when merging with + other configs. + """ + # Recursively convert nested dictionaries in init_dict into CfgNodes + init_dict = {} if init_dict is None else init_dict + key_list = [] if key_list is None else key_list + init_dict = self._create_config_tree_from_dict(init_dict, key_list) + super(CfgNode, self).__init__(init_dict) + # Manage if the CfgNode is frozen or not + self.__dict__[CfgNode.IMMUTABLE] = False + # Deprecated options + # If an option is removed from the code and you don't want to break existing + # yaml configs, you can add the full config key as a string to the set below. + self.__dict__[CfgNode.DEPRECATED_KEYS] = set() + # Renamed options + # If you rename a config option, record the mapping from the old name to the + # new name in the dictionary below. Optionally, if the type also changed, you + # can make the value a tuple that specifies first the renamed key and then + # instructions for how to edit the config file. + self.__dict__[CfgNode.RENAMED_KEYS] = { + # 'EXAMPLE.OLD.KEY': 'EXAMPLE.NEW.KEY', # Dummy example to follow + # 'EXAMPLE.OLD.KEY': ( # A more complex example to follow + # 'EXAMPLE.NEW.KEY', + # "Also convert to a tuple, e.g., 'foo' -> ('foo',) or " + # + "'foo:bar' -> ('foo', 'bar')" + # ), + } + + # Allow new attributes after initialisation + self.__dict__[CfgNode.NEW_ALLOWED] = new_allowed + + @classmethod + def _create_config_tree_from_dict(cls, dic, key_list): + """ + Create a configuration tree using the given dict. + Any dict-like objects inside dict will be treated as a new CfgNode. + + Args: + dic (dict): + key_list (list[str]): a list of names which index this CfgNode from + the root. Currently only used for logging purposes. 
+ """ + dic = copy.deepcopy(dic) + for k, v in dic.items(): + if isinstance(v, dict): + # Convert dict to CfgNode + dic[k] = cls(v, key_list=key_list + [k]) + else: + # Check for valid leaf type or nested CfgNode + _assert_with_logging( + _valid_type(v, allow_cfg_node=False), + "Key {} with value {} is not a valid type; valid types: {}".format( + ".".join(key_list + [k]), type(v), _VALID_TYPES + ), + ) + return dic + + def __getattr__(self, name): + if name in self: + return self[name] + else: + raise AttributeError(name) + + def __setattr__(self, name, value): + if self.is_frozen(): + raise AttributeError( + "Attempted to set {} to {}, but CfgNode is immutable".format( + name, value + ) + ) + + _assert_with_logging( + name not in self.__dict__, + "Invalid attempt to modify internal CfgNode state: {}".format(name), + ) + _assert_with_logging( + _valid_type(value, allow_cfg_node=True), + "Invalid type {} for key {}; valid types = {}".format( + type(value), name, _VALID_TYPES + ), + ) + + self[name] = value + + def __str__(self): + def _indent(s_, num_spaces): + s = s_.split("\n") + if len(s) == 1: + return s_ + first = s.pop(0) + s = [(num_spaces * " ") + line for line in s] + s = "\n".join(s) + s = first + "\n" + s + return s + + r = "" + s = [] + for k, v in sorted(self.items()): + seperator = "\n" if isinstance(v, CfgNode) else " " + attr_str = "{}:{}{}".format(str(k), seperator, str(v)) + attr_str = _indent(attr_str, 2) + s.append(attr_str) + r += "\n".join(s) + return r + + def __repr__(self): + return "{}({})".format(self.__class__.__name__, super(CfgNode, self).__repr__()) + + def dump(self, **kwargs): + """Dump to a string.""" + + def convert_to_dict(cfg_node, key_list): + if not isinstance(cfg_node, CfgNode): + _assert_with_logging( + _valid_type(cfg_node), + "Key {} with value {} is not a valid type; valid types: {}".format( + ".".join(key_list), type(cfg_node), _VALID_TYPES + ), + ) + return cfg_node + else: + cfg_dict = dict(cfg_node) + for k, v in cfg_dict.items(): + cfg_dict[k] = convert_to_dict(v, key_list + [k]) + return cfg_dict + + self_as_dict = convert_to_dict(self, []) + return yaml.safe_dump(self_as_dict, **kwargs) + + def merge_from_file(self, cfg_filename): + """Load a yaml config file and merge it this CfgNode.""" + with open(cfg_filename, "r", encoding="utf-8") as f: + cfg = self.load_cfg(f) + self.merge_from_other_cfg(cfg) + + def merge_from_other_cfg(self, cfg_other): + """Merge `cfg_other` into this CfgNode.""" + _merge_a_into_b(cfg_other, self, self, []) + + def merge_from_list(self, cfg_list): + """Merge config (keys, values) in a list (e.g., from command line) into + this CfgNode. For example, `cfg_list = ['FOO.BAR', 0.5]`. 
+ """ + _assert_with_logging( + len(cfg_list) % 2 == 0, + "Override list has odd length: {}; it must be a list of pairs".format( + cfg_list + ), + ) + root = self + for full_key, v in zip(cfg_list[0::2], cfg_list[1::2]): + if root.key_is_deprecated(full_key): + continue + if root.key_is_renamed(full_key): + root.raise_key_rename_error(full_key) + key_list = full_key.split(".") + d = self + for subkey in key_list[:-1]: + _assert_with_logging( + subkey in d, "Non-existent key: {}".format(full_key) + ) + d = d[subkey] + subkey = key_list[-1] + _assert_with_logging(subkey in d, "Non-existent key: {}".format(full_key)) + value = self._decode_cfg_value(v) + value = _check_and_coerce_cfg_value_type(value, d[subkey], subkey, full_key) + d[subkey] = value + + def freeze(self): + """Make this CfgNode and all of its children immutable.""" + self._immutable(True) + + def defrost(self): + """Make this CfgNode and all of its children mutable.""" + self._immutable(False) + + def is_frozen(self): + """Return mutability.""" + return self.__dict__[CfgNode.IMMUTABLE] + + def _immutable(self, is_immutable): + """Set immutability to is_immutable and recursively apply the setting + to all nested CfgNodes. + """ + self.__dict__[CfgNode.IMMUTABLE] = is_immutable + # Recursively set immutable state + for v in self.__dict__.values(): + if isinstance(v, CfgNode): + v._immutable(is_immutable) + for v in self.values(): + if isinstance(v, CfgNode): + v._immutable(is_immutable) + + def clone(self): + """Recursively copy this CfgNode.""" + return copy.deepcopy(self) + + def register_deprecated_key(self, key): + """Register key (e.g. `FOO.BAR`) a deprecated option. When merging deprecated + keys a warning is generated and the key is ignored. + """ + _assert_with_logging( + key not in self.__dict__[CfgNode.DEPRECATED_KEYS], + "key {} is already registered as a deprecated key".format(key), + ) + self.__dict__[CfgNode.DEPRECATED_KEYS].add(key) + + def register_renamed_key(self, old_name, new_name, message=None): + """Register a key as having been renamed from `old_name` to `new_name`. + When merging a renamed key, an exception is thrown alerting to user to + the fact that the key has been renamed. + """ + _assert_with_logging( + old_name not in self.__dict__[CfgNode.RENAMED_KEYS], + "key {} is already registered as a renamed cfg key".format(old_name), + ) + value = new_name + if message: + value = (new_name, message) + self.__dict__[CfgNode.RENAMED_KEYS][old_name] = value + + def key_is_deprecated(self, full_key): + """Test if a key is deprecated.""" + if full_key in self.__dict__[CfgNode.DEPRECATED_KEYS]: + logger.warning("Deprecated config key (ignoring): {}".format(full_key)) + return True + return False + + def key_is_renamed(self, full_key): + """Test if a key is renamed.""" + return full_key in self.__dict__[CfgNode.RENAMED_KEYS] + + def raise_key_rename_error(self, full_key): + new_key = self.__dict__[CfgNode.RENAMED_KEYS][full_key] + if isinstance(new_key, tuple): + msg = " Note: " + new_key[1] + new_key = new_key[0] + else: + msg = "" + raise KeyError( + "Key {} was renamed to {}; please update your config.{}".format( + full_key, new_key, msg + ) + ) + + def is_new_allowed(self): + return self.__dict__[CfgNode.NEW_ALLOWED] + + @classmethod + def load_cfg(cls, cfg_file_obj_or_str): + """ + Load a cfg. 
+ Args: + cfg_file_obj_or_str (str or file): + Supports loading from: + - A file object backed by a YAML file + - A file object backed by a Python source file that exports an attribute + "cfg" that is either a dict or a CfgNode + - A string that can be parsed as valid YAML + """ + _assert_with_logging( + isinstance(cfg_file_obj_or_str, _FILE_TYPES + (str,)), + "Expected first argument to be of type {} or {}, but it was {}".format( + _FILE_TYPES, str, type(cfg_file_obj_or_str) + ), + ) + if isinstance(cfg_file_obj_or_str, str): + return cls._load_cfg_from_yaml_str(cfg_file_obj_or_str) + elif isinstance(cfg_file_obj_or_str, _FILE_TYPES): + return cls._load_cfg_from_file(cfg_file_obj_or_str) + else: + raise NotImplementedError("Impossible to reach here (unless there's a bug)") + + @classmethod + def _load_cfg_from_file(cls, file_obj): + """Load a config from a YAML file or a Python source file.""" + _, file_extension = os.path.splitext(file_obj.name) + if file_extension in _YAML_EXTS: + return cls._load_cfg_from_yaml_str(file_obj.read()) + elif file_extension in _PY_EXTS: + return cls._load_cfg_py_source(file_obj.name) + else: + raise Exception( + "Attempt to load from an unsupported file type {}; " + "only {} are supported".format(file_obj, _YAML_EXTS.union(_PY_EXTS)) + ) + + @classmethod + def _load_cfg_from_yaml_str(cls, str_obj): + """Load a config from a YAML string encoding.""" + cfg_as_dict = yaml.safe_load(str_obj) + return cls(cfg_as_dict) + + @classmethod + def _load_cfg_py_source(cls, filename): + """Load a config from a Python source file.""" + module = _load_module_from_file("yacs.config.override", filename) + _assert_with_logging( + hasattr(module, "cfg"), + "Python module from file {} must have 'cfg' attr".format(filename), + ) + VALID_ATTR_TYPES = {dict, CfgNode} + _assert_with_logging( + type(module.cfg) in VALID_ATTR_TYPES, + "Imported module 'cfg' attr must be in {} but is {} instead".format( + VALID_ATTR_TYPES, type(module.cfg) + ), + ) + return cls(module.cfg) + + @classmethod + def _decode_cfg_value(cls, value): + """ + Decodes a raw config value (e.g., from a yaml config files or command + line argument) into a Python object. + + If the value is a dict, it will be interpreted as a new CfgNode. + If the value is a str, it will be evaluated as literals. + Otherwise it is returned as-is. + """ + # Configs parsed from raw yaml will contain dictionary keys that need to be + # converted to CfgNode objects + if isinstance(value, dict): + return cls(value) + # All remaining processing is only applied to strings + if not isinstance(value, str): + return value + # Try to interpret `value` as a: + # string, number, tuple, list, dict, boolean, or None + try: + value = literal_eval(value) + # The following two excepts allow v to pass through when it represents a + # string. + # + # Longer explanation: + # The type of v is always a string (before calling literal_eval), but + # sometimes it *represents* a string and other times a data structure, like + # a list. In the case that v represents a string, what we got back from the + # yaml parser is 'foo' *without quotes* (so, not '"foo"'). literal_eval is + # ok with '"foo"', but will raise a ValueError if given 'foo'. In other + # cases, like paths (v = 'foo/bar' and not v = '"foo/bar"'), literal_eval + # will raise a SyntaxError. 
+ except ValueError: + pass + except SyntaxError: + pass + return value + + +load_cfg = ( + CfgNode.load_cfg +) # keep this function in global scope for backward compatibility + + +def _valid_type(value, allow_cfg_node=False): + return (type(value) in _VALID_TYPES) or ( + allow_cfg_node and isinstance(value, CfgNode) + ) + + +def _merge_a_into_b(a, b, root, key_list): + """Merge config dictionary a into config dictionary b, clobbering the + options in b whenever they are also specified in a. + """ + _assert_with_logging( + isinstance(a, CfgNode), + "`a` (cur type {}) must be an instance of {}".format(type(a), CfgNode), + ) + _assert_with_logging( + isinstance(b, CfgNode), + "`b` (cur type {}) must be an instance of {}".format(type(b), CfgNode), + ) + + for k, v_ in a.items(): + full_key = ".".join(key_list + [k]) + + v = copy.deepcopy(v_) + v = b._decode_cfg_value(v) + + if k in b: + v = _check_and_coerce_cfg_value_type(v, b[k], k, full_key) + # Recursively merge dicts + if isinstance(v, CfgNode): + try: + _merge_a_into_b(v, b[k], root, key_list + [k]) + except BaseException: + raise + else: + b[k] = v + elif b.is_new_allowed(): + b[k] = v + else: + if root.key_is_deprecated(full_key): + continue + elif root.key_is_renamed(full_key): + root.raise_key_rename_error(full_key) + else: + raise KeyError("Non-existent config key: {}".format(full_key)) + + +def _check_and_coerce_cfg_value_type(replacement, original, key, full_key): + """Checks that `replacement`, which is intended to replace `original` is of + the right type. The type is correct if it matches exactly or is one of a few + cases in which the type can be easily coerced. + """ + original_type = type(original) + replacement_type = type(replacement) + + # The types must match (with some exceptions) + if replacement_type == original_type: + return replacement + + # Cast replacement from from_type to to_type if the replacement and original + # types match from_type and to_type + def conditional_cast(from_type, to_type): + if replacement_type == from_type and original_type == to_type: + return True, to_type(replacement) + else: + return False, None + + # Conditionally casts + # list <-> tuple + casts = [(tuple, list), (list, tuple)] + + for (from_type, to_type) in casts: + converted, converted_value = conditional_cast(from_type, to_type) + if converted: + return converted_value + + raise ValueError( + "Type mismatch ({} vs. {}) with values ({} vs. 
{}) for config " + "key: {}".format( + original_type, replacement_type, original, replacement, full_key + ) + ) + + +def _assert_with_logging(cond, msg): + if not cond: + logger.debug(msg) + assert cond, msg + + +def _load_module_from_file(name, filename): + spec = importlib.util.spec_from_file_location(name, filename) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module diff --git a/src/opendr/perception/object_detection_2d/nanodet/dependencies.ini b/src/opendr/perception/object_detection_2d/nanodet/dependencies.ini new file mode 100644 index 0000000000..c8a39d062d --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/dependencies.ini @@ -0,0 +1,22 @@ +[runtime] +# 'python' key expects a value using the Python requirements file format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +python=torch>=1.7 + pytorch-lightning==1.2.3 + protobuf<=3.20.0 + omegaconf>=2.0.1 + torchvision + numpy<=1.23.5 + opencv-python + pycocotools + Cython + matplotlib + onnx + onnx-simplifier + pyaml + tabulate + tensorboard + torchmetrics + tqdm + +opendr=opendr-toolkit-engine diff --git a/src/opendr/perception/object_detection_2d/nanodet/nanodet_learner.py b/src/opendr/perception/object_detection_2d/nanodet/nanodet_learner.py new file mode 100644 index 0000000000..be505ee6e3 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/nanodet/nanodet_learner.py @@ -0,0 +1,525 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
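Editorial note (not part of the patch): the vendored yacs.py above is what gives the NanoDet configs their dict/attribute dual access and their freeze/defrost semantics. A small sketch of the behaviour the learner depends on, using made-up keys:

from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.yacs import CfgNode

node = CfgNode(new_allowed=True)          # new_allowed lets merge_from_file() add keys not declared up front
node.schedule = CfgNode(new_allowed=True)
node.schedule.total_epochs = 10           # attribute access writes straight into the underlying dict
node.merge_from_list(["schedule.total_epochs", "300"])  # CLI-style overrides; string values are literal_eval'ed
node.freeze()                             # the node and all nested CfgNodes become immutable
print(node.schedule.total_epochs)         # 300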
+ +import os +import datetime +import json +from pathlib import Path + +import pytorch_lightning as pl +import torch +from pytorch_lightning.callbacks import ProgressBar + +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util.check_point import save_model_state +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.model.arch import build_model +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.collate import naive_collate +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.data.dataset import build_dataset +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.trainer.task import TrainingTask +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.evaluator import build_evaluator +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.inferencer.utilities import Predictor +from opendr.perception.object_detection_2d.nanodet.algorithm.nanodet.util import ( + NanoDetLightningLogger, + Logger, + cfg, + load_config, + load_model_weight, + mkdir, +) + +from opendr.engine.data import Image +from opendr.engine.target import BoundingBox, BoundingBoxList +from opendr.engine.constants import OPENDR_SERVER_URL + +from opendr.engine.learners import Learner +from urllib.request import urlretrieve + +_MODEL_NAMES = {"EfficientNet_Lite0_320", "EfficientNet_Lite1_416", "EfficientNet_Lite2_512", + "RepVGG_A0_416", "t", "g", "m", "m_416", "m_0.5x", "m_1.5x", "m_1.5x_416", + "plus_m_320", "plus_m_1.5x_320", "plus_m_416", "plus_m_1.5x_416", "custom"} + + +class NanodetLearner(Learner): + def __init__(self, model_to_use="plus_m_1.5x_416", iters=None, lr=None, batch_size=None, checkpoint_after_iter=None, + checkpoint_load_iter=None, temp_path='', device='cuda', weight_decay=None, warmup_steps=None, + warmup_ratio=None, lr_schedule_T_max=None, lr_schedule_eta_min=None, grad_clip=None): + + """Initialise the Nanodet Learner""" + + self.cfg = self._load_hparam(model_to_use) + self.lr_schedule_T_max = lr_schedule_T_max + self.lr_schedule_eta_min = lr_schedule_eta_min + self.warmup_steps = warmup_steps + self.warmup_ratio = warmup_ratio + self.grad_clip = grad_clip + + self.overwrite_config(lr=lr, weight_decay=weight_decay, iters=iters, batch_size=batch_size, + checkpoint_after_iter=checkpoint_after_iter, checkpoint_load_iter=checkpoint_load_iter, + temp_path=temp_path) + + self.lr = float(self.cfg.schedule.optimizer.lr) + self.weight_decay = float(self.cfg.schedule.optimizer.weight_decay) + self.iters = int(self.cfg.schedule.total_epochs) + self.batch_size = int(self.cfg.device.batchsize_per_gpu) + self.temp_path = self.cfg.save_dir + self.checkpoint_after_iter = int(self.cfg.schedule.val_intervals) + self.checkpoint_load_iter = int(self.cfg.schedule.resume) + self.device = device + self.classes = self.cfg.class_names + + super(NanodetLearner, self).__init__(lr=self.lr, iters=self.iters, batch_size=self.batch_size, + checkpoint_after_iter=self.checkpoint_after_iter, + checkpoint_load_iter=self.checkpoint_load_iter, + temp_path=self.temp_path, device=self.device) + + self.model = build_model(self.cfg.model) + self.logger = None + self.task = None + + def _load_hparam(self, model: str): + """ Load hyperparameters for nanodet models and training configuration + + :parameter model: The name of the model of which we want to load the config file + :type model: str + :return: config with hyperparameters + :rtype: dict + """ + assert ( + model in _MODEL_NAMES + ), f"Invalid model selected. 
Choose one of {_MODEL_NAMES}." + full_path = list() + path = Path(__file__).parent / "algorithm" / "config" + wanted_file = "nanodet_{}.yml".format(model) + for root, dir, files in os.walk(path): + if wanted_file in files: + full_path.append(os.path.join(root, wanted_file)) + assert (len(full_path) == 1), f"You must have only one nanodet_{model}.yaml file in your config folder" + load_config(cfg, full_path[0]) + return cfg + + def overwrite_config(self, lr=0.001, weight_decay=0.05, iters=10, batch_size=64, checkpoint_after_iter=0, + checkpoint_load_iter=0, temp_path=''): + """ + Helping method for config file update to overwrite the cfg with arguments of OpenDR. + :param lr: learning rate used in training + :type lr: float, optional + :param weight_decay: weight_decay used in training + :type weight_decay: float, optional + :param iters: max epoches that the training will be run + :type iters: int, optional + :param batch_size: batch size of each gpu in use, if device is cpu batch size + will be used one single time for training + :type batch_size: int, optional + :param checkpoint_after_iter: after that number of epoches, evaluation will be + performed and one checkpoint will be saved + :type checkpoint_after_iter: int, optional + :param checkpoint_load_iter: the epoch in which checkpoint we want to load + :type checkpoint_load_iter: int, optional + :param temp_path: path to a temporal dictionary for saving models, logs and tensorboard graphs. + If temp_path='' the `cfg.save_dir` will be used instead. + :type temp_path: str, optional + """ + self.cfg.defrost() + + # Nanodet specific parameters + if self.cfg.model.arch.head.num_classes != len(self.cfg.class_names): + raise ValueError( + "cfg.model.arch.head.num_classes must equal len(cfg.class_names), " + "but got {} and {}".format( + self.cfg.model.arch.head.num_classes, len(self.cfg.class_names) + ) + ) + if self.warmup_steps is not None: + self.cfg.schedule.warmup.warmup_steps = self.warmup_steps + if self.warmup_ratio is not None: + self.cfg.schedule.warmup.warmup_ratio = self.warmup_ratio + if self.lr_schedule_T_max is not None: + self.cfg.schedule.lr_schedule.T_max = self.lr_schedule_T_max + if self.lr_schedule_eta_min is not None: + self.cfg.schedule.lr_schedule.eta_min = self.lr_schedule_eta_min + if self.grad_clip is not None: + self.cfg.grad_clip = self.grad_clip + + # OpenDR + if lr is not None: + self.cfg.schedule.optimizer.lr = lr + if weight_decay is not None: + self.cfg.schedule.optimizer.weight_decay = weight_decay + if iters is not None: + self.cfg.schedule.total_epochs = iters + if batch_size is not None: + self.cfg.device.batchsize_per_gpu = batch_size + if checkpoint_after_iter is not None: + self.cfg.schedule.val_intervals = checkpoint_after_iter + if checkpoint_load_iter is not None: + self.cfg.schedule.resume = checkpoint_load_iter + if temp_path != '': + self.cfg.save_dir = temp_path + + self.cfg.freeze() + + def save(self, path=None, verbose=True): + """ + Method for saving the current model and metadata in the path provided. 
+ :param path: path to folder where model will be saved + :type path: str, optional + :param verbose: whether to print a success message or not, defaults to False + :type verbose: bool, optional + """ + path = path if path is not None else self.cfg.save_dir + model = self.cfg.check_point_name + os.makedirs(path, exist_ok=True) + + metadata = {"model_paths": [], "framework": "pytorch", "format": "pth", + "has_data": False, "inference_params": {}, "optimized": False, + "optimizer_info": {}, "classes": self.classes} + + param_filepath = "nanodet_{}.pth".format(model) + metadata["model_paths"].append(param_filepath) + + logger = self.logger if verbose else None + if self.task is None: + print("You do not have call a task yet, only the state of the loaded or initialized model will be saved") + save_model_state(os.path.join(path, metadata["model_paths"][0]), self.model, None, logger) + else: + self.task.save_current_model(os.path.join(path, metadata["model_paths"][0]), logger) + + with open(os.path.join(path, "nanodet_{}.json".format(model)), 'w', encoding='utf-8') as f: + json.dump(metadata, f, ensure_ascii=False, indent=4) + if verbose: + print("Model metadata saved.") + return True + + def load(self, path=None, verbose=True): + """ + Loads the model from the path provided. + :param path: path of the directory where the model was saved + :type path: str, optional + :param verbose: whether to print a success message or not, defaults to False + :type verbose: bool, optional + """ + path = path if path is not None else self.cfg.save_dir + model = self.cfg.check_point_name + if verbose: + print("Model name:", model, "-->", os.path.join(path, model + ".json")) + with open(os.path.join(path, "nanodet_{}.json".format(model))) as f: + metadata = json.load(f) + + logger = Logger(-1, path, False) if verbose else None + ckpt = torch.load(os.path.join(path, metadata["model_paths"][0]), map_location=torch.device(self.device)) + self.model = load_model_weight(self.model, ckpt, logger) + if verbose: + logger.log("Loaded model weight from {}".format(path)) + pass + + def download(self, path=None, mode="pretrained", verbose=False, + url=OPENDR_SERVER_URL + "/perception/object_detection_2d/nanodet/"): + + """ + Downloads all files necessary for inference, evaluation and training. Valid mode options are: ["pretrained", + "images", "test_data"]. 
+ :param path: folder to which files will be downloaded, if None self.temp_path will be used + :type path: str, optional + :param mode: one of: ["pretrained", "images", "test_data"], where "pretrained" downloads a pretrained + network depending on the network choosed in config file, "images" downloads example inference data, + and "test_data" downloads additional image,annotation file and pretrained network for training and testing + :type mode: str, optional + :param model: the specific name of the model to download, all pre-configured configs files have their pretrained + model and can be selected, if None self.cfg.check_point_name will be used + :param verbose: if True, additional information is printed on stdout + :type verbose: bool, optional + :param url: URL to file location on FTP server + :type url: str, optional + """ + + valid_modes = ["pretrained", "images", "test_data"] + if mode not in valid_modes: + raise UserWarning("mode parameter not valid:", mode, ", file should be one of:", valid_modes) + + if path is None: + path = self.temp_path + if not os.path.exists(path): + os.makedirs(path) + + if mode == "pretrained": + + model = self.cfg.check_point_name + + path = os.path.join(path, "nanodet_{}".format(model)) + if not os.path.exists(path): + os.makedirs(path) + + if verbose: + print("Downloading pretrained checkpoint...") + + file_url = os.path.join(url, "pretrained", + "nanodet_{}".format(model), + "nanodet_{}.ckpt".format(model)) + + urlretrieve(file_url, os.path.join(path, "nanodet_{}.ckpt".format(model))) + + if verbose: + print("Downloading pretrain weights if provided...") + + file_url = os.path.join(url, "pretrained", "nanodet_{}".format(model), + "nanodet_{}.pth".format(model)) + try: + urlretrieve(file_url, os.path.join(path, "nanodet_{}.pth".format(model))) + + if verbose: + print("Making metadata...") + metadata = {"model_paths": [], "framework": "pytorch", "format": "pth", + "has_data": False, "inference_params": {}, "optimized": False, + "optimizer_info": {}, "classes": self.classes} + + param_filepath = "nanodet_{}.pth".format(model) + metadata["model_paths"].append(param_filepath) + with open(os.path.join(path, "nanodet_{}.json".format(model)), 'w', encoding='utf-8') as f: + json.dump(metadata, f, ensure_ascii=False, indent=4) + + except: + print("Pretrain weights for this model are not provided!!! 
\n" + "Only the hole ckeckpoint will be download") + + if verbose: + print("Making metadata...") + metadata = {"model_paths": [], "framework": "pytorch", "format": "pth", + "has_data": False, "inference_params": {}, "optimized": False, + "optimizer_info": {}, "classes": self.classes} + + param_filepath = "nanodet_{}.ckpt".format(model) + metadata["model_paths"].append(param_filepath) + with open(os.path.join(path, "nanodet_{}.json".format(model)), 'w', encoding='utf-8') as f: + json.dump(metadata, f, ensure_ascii=False, indent=4) + + elif mode == "images": + file_url = os.path.join(url, "images", "000000000036.jpg") + if verbose: + print("Downloading example image...") + urlretrieve(file_url, os.path.join(path, "000000000036.jpg")) + + elif mode == "test_data": + os.makedirs(os.path.join(path, "test_data"), exist_ok=True) + os.makedirs(os.path.join(path, "test_data", "train"), exist_ok=True) + os.makedirs(os.path.join(path, "test_data", "val"), exist_ok=True) + os.makedirs(os.path.join(path, "test_data", "train", "JPEGImages"), exist_ok=True) + os.makedirs(os.path.join(path, "test_data", "train", "Annotations"), exist_ok=True) + os.makedirs(os.path.join(path, "test_data", "val", "JPEGImages"), exist_ok=True) + os.makedirs(os.path.join(path, "test_data", "val", "Annotations"), exist_ok=True) + # download image + file_url = os.path.join(url, "images", "000000000036.jpg") + if verbose: + print("Downloading image...") + urlretrieve(file_url, os.path.join(path, "test_data", "train", "JPEGImages", "000000000036.jpg")) + urlretrieve(file_url, os.path.join(path, "test_data", "val", "JPEGImages", "000000000036.jpg")) + # download annotations + file_url = os.path.join(url, "annotations", "000000000036.xml") + if verbose: + print("Downloading annotations...") + urlretrieve(file_url, os.path.join(path, "test_data", "train", "Annotations", "000000000036.xml")) + urlretrieve(file_url, os.path.join(path, "test_data", "val", "Annotations", "000000000036.xml")) + + def reset(self): + """This method is not used in this implementation.""" + return NotImplementedError + + def optimize(self): + """This method is not used in this implementation.""" + return NotImplementedError + + def fit(self, dataset, val_dataset=None, logging_path='', verbose=True, seed=123): + """ + This method is used to train the detector on the COCO dataset. Validation is performed in a val_dataset if + provided, else validation is performed in training dataset. + :param dataset: training dataset; COCO and Pascal VOC are supported as ExternalDataset types, + with 'coco' or 'voc' dataset_type attributes. custom DetectionDataset types are not supported at the moment. + Any xml type dataset can be use if voc is used in datatype. 
+ :type dataset: ExternalDataset, DetectionDataset not implemented yet + :param val_dataset: validation dataset object + :type val_dataset: ExternalDataset, DetectionDataset not implemented yet + :param logging_path: subdirectory in temp_path to save logger outputs + :type logging_path: str, optional + :param verbose: if set to True, additional information is printed to STDOUT and logger txt output, + defaults to True + :type verbose: bool + :param seed: seed for reproducibility + :type seed: int + """ + + mkdir(self.cfg.save_dir) + + if verbose: + self.logger = NanoDetLightningLogger(self.temp_path + "/" + logging_path) + self.logger.dump_cfg(self.cfg) + + if seed != '' or seed is not None: + if verbose: + self.logger.info("Set random seed to {}".format(seed)) + pl.seed_everything(seed) + + if verbose: + self.logger.info("Setting up data...") + + train_dataset = build_dataset(self.cfg.data.val, dataset, self.cfg.class_names, "train") + val_dataset = train_dataset if val_dataset is None else \ + build_dataset(self.cfg.data.val, val_dataset, self.cfg.class_names, "val") + + evaluator = build_evaluator(self.cfg.evaluator, val_dataset) + + train_dataloader = torch.utils.data.DataLoader( + train_dataset, + batch_size=self.batch_size, + shuffle=True, + num_workers=self.cfg.device.workers_per_gpu, + pin_memory=True, + collate_fn=naive_collate, + drop_last=True, + ) + val_dataloader = torch.utils.data.DataLoader( + val_dataset, + batch_size=self.batch_size, + shuffle=False, + num_workers=self.cfg.device.workers_per_gpu, + pin_memory=True, + collate_fn=naive_collate, + drop_last=False, + ) + + # Load state dictionary + model_resume_path = ( + os.path.join(self.temp_path, "checkpoints", "model_iter_{}.ckpt".format(self.checkpoint_load_iter)) + if self.checkpoint_load_iter > 0 else None + ) + + if verbose: + self.logger.info("Creating task...") + self.task = TrainingTask(self.cfg, self.model, evaluator) + + if self.device == "cpu": + gpu_ids = None + accelerator = None + elif self.device == "cuda": + gpu_ids = self.cfg.device.gpu_ids + accelerator = None if len(gpu_ids) <= 1 else "ddp" + + trainer = pl.Trainer( + default_root_dir=self.temp_path, + max_epochs=self.iters, + gpus=gpu_ids, + check_val_every_n_epoch=self.checkpoint_after_iter, + accelerator=accelerator, + log_every_n_steps=self.cfg.log.interval, + num_sanity_val_steps=0, + resume_from_checkpoint=model_resume_path, + callbacks=[ProgressBar(refresh_rate=0)], # disable tqdm bar + logger=self.logger, + benchmark=True, + gradient_clip_val=self.cfg.get("grad_clip", 0.0), + ) + + trainer.fit(self.task, train_dataloader, val_dataloader) + + def eval(self, dataset, verbose=True): + """ + This method performs evaluation on a given dataset and returns a dictionary with the evaluation results. 
+ :param dataset: dataset object, to perform evaluation on + :type dataset: ExternalDataset, DetectionDataset not implemented yet + :param verbose: if set to True, additional information is printed to STDOUT and logger txt output, + defaults to True + :type verbose: bool + """ + + timestr = datetime.datetime.now().__format__("%Y_%m_%d_%H:%M:%S") + save_dir = os.path.join(self.cfg.save_dir, timestr) + mkdir(save_dir) + + if verbose: + self.logger = NanoDetLightningLogger(save_dir) + + self.cfg.update({"test_mode": "val"}) + + if verbose: + self.logger.info("Setting up data...") + + val_dataset = build_dataset(self.cfg.data.val, dataset, self.cfg.class_names, "val") + + val_dataloader = torch.utils.data.DataLoader( + val_dataset, + batch_size=self.batch_size, + shuffle=False, + num_workers=self.cfg.device.workers_per_gpu, + pin_memory=True, + collate_fn=naive_collate, + drop_last=False, + ) + evaluator = build_evaluator(self.cfg.evaluator, val_dataset) + + if verbose: + self.logger.info("Creating task...") + self.task = TrainingTask(self.cfg, self.model, evaluator) + + if self.device == "cpu": + gpu_ids = None + accelerator = None + elif self.device == "cuda": + gpu_ids = self.cfg.device.gpu_ids + accelerator = None if len(gpu_ids) <= 1 else "ddp" + + trainer = pl.Trainer( + default_root_dir=save_dir, + gpus=gpu_ids, + accelerator=accelerator, + log_every_n_steps=self.cfg.log.interval, + num_sanity_val_steps=0, + logger=self.logger, + ) + if verbose: + self.logger.info("Starting testing...") + return trainer.test(self.task, val_dataloader, verbose=verbose) + + def infer(self, input, threshold=0.35, verbose=True): + """ + Performs inference + :param input: input can be an Image type image to perform inference + :type input: str, optional + :param threshold: confidence threshold + :type threshold: float, optional + :param verbose: if set to True, additional information is printed to STDOUT and logger txt output, + defaults to True + :type verbose: bool + :return: list of bounding boxes of last image of input or last frame of the video + :rtype: BoundingBoxList + """ + + if verbose: + self.logger = Logger(0, use_tensorboard=False) + predictor = Predictor(self.cfg, self.model, device=self.device) + if not isinstance(input, Image): + input = Image(input) + _input = input.opencv() + meta, res = predictor.inference(_input, verbose) + + bounding_boxes = BoundingBoxList([]) + for label in res[0]: + for box in res[0][label]: + score = box[-1] + if score > threshold: + bbox = BoundingBox(left=box[0], top=box[1], + width=box[2] - box[0], + height=box[3] - box[1], + name=label, + score=score) + bounding_boxes.data.append(bbox) + bounding_boxes.data.sort(key=lambda v: v.confidence) + + return bounding_boxes diff --git a/src/opendr/perception/object_detection_2d/ssd/ssd_learner.py b/src/opendr/perception/object_detection_2d/ssd/ssd_learner.py index 70b4656cf1..e495a8daca 100644 --- a/src/opendr/perception/object_detection_2d/ssd/ssd_learner.py +++ b/src/opendr/perception/object_detection_2d/ssd/ssd_learner.py @@ -212,21 +212,26 @@ def download(self, path=None, mode="pretrained", verbose=False, "ssd_512_vgg16_atrous_wider_person.json") if verbose: print("Downloading metadata...") - urlretrieve(file_url, os.path.join(path, "ssd_default_person.json")) + file_path = os.path.join(path, "ssd_default_person.json") + if not os.path.exists(file_path): + urlretrieve(file_url, file_path) if verbose: print("Downloading params...") file_url = os.path.join(url, "pretrained", "ssd_512_vgg16_atrous_wider_person", 
"ssd_512_vgg16_atrous_wider_person.params") - urlretrieve(file_url, - os.path.join(path, "ssd_512_vgg16_atrous_wider_person.params")) + file_path = os.path.join(path, "ssd_512_vgg16_atrous_wider_person.params") + if not os.path.exists(file_path): + urlretrieve(file_url, file_path) elif mode == "images": file_url = os.path.join(url, "images", "people.jpg") if verbose: print("Downloading example image...") - urlretrieve(file_url, os.path.join(path, "people.jpg")) + file_path = os.path.join(path, "people.jpg") + if not os.path.exists(file_path): + urlretrieve(file_url, file_path) elif mode == "test_data": os.makedirs(os.path.join(path, "test_data"), exist_ok=True) @@ -236,17 +241,23 @@ def download(self, path=None, mode="pretrained", verbose=False, file_url = os.path.join(url, "test_data", "train.txt") if verbose: print("Downloading filelist...") - urlretrieve(file_url, os.path.join(path, "test_data", "train.txt")) + file_path = os.path.join(path, "test_data", "train.txt") + if not os.path.exists(file_path): + urlretrieve(file_url, file_path) # download image file_url = os.path.join(url, "test_data", "Images", "000040.jpg") if verbose: print("Downloading image...") - urlretrieve(file_url, os.path.join(path, "test_data", "Images", "000040.jpg")) + file_path = os.path.join(path, "test_data", "Images", "000040.jpg") + if not os.path.exists(file_path): + urlretrieve(file_url, file_path) # download annotations file_url = os.path.join(url, "test_data", "Annotations", "000040.jpg.txt") if verbose: print("Downloading annotations...") - urlretrieve(file_url, os.path.join(path, "test_data", "Annotations", "000040.jpg.txt")) + file_path = os.path.join(path, "test_data", "Annotations", "000040.jpg.txt") + if not os.path.exists(file_path): + urlretrieve(file_url, file_path) def reset(self): """This method is not used in this implementation.""" diff --git a/src/opendr/perception/object_detection_2d/utils/vis_utils.py b/src/opendr/perception/object_detection_2d/utils/vis_utils.py index a84cc1309b..a96b434a10 100644 --- a/src/opendr/perception/object_detection_2d/utils/vis_utils.py +++ b/src/opendr/perception/object_detection_2d/utils/vis_utils.py @@ -1,127 +1,127 @@ -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import matplotlib.pyplot as plt -import numpy as np -import cv2 - -from opendr.engine.data import Image -from opendr.engine.target import BoundingBoxList -from opendr.perception.object_detection_2d.datasets.transforms import BoundingBoxListToNumpyArray - -np.random.seed(0) - - -def get_unique_color(index, num_colors, cmap='jet'): - """ - Generates num_colors of unique colors from the given cmap and returns the color at index. - """ - colors = plt.get_cmap(cmap) - c = [int(x * 255) for x in colors(index / float(num_colors))[:3]][::-1] - return c - - -VOC_COLORS = [get_unique_color(i, 20) for i in range(20)] -np.random.shuffle(VOC_COLORS) - - -def get_dataset_palette(n_classes): - """ - Generates a palette for n_classes. 
- """ - palette = [get_unique_color(i, n_classes) for i in range(n_classes)] - return palette - - -def draw_bounding_boxes(img, bounding_boxes, class_names=None, show=False, line_thickness=None): - """ - :param img: image on which to draw bounding boxes - :type img: opendr.engine.data.Image - :param bounding_boxes: detected or groundtruth bounding boxes - :type bounding_boxes: opendr.engine.target.BoundingBoxList - :param class_names: list of class names to be drawn on top of the bounding boxes - :type class_names: list - :param show: whether to display the resulting annotated image or not - :type show: bool - :param line_thickness: line thickness in pixels - :type line_thickness: int - """ - if isinstance(img, Image): - img = img.data - assert isinstance(bounding_boxes, BoundingBoxList), "bounding_boxes must be of BoundingBoxList type" - - if not bounding_boxes.data: - return draw_detections(img, np.empty((0, 4)), np.empty((0,)), np.empty((0,)), class_names, show, line_thickness) - - bounding_boxes = BoundingBoxListToNumpyArray()(bounding_boxes) - boxes = bounding_boxes[:, :4] - scores = bounding_boxes[:, 4] - classes = bounding_boxes[:, 5].astype(np.int) - return draw_detections(img, boxes, scores, classes, class_names, show, line_thickness) - - -def draw_detections(img, boxes, scores, classes, class_names=None, show=False, line_thickness=None): - """ - :param img: image on which to draw bounding boxes - :type img: np.ndarray or opendr.engine.data.Image - :param boxes: bounding boxes in numpy array or list format [n, 4] (coordinate format: x1, y1, x2, y2) - :type boxes: np.ndarray or list - :param scores: confidence scores for each bounding box [n] - :type scores: np.ndarray or list - :param classes: class indices for each bounding box [n] - :type classes: np.ndarray or list - :param show: whether to display the resulting annotated image or not - :type show: bool - :param line_thickness: line thickness in pixels - :type line_thickness: int - """ - if isinstance(img, Image): - img = img.data - # boxes in x1, y1, x2, y2 list format [n, 4] - # scores and classes in [n] list format - classes = np.int32(classes) - palette = VOC_COLORS - n_classes = len(palette) - - for idx, pred_box in enumerate(boxes): - # pred_box_w, pred_box_h = pred_box[2] - pred_box[0], pred_box[3] - pred_box[1] - tl = line_thickness or int(0.003 * max(img.shape[:2])) - c1 = (max(0, int(pred_box[0])), max(0, int(pred_box[1]))) - c2 = (min(img.shape[1], int(pred_box[2])), min(img.shape[0], int(pred_box[3]))) - color = tuple(palette[classes[idx] % n_classes]) - - img = np.ascontiguousarray(img, dtype=np.uint8) - cv2.rectangle(img, c1, c2, color, thickness=2) - - if class_names is not None: - label = "{}".format(class_names[classes[idx]]) - - t_size = cv2.getTextSize( - label, 0, fontScale=float(tl) / 5, thickness=1)[0] - - c2 = c1[0] + t_size[0], c1[1] - t_size[1] - 3 - t = - 2 - if c2[1] < 0: - c2 = c1[0] + t_size[0], c1[1] + t_size[1] - t = t_size[1] - 4 - cv2.rectangle(img, c1, c2, color, -1) # filled - - cv2.putText(img, label, (c1[0], c1[1] + t), 0, float(tl) / 5, - [255, 255, 255], thickness=1, lineType=cv2.LINE_AA) - - if show: - cv2.imshow('detections', img) - cv2.waitKey(0) - cv2.destroyAllWindows() - return img +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import matplotlib.pyplot as plt +import numpy as np +import cv2 + +from opendr.engine.data import Image +from opendr.engine.target import BoundingBoxList +from opendr.perception.object_detection_2d.datasets.transforms import BoundingBoxListToNumpyArray + +np.random.seed(0) + + +def get_unique_color(index, num_colors, cmap='jet'): + """ + Generates num_colors of unique colors from the given cmap and returns the color at index. + """ + colors = plt.get_cmap(cmap) + c = [int(x * 255) for x in colors(index / float(num_colors))[:3]][::-1] + return c + + +VOC_COLORS = [get_unique_color(i, 20) for i in range(20)] +np.random.shuffle(VOC_COLORS) + + +def get_dataset_palette(n_classes): + """ + Generates a palette for n_classes. + """ + palette = [get_unique_color(i, n_classes) for i in range(n_classes)] + return palette + + +def draw_bounding_boxes(img, bounding_boxes, class_names=None, show=False, line_thickness=None): + """ + :param img: image on which to draw bounding boxes + :type img: opendr.engine.data.Image + :param bounding_boxes: detected or groundtruth bounding boxes + :type bounding_boxes: opendr.engine.target.BoundingBoxList + :param class_names: list of class names to be drawn on top of the bounding boxes + :type class_names: list + :param show: whether to display the resulting annotated image or not + :type show: bool + :param line_thickness: line thickness in pixels + :type line_thickness: int + """ + if isinstance(img, Image): + img = img.data + assert isinstance(bounding_boxes, BoundingBoxList), "bounding_boxes must be of BoundingBoxList type" + + if not bounding_boxes.data: + return draw_detections(img, np.empty((0, 4)), np.empty((0,)), np.empty((0,)), class_names, show, line_thickness) + + bounding_boxes = BoundingBoxListToNumpyArray()(bounding_boxes) + boxes = bounding_boxes[:, :4] + scores = bounding_boxes[:, 4] + classes = bounding_boxes[:, 5].astype(np.int) + return draw_detections(img, boxes, scores, classes, class_names, show, line_thickness) + + +def draw_detections(img, boxes, scores, classes, class_names=None, show=False, line_thickness=None): + """ + :param img: image on which to draw bounding boxes + :type img: np.ndarray or opendr.engine.data.Image + :param boxes: bounding boxes in numpy array or list format [n, 4] (coordinate format: x1, y1, x2, y2) + :type boxes: np.ndarray or list + :param scores: confidence scores for each bounding box [n] + :type scores: np.ndarray or list + :param classes: class indices for each bounding box [n] + :type classes: np.ndarray or list + :param show: whether to display the resulting annotated image or not + :type show: bool + :param line_thickness: line thickness in pixels + :type line_thickness: int + """ + if isinstance(img, Image): + img = img.data + # boxes in x1, y1, x2, y2 list format [n, 4] + # scores and classes in [n] list format + classes = np.int32(classes) + palette = VOC_COLORS + n_classes = len(palette) + + for idx, pred_box in enumerate(boxes): + # pred_box_w, pred_box_h = pred_box[2] - pred_box[0], pred_box[3] - pred_box[1] + tl = line_thickness or int(0.003 * max(img.shape[:2])) + c1 = (max(0, 
int(pred_box[0])), max(0, int(pred_box[1]))) + c2 = (min(img.shape[1], int(pred_box[2])), min(img.shape[0], int(pred_box[3]))) + color = tuple(palette[classes[idx] % n_classes]) + + img = np.ascontiguousarray(img, dtype=np.uint8) + cv2.rectangle(img, c1, c2, color, thickness=2) + + if class_names is not None: + label = "{}".format(class_names[classes[idx]]) + + t_size = cv2.getTextSize( + label, 0, fontScale=float(tl) / 5, thickness=1)[0] + + c2 = c1[0] + t_size[0], c1[1] - t_size[1] - 3 + t = - 2 + if c2[1] < 0: + c2 = c1[0] + t_size[0], c1[1] + t_size[1] + t = t_size[1] - 4 + cv2.rectangle(img, c1, c2, color, -1) # filled + + cv2.putText(img, label, (c1[0], c1[1] + t), 0, float(tl) / 5, + [255, 255, 255], thickness=1, lineType=cv2.LINE_AA) + + if show: + cv2.imshow('detections', img) + cv2.waitKey(0) + cv2.destroyAllWindows() + return img diff --git a/src/opendr/perception/object_detection_2d/yolov3/yolov3_learner.py b/src/opendr/perception/object_detection_2d/yolov3/yolov3_learner.py index 3cb4303fe1..beabea68ad 100644 --- a/src/opendr/perception/object_detection_2d/yolov3/yolov3_learner.py +++ b/src/opendr/perception/object_detection_2d/yolov3/yolov3_learner.py @@ -534,7 +534,6 @@ def load(self, path, verbose=True): def download(self, path=None, mode="pretrained", verbose=False, url=OPENDR_SERVER_URL + "/perception/object_detection_2d/yolov3/"): - # url='ftp://155.207.131.93/perception/object_detection_2d/yolov3/'): """ Downloads all files necessary for inference, evaluation and training. Valid mode options are: ["pretrained", "images", "test_data"]. @@ -572,21 +571,35 @@ def download(self, path=None, mode="pretrained", verbose=False, "yolo_voc.json") if verbose: print("Downloading metadata...") - urlretrieve(file_url, os.path.join(path, "yolo_default.json")) + if not os.path.exists(os.path.join(path, "yolo_default.json")): + urlretrieve(file_url, os.path.join(path, "yolo_default.json")) + if verbose: + print("Downloaded metadata json.") + elif verbose: + print("Metadata json file already exists.") if verbose: print("Downloading params...") file_url = os.path.join(url, "pretrained", "yolo_voc", "yolo_voc.params") - urlretrieve(file_url, - os.path.join(path, "yolo_voc.params")) + if not os.path.exists(os.path.join(path, "yolo_voc.params")): + urlretrieve(file_url, os.path.join(path, "yolo_voc.params")) + if verbose: + print("Downloaded params.") + elif verbose: + print("Params file already exists.") elif mode == "images": file_url = os.path.join(url, "images", "cat.jpg") if verbose: print("Downloading example image...") - urlretrieve(file_url, os.path.join(path, "cat.jpg")) + if not os.path.exists(os.path.join(path, "cat.jpg")): + urlretrieve(file_url, os.path.join(path, "cat.jpg")) + if verbose: + print("Downloaded example image.") + elif verbose: + print("Example image already exists.") elif mode == "test_data": os.makedirs(os.path.join(path, "test_data"), exist_ok=True) @@ -596,17 +609,32 @@ def download(self, path=None, mode="pretrained", verbose=False, file_url = os.path.join(url, "test_data", "train.txt") if verbose: print("Downloading filelist...") - urlretrieve(file_url, os.path.join(path, "test_data", "train.txt")) + if not os.path.exists(os.path.join(path, "test_data", "train.txt")): + urlretrieve(file_url, os.path.join(path, "test_data", "train.txt")) + if verbose: + print("Downloaded filelist.") + elif verbose: + print("Filelist already exists.") # download image file_url = os.path.join(url, "test_data", "Images", "000040.jpg") if verbose: print("Downloading image...") - 
urlretrieve(file_url, os.path.join(path, "test_data", "Images", "000040.jpg")) + if not os.path.exists(os.path.join(path, "test_data", "Images", "000040.jpg")): + urlretrieve(file_url, os.path.join(path, "test_data", "Images", "000040.jpg")) + if verbose: + print("Downloaded image.") + elif verbose: + print("Image already exists.") # download annotations file_url = os.path.join(url, "test_data", "Annotations", "000040.jpg.txt") if verbose: print("Downloading annotations...") - urlretrieve(file_url, os.path.join(path, "test_data", "Annotations", "000040.jpg.txt")) + if not os.path.exists(os.path.join(path, "test_data", "Annotations", "000040.jpg.txt")): + urlretrieve(file_url, os.path.join(path, "test_data", "Annotations", "000040.jpg.txt")) + if verbose: + print("Downloaded annotations.") + elif verbose: + print("Annotations already exist.") def optimize(self, target_device): """This method is not used in this implementation.""" diff --git a/src/opendr/perception/object_detection_2d/yolov5/README.md b/src/opendr/perception/object_detection_2d/yolov5/README.md new file mode 100644 index 0000000000..a19d36f411 --- /dev/null +++ b/src/opendr/perception/object_detection_2d/yolov5/README.md @@ -0,0 +1,17 @@ +OpenDR 2D Object Detection - YOLOv5 +====== + +This folder contains the OpenDR Learner class for YOLOv5 for 2D object detection. + +Sources +------ +The models are taken from the +[Ultralytics implementation](https://github.com/ultralytics/yolov5) with modifications +to make it compatible with OpenDR specifications. Only inference is supported. + +Usage +------ +- The ```model_name``` parameter is used to specify which model will be loaded. Available models: ```['yolov5s', 'yolov5n', 'yolov5m', 'yolov5l', 'yolov5x', 'yolov5n6', 'yolov5s6', 'yolov5m6', 'yolov5l6', 'custom']``` +- For custom models, the ```path``` parameter must be set to point to the location of the weights file. +- The ```temp_path``` folder is used to save the downloaded weights when using pretrained models. +- The ```force_reload``` parameter redownloads the pretrained model when set to `True`. This fixes issues with caching. \ No newline at end of file diff --git a/src/opendr/perception/object_detection_2d/yolov5/__init__.py b/src/opendr/perception/object_detection_2d/yolov5/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/object_detection_2d/yolov5/yolov5_learner.py b/src/opendr/perception/object_detection_2d/yolov5/yolov5_learner.py new file mode 100644 index 0000000000..6bfe9fd23b --- /dev/null +++ b/src/opendr/perception/object_detection_2d/yolov5/yolov5_learner.py @@ -0,0 +1,88 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
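A minimal usage sketch of the YOLOv5 learner introduced below, tying together the README options above (model_name, temp_path) with the learner's infer() call. This is an illustration only, not part of the changeset: the image file name and the temp_path value are placeholders, and the printed fields assume the OpenDR BoundingBox attributes used elsewhere in this diff (name, confidence).

```python
# Hypothetical usage sketch for the new YOLOv5DetectorLearner (paths are placeholders).
import cv2
from opendr.perception.object_detection_2d.yolov5.yolov5_learner import YOLOv5DetectorLearner

# 'yolov5s' is one of the pretrained models listed in the README above.
learner = YOLOv5DetectorLearner(model_name='yolov5s', device='cpu', temp_path='./yolov5_weights')

img = cv2.imread('people.jpg')        # plain numpy image; opendr.engine.data.Image is also accepted
boxes = learner.infer(img, size=640)  # returns an opendr BoundingBoxList

for box in boxes.data:
    # box.name holds the class index, box.confidence the detection score
    print(learner.classes[int(box.name)], box.confidence)
```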
+ +# OpenDR engine imports +from opendr.engine.learners import Learner +from opendr.engine.data import Image +from opendr.engine.target import BoundingBox, BoundingBoxList + +# yolov5 imports +import torch +torch.hub._validate_not_a_forked_repo = lambda a, b, c: True # workaround for rate limit bug + + +class YOLOv5DetectorLearner(Learner): + available_models = ['yolov5s', 'yolov5n', 'yolov5m', 'yolov5l', 'yolov5x', + 'yolov5n6', 'yolov5s6', 'yolov5m6', 'yolov5l6', 'custom'] + + def __init__(self, model_name, path=None, device='cuda', temp_path='.', force_reload=False): + super(YOLOv5DetectorLearner, self).__init__(device=device, temp_path=temp_path) + if model_name not in self.available_models: + model_name = 'yolov5s' + print('Unrecognized model name, defaulting to "yolov5s"') + + default_dir = torch.hub.get_dir() + torch.hub.set_dir(temp_path) + + if path is None: + self.model = torch.hub.load('ultralytics/yolov5:master', 'custom', f'{temp_path}/{model_name}', + force_reload=force_reload) + else: + self.model = torch.hub.load('ultralytics/yolov5:master', 'custom', path=path, + force_reload=force_reload, skip_validation=True) + torch.hub.set_dir(default_dir) + + self.model.to(device) + self.classes = [self.model.names[i] for i in range(len(self.model.names.keys()))] + + def infer(self, img, size=640): + if isinstance(img, Image): + img = img.convert("channels_last", "rgb") + + results = self.model(img, size=size) + + bounding_boxes = BoundingBoxList([]) + for idx, box in enumerate(results.xyxy[0]): + box = box.cpu().numpy() + bbox = BoundingBox(left=box[0], top=box[1], + width=box[2] - box[0], + height=box[3] - box[1], + name=box[5], + score=box[4]) + bounding_boxes.data.append(bbox) + return bounding_boxes + + def fit(self): + """This method is not used in this implementation.""" + raise NotImplementedError + + def eval(self): + """This method is not used in this implementation.""" + raise NotImplementedError + + def optimize(self, target_device): + """This method is not used in this implementation.""" + return NotImplementedError + + def reset(self): + """This method is not used in this implementation.""" + return NotImplementedError + + def load(self): + """This method is not used in this implementation.""" + return NotImplementedError + + def save(self): + """This method is not used in this implementation.""" + return NotImplementedError diff --git a/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/dependencies.ini b/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/dependencies.ini index 0e7589f1d6..7aa0f689b5 100644 --- a/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/dependencies.ini +++ b/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/dependencies.ini @@ -1,16 +1,16 @@ [runtime] # 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 torchvision==0.10.0 + protobuf<=3.20.0 tensorboardX>=2.0 opencv-python==4.5.1.48 matplotlib>=2.2.2 tqdm onnx==1.8.0 onnxruntime==1.3.0 - protobuf==3.11.3 pybind11==2.6.2 llvmlite>=0.31.0 numba>=0.53.0 diff --git a/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/second_detector/core/cc/nms/nms_cpu.h b/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/second_detector/core/cc/nms/nms_cpu.h index cbf644aac3..cc7a9f235a 100644 --- 
a/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/second_detector/core/cc/nms/nms_cpu.h +++ b/src/opendr/perception/object_detection_3d/voxel_object_detection_3d/second_detector/core/cc/nms/nms_cpu.h @@ -6,7 +6,9 @@ #include #include #include +#include #include + namespace py = pybind11; using namespace pybind11::literals; template inline py::array_t constant(ShapeContainer shape, DType value) { diff --git a/src/opendr/perception/object_tracking_2d/__init__.py b/src/opendr/perception/object_tracking_2d/__init__.py index c80477b76b..3f0c0a7394 100644 --- a/src/opendr/perception/object_tracking_2d/__init__.py +++ b/src/opendr/perception/object_tracking_2d/__init__.py @@ -1,5 +1,8 @@ -from opendr.perception.object_tracking_2d.fair_mot.object_tracking_2d_fair_mot_learner import ObjectTracking2DFairMotLearner -from opendr.perception.object_tracking_2d.deep_sort.object_tracking_2d_deep_sort_learner import ObjectTracking2DDeepSortLearner +from opendr.perception.object_tracking_2d.fair_mot.object_tracking_2d_fair_mot_learner import \ + ObjectTracking2DFairMotLearner +from opendr.perception.object_tracking_2d.deep_sort.object_tracking_2d_deep_sort_learner import \ + ObjectTracking2DDeepSortLearner +from opendr.perception.object_tracking_2d.siamrpn.siamrpn_learner import SiamRPNLearner from opendr.perception.object_tracking_2d.datasets.mot_dataset import ( MotDataset, @@ -14,4 +17,5 @@ ) __all__ = ['ObjectTracking2DFairMotLearner', 'ObjectTracking2DDeepSortLearner', 'MotDataset', 'MotDatasetIterator', - 'RawMotDatasetIterator', 'RawMotWithDetectionsDatasetIterator', 'Market1501Dataset', 'Market1501DatasetIterator'] + 'RawMotDatasetIterator', 'RawMotWithDetectionsDatasetIterator', 'Market1501Dataset', 'Market1501DatasetIterator', + 'SiamRPNLearner'] diff --git a/src/opendr/perception/object_tracking_2d/datasets/__init__.py b/src/opendr/perception/object_tracking_2d/datasets/__init__.py index e69de29bb2..c30305ce41 100644 --- a/src/opendr/perception/object_tracking_2d/datasets/__init__.py +++ b/src/opendr/perception/object_tracking_2d/datasets/__init__.py @@ -0,0 +1,3 @@ +from opendr.perception.object_tracking_2d.datasets.otb_dataset import OTBTrainDataset + +__all__ = ['OTBTrainDataset'] diff --git a/src/opendr/perception/object_tracking_2d/datasets/otb_dataset.py b/src/opendr/perception/object_tracking_2d/datasets/otb_dataset.py new file mode 100644 index 0000000000..6daceca528 --- /dev/null +++ b/src/opendr/perception/object_tracking_2d/datasets/otb_dataset.py @@ -0,0 +1,165 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
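A hypothetical usage sketch of the OTBTrainDataset defined in the remainder of this file. The dataset root and annotation json path are placeholders, and the json is assumed to follow the OTB layout the class reads (one entry per video with `img_names` and `gt_rect` lists):

```python
# Hypothetical usage of OTBTrainDataset (paths below are placeholders).
from opendr.perception.object_tracking_2d.datasets import OTBTrainDataset

dataset = OTBTrainDataset(root='/data/otb2015', json_path='/data/otb2015/OTB2015.json')
print(len(dataset))  # number of videos listed in the annotation json

# Each item is a (template, search, cls, delta, delta_weight, bbox) tuple,
# with template/search as CHW float32 crops ready for the SiamRPN heads.
template, search, cls, delta, delta_weight, bbox = dataset[0]
print(template.shape, search.shape, bbox)
```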
+# +import os +import cv2 +import numpy as np +import json +from gluoncv.data.tracking_data import AnchorTarget +from gluoncv.data.transforms.track import SiamRPNaugmentation +from gluoncv.model_zoo.siamrpn.siamrpn_tracker import center2corner +from gluoncv.model_zoo.siamrpn.siamrpn_tracker import Center +from opendr.engine.datasets import DatasetIterator + + +class OTBTrainDataset(DatasetIterator): + def __init__(self, root, json_path, train_exemplar_size=127, train_search_size=255, + train_output_size=17, anchor_stride=8, anchor_ratios=(0.33, 0.5, 1, 2, 3), + train_thr_high=0.6, train_thr_low=0.3, train_pos_num=16, train_neg_num=16, + train_total_num=64, template_shift=4, template_scale=0.05, template_blur=0, + template_flip=0, template_color=1.0, search_shift=64, search_scale=0.18, + search_blur=0, search_flip=0, search_color=1.0): + super(OTBTrainDataset, self).__init__() + # dataset specific reading functions + self.root = root + self.json_path = json_path + with open(self.json_path, 'r') as f: + self.annotations = json.load(f) + self.video_names = list(self.annotations.keys()) + self.n_videos = len(self.video_names) + print(f'Found {self.n_videos} videos, reading data...') + + # SiamRPN functionality + self.train_exemplar_size = train_exemplar_size + self.train_search_size = train_search_size + self.train_output_size = train_output_size + + # create anchor target + self.anchor_target = AnchorTarget(anchor_stride=anchor_stride, + anchor_ratios=anchor_ratios, + train_search_size=self.train_search_size, + train_output_size=self.train_output_size, + train_thr_high=train_thr_high, + train_thr_low=train_thr_low, + train_pos_num=train_pos_num, + train_neg_num=train_neg_num, + train_total_num=train_total_num) + + # data augmentation + self.template_aug = SiamRPNaugmentation(template_shift, + template_scale, + template_blur, + template_flip, + template_color) + self.search_aug = SiamRPNaugmentation(search_shift, + search_scale, + search_blur, + search_flip, + search_color) + + def __len__(self): + return self.n_videos + + def _get_bbox(self, image, shape): + imh, imw = image.shape[:2] + if len(shape) == 4: + w, h = shape[2]-shape[0], shape[3]-shape[1] + else: + w, h = shape + context_amount = 0.5 + exemplar_size = self.train_exemplar_size + wc_z = w + context_amount * (w+h) + hc_z = h + context_amount * (w+h) + s_z = np.sqrt(wc_z * hc_z) + scale_z = exemplar_size / s_z + w = w*scale_z + h = h*scale_z + cx, cy = imw//2, imh//2 + bbox = center2corner(Center(cx, cy, w, h)) + return bbox + + def _get_crop(self, image, box): + image_mean = np.mean(image, axis=(0, 1)) + image = crop_like_siamfc(image, box, exemplar_size=self.train_exemplar_size, + search_size=self.train_search_size, padding=image_mean) + return image + + def __getitem__(self, item): + video_name = self.video_names[item] + + # choose two random frames from video + available_frames = self.annotations[video_name]["img_names"] + indices = np.random.choice(len(available_frames), 2) + frame1, frame2 = available_frames[indices[0]], available_frames[indices[1]] + + # read frames and transform to target, search + template_image = cv2.imread(os.path.join(self.root, frame1)) + search_image = cv2.imread(os.path.join(self.root, frame2)) + # crop around boxes + template_box = self.annotations[video_name]["gt_rect"][indices[0]] + search_box = self.annotations[video_name]["gt_rect"][indices[1]] + template_image = self._get_crop(template_image, template_box) + search_image = self._get_crop(search_image, search_box) + + # get corresponding bounding 
boxes + # we only need the dimensions of the boxes (w, h) as we have already cropped the images around the targets + template_box = self._get_bbox(template_image, template_box[2:]) + search_box = self._get_bbox(search_image, search_box[2:]) + + # augmentation + template, _ = self.template_aug(template_image, + template_box, + self.train_exemplar_size) + + search, bbox = self.search_aug(search_image, + search_box, + self.train_search_size) + + # get labels + cls, delta, delta_weight, _ = self.anchor_target(bbox, self.train_output_size, False) + template = template.transpose((2, 0, 1)).astype(np.float32) + search = search.transpose((2, 0, 1)).astype(np.float32) + + return template, search, cls, delta, delta_weight, np.array(bbox) + + +def crop_hwc(image, bbox, out_sz, padding=(0, 0, 0)): + a = (out_sz-1) / (bbox[2]-bbox[0]) + b = (out_sz-1) / (bbox[3]-bbox[1]) + c = -a * bbox[0] + d = -b * bbox[1] + mapping = np.array([[a, 0, c], + [0, b, d]]).astype(np.float) + crop = cv2.warpAffine(image, mapping, (out_sz, out_sz), + borderMode=cv2.BORDER_CONSTANT, borderValue=padding) + return crop + + +def pos_s_2_bbox(pos, s): + return [pos[0]-s/2, pos[1]-s/2, pos[0]+s/2, pos[1]+s/2] + + +def crop_like_siamfc(image, bbox, exemplar_size=127, context_amount=0.5, search_size=255, padding=(0, 0, 0)): + target_pos = [(bbox[0] + bbox[2] / 2.), (bbox[1] + bbox[3] / 2.)] + target_size = [bbox[2], bbox[3]] + wc_z = target_size[1] + context_amount * sum(target_size) + hc_z = target_size[0] + context_amount * sum(target_size) + s_z = np.sqrt(wc_z * hc_z) + scale_z = exemplar_size / s_z + d_search = (search_size - exemplar_size) / 2 + pad = d_search / scale_z + s_x = s_z + 2 * pad + + x = crop_hwc(image, pos_s_2_bbox(target_pos, s_x), search_size, padding) + return x diff --git a/src/opendr/perception/object_tracking_2d/deep_sort/algorithm/deep_sort_tracker.py b/src/opendr/perception/object_tracking_2d/deep_sort/algorithm/deep_sort_tracker.py index edd44325f2..559139c597 100644 --- a/src/opendr/perception/object_tracking_2d/deep_sort/algorithm/deep_sort_tracker.py +++ b/src/opendr/perception/object_tracking_2d/deep_sort/algorithm/deep_sort_tracker.py @@ -51,12 +51,12 @@ def __init__( self.deepsort = self.build_tracker() self.frame = 0 - def infer(self, imageWithDetections: ImageWithDetections, frame_id=None): + def infer(self, imageWithDetections: ImageWithDetections, frame_id=None, swap_left_top=False): if frame_id is not None: self.frame = frame_id - image = imageWithDetections.numpy().transpose(2, 1, 0) + image = imageWithDetections.numpy().transpose(1, 2, 0) detections = imageWithDetections.boundingBoxList bbox_xywh = [] @@ -65,15 +65,15 @@ def infer(self, imageWithDetections: ImageWithDetections, frame_id=None): for detection in detections: bbox_xywh.append(np.array([ - detection.left, - detection.top, + detection.top if swap_left_top else detection.left, + detection.left if swap_left_top else detection.top, detection.width, detection.height, ])) cls_conf.append(detection.confidence) cls_ids.append(detection.name) - bbox_xywh = np.array(bbox_xywh) + bbox_xywh = np.array(bbox_xywh).reshape(-1, 4) cls_conf = np.array(cls_conf) cls_ids = np.array(cls_ids) @@ -96,8 +96,8 @@ def infer(self, imageWithDetections: ImageWithDetections, frame_id=None): bb_tlwh = self.deepsort._xyxy_to_tlwh(bb_xyxy) results.append(TrackingAnnotation( cls_id, - bb_tlwh[0], - bb_tlwh[1], + (bb_tlwh[1] + bb_tlwh[3] / 2 if swap_left_top else bb_tlwh[0] + bb_tlwh[2] / 2), + (bb_tlwh[0] + bb_tlwh[2] / 2 if swap_left_top else bb_tlwh[1] + 
bb_tlwh[3] / 2), bb_tlwh[2], bb_tlwh[3], id, diff --git a/src/opendr/perception/object_tracking_2d/deep_sort/object_tracking_2d_deep_sort_learner.py b/src/opendr/perception/object_tracking_2d/deep_sort/object_tracking_2d_deep_sort_learner.py index d0cd3f36a4..a5f0d020fa 100644 --- a/src/opendr/perception/object_tracking_2d/deep_sort/object_tracking_2d_deep_sort_learner.py +++ b/src/opendr/perception/object_tracking_2d/deep_sort/object_tracking_2d_deep_sort_learner.py @@ -262,7 +262,7 @@ def eval( return result - def infer(self, batch, frame_ids=None): + def infer(self, batch, frame_ids=None, swap_left_top=False): if self.tracker is None: raise ValueError("No model loaded or created") @@ -286,7 +286,7 @@ def infer(self, batch, frame_ids=None): t0 = time.time() - result = self.tracker.infer(image, frame_id) + result = self.tracker.infer(image, frame_id, swap_left_top=swap_left_top) results.append(result) t0 = time.time() - t0 diff --git a/src/opendr/perception/object_tracking_2d/fair_mot/dependencies.ini b/src/opendr/perception/object_tracking_2d/dependencies.ini similarity index 82% rename from src/opendr/perception/object_tracking_2d/fair_mot/dependencies.ini rename to src/opendr/perception/object_tracking_2d/dependencies.ini index 757d9a27e1..32ff6a3997 100644 --- a/src/opendr/perception/object_tracking_2d/fair_mot/dependencies.ini +++ b/src/opendr/perception/object_tracking_2d/dependencies.ini @@ -3,6 +3,7 @@ # https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 torchvision==0.10.0 + protobuf<=3.20.0 tensorboardX>=2.0 opencv-python==4.5.1.48 matplotlib>=2.2.2 @@ -13,9 +14,12 @@ python=torch==1.9.0 yacs==0.1.8 progress>=1.5 lap>=0.4.0 - cython>=0.29.22 + Cython>=0.29.22 cython-bbox==0.1.3 motmetrics==1.2.0 ninja + gluoncv==0.11.0b20210908 + mxnet==1.8.0 + numba==0.53.0 opendr=opendr-toolkit-engine diff --git a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/model.py b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/model.py index ea3d5f9221..812cd3e456 100644 --- a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/model.py +++ b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/model.py @@ -24,11 +24,11 @@ } -def create_model(arch, heads, head_conv): +def create_model(pretrained, arch, heads, head_conv): num_layers = int(arch[arch.find("_") + 1:]) if "_" in arch else 0 arch = arch[: arch.find("_")] if "_" in arch else arch get_model = _model_factory[arch] - model = get_model(num_layers=num_layers, heads=heads, head_conv=head_conv) + model = get_model(pretrained=pretrained, num_layers=num_layers, heads=heads, head_conv=head_conv) return model diff --git a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/dlav0.py b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/dlav0.py index 9d3520d2fd..e8378c9dcc 100644 --- a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/dlav0.py +++ b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/dlav0.py @@ -392,14 +392,17 @@ def forward(self, x): return x - def load_pretrained_model(self, data="imagenet", name="dla34", hash="ba72cf86"): + def load_model(self, pretrained=True, data="imagenet", name="dla34", hash="ba72cf86", num_classes=None): fc = self.fc - if name.endswith(".pth"): - model_weights = torch.load(data + name) - else: - model_url = get_model_url(data, name, hash) - model_weights = 
model_zoo.load_url(model_url) - num_classes = len(model_weights[list(model_weights.keys())[-1]]) + + if pretrained: + if name.endswith(".pth"): + model_weights = torch.load(data + name) + else: + model_url = get_model_url(data, name, hash) + model_weights = model_zoo.load_url(model_url) + num_classes = len(model_weights[list(model_weights.keys())[-1]]) + self.fc = nn.Conv2d( self.channels[-1], num_classes, @@ -408,70 +411,66 @@ def load_pretrained_model(self, data="imagenet", name="dla34", hash="ba72cf86"): padding=0, bias=True, ) - self.load_state_dict(model_weights) + + if pretrained: + self.load_state_dict(model_weights) self.fc = fc -def dla34(pretrained, **kwargs): # DLA-34 +def dla34(pretrained, num_classes=None, **kwargs): # DLA-34 model = DLA( [1, 1, 1, 2, 2, 1], [16, 32, 64, 128, 256, 512], block=BasicBlock, **kwargs ) - if pretrained: - model.load_pretrained_model(data="imagenet", name="dla34", hash="ba72cf86") + model.load_model(data="imagenet", name="dla34", hash="ba72cf86", num_classes=num_classes, pretrained=pretrained) return model -def dla46_c(pretrained=None, **kwargs): # DLA-46-C +def dla46_c(pretrained=False, num_classes=None, **kwargs): # DLA-46-C Bottleneck.expansion = 2 model = DLA( [1, 1, 1, 2, 2, 1], [16, 32, 64, 64, 128, 256], block=Bottleneck, **kwargs ) - if pretrained is not None: - model.load_pretrained_model(pretrained, "dla46_c") + model.load_model(pretrained, "dla46_c", num_classes=num_classes) return model -def dla46x_c(pretrained=None, **kwargs): # DLA-X-46-C +def dla46x_c(pretrained=False, num_classes=None, **kwargs): # DLA-X-46-C BottleneckX.expansion = 2 model = DLA( [1, 1, 1, 2, 2, 1], [16, 32, 64, 64, 128, 256], block=BottleneckX, **kwargs ) - if pretrained is not None: - model.load_pretrained_model(pretrained, "dla46x_c") + model.load_model(pretrained, "dla46x_c", num_classes=num_classes) return model -def dla60x_c(pretrained, **kwargs): # DLA-X-60-C +def dla60x_c(pretrained, num_classes=None, **kwargs): # DLA-X-60-C BottleneckX.expansion = 2 model = DLA( [1, 1, 1, 2, 3, 1], [16, 32, 64, 64, 128, 256], block=BottleneckX, **kwargs ) - if pretrained: - model.load_pretrained_model(data="imagenet", name="dla60x_c", hash="b870c45c") + model.load_model(data="imagenet", name="dla60x_c", hash="b870c45c", num_classes=num_classes, pretrained=pretrained) return model -def dla60(pretrained=None, **kwargs): # DLA-60 +def dla60(pretrained=False, num_classes=None, **kwargs): # DLA-60 Bottleneck.expansion = 2 model = DLA( [1, 1, 1, 2, 3, 1], [16, 32, 128, 256, 512, 1024], block=Bottleneck, **kwargs ) - if pretrained is not None: - model.load_pretrained_model(pretrained, "dla60") + model.load_model(pretrained, "dla60", num_classes=num_classes) return model -def dla60x(pretrained=None, **kwargs): # DLA-X-60 +def dla60x(pretrained=False, num_classes=None, **kwargs): # DLA-X-60 BottleneckX.expansion = 2 model = DLA( [1, 1, 1, 2, 3, 1], [16, 32, 128, 256, 512, 1024], block=BottleneckX, **kwargs ) - if pretrained is not None: - model.load_pretrained_model(pretrained, "dla60x") + model.load_model(pretrained, "dla60x", num_classes=num_classes) return model -def dla102(pretrained=None, **kwargs): # DLA-102 +def dla102(pretrained=False, num_classes=None, **kwargs): # DLA-102 Bottleneck.expansion = 2 model = DLA( [1, 1, 1, 3, 4, 1], @@ -480,12 +479,11 @@ def dla102(pretrained=None, **kwargs): # DLA-102 residual_root=True, **kwargs ) - if pretrained is not None: - model.load_pretrained_model(pretrained, "dla102") + model.load_model(pretrained, "dla102", num_classes=num_classes) 
return model -def dla102x(pretrained=None, **kwargs): # DLA-X-102 +def dla102x(pretrained=False, num_classes=None, **kwargs): # DLA-X-102 BottleneckX.expansion = 2 model = DLA( [1, 1, 1, 3, 4, 1], @@ -494,12 +492,11 @@ def dla102x(pretrained=None, **kwargs): # DLA-X-102 residual_root=True, **kwargs ) - if pretrained is not None: - model.load_pretrained_model(pretrained, "dla102x") + model.load_model(pretrained, "dla102x", num_classes=num_classes) return model -def dla102x2(pretrained=None, **kwargs): # DLA-X-102 64 +def dla102x2(pretrained=False, num_classes=None, **kwargs): # DLA-X-102 64 BottleneckX.cardinality = 64 model = DLA( [1, 1, 1, 3, 4, 1], @@ -508,12 +505,11 @@ def dla102x2(pretrained=None, **kwargs): # DLA-X-102 64 residual_root=True, **kwargs ) - if pretrained is not None: - model.load_pretrained_model(pretrained, "dla102x2") + model.load_model(pretrained, "dla102x2", num_classes=num_classes) return model -def dla169(pretrained=None, **kwargs): # DLA-169 +def dla169(pretrained=False, num_classes=None, **kwargs): # DLA-169 Bottleneck.expansion = 2 model = DLA( [1, 1, 2, 3, 5, 1], @@ -522,8 +518,7 @@ def dla169(pretrained=None, **kwargs): # DLA-169 residual_root=True, **kwargs ) - if pretrained is not None: - model.load_pretrained_model(pretrained, "dla169") + model.load_model(pretrained, "dla169", num_classes=num_classes) return model @@ -662,12 +657,12 @@ def fill_fc_weights(layers): class DLASeg(nn.Module): - def __init__(self, base_name, heads, pretrained=True, down_ratio=4, head_conv=256): + def __init__(self, base_name, heads, pretrained=True, down_ratio=4, head_conv=256, num_classes=None): super(DLASeg, self).__init__() assert down_ratio in [2, 4, 8, 16] self.heads = heads self.first_level = int(np.log2(down_ratio)) - self.base = globals()[base_name](pretrained=pretrained, return_levels=True) + self.base = globals()[base_name](pretrained=pretrained, return_levels=True, num_classes=num_classes) channels = self.base.channels scales = [2 ** i for i in range(len(channels[self.first_level :]))] self.dla_up = DLAUp(channels[self.first_level :], scales=scales) @@ -785,12 +780,13 @@ def dla169up(classes, pretrained_base=None, **kwargs): """ -def get_pose_net(num_layers, heads, head_conv=256, down_ratio=4): +def get_pose_net(pretrained, num_layers, heads, head_conv=256, down_ratio=4, num_classes=1000): model = DLASeg( "dla{}".format(num_layers), heads, - pretrained=True, + pretrained=pretrained, down_ratio=down_ratio, head_conv=head_conv, + num_classes=num_classes, ) return model diff --git a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/pose_dla_conv.py b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/pose_dla_conv.py index b163e270b7..15f5aaec37 100644 --- a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/pose_dla_conv.py +++ b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/pose_dla_conv.py @@ -602,11 +602,11 @@ def forward(self, x): return [z] -def get_pose_net(num_layers, heads, head_conv=256, down_ratio=4): +def get_pose_net(pretrained, num_layers, heads, head_conv=256, down_ratio=4): model = DLASeg( "dla{}".format(num_layers), heads, - pretrained=True, + pretrained=pretrained, down_ratio=down_ratio, final_kernel=1, last_level=5, diff --git a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/pose_dla_dcn.py b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/pose_dla_dcn.py index 
7e183ef33d..458e474279 100644 --- a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/pose_dla_dcn.py +++ b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/pose_dla_dcn.py @@ -372,13 +372,16 @@ def forward(self, x): y.append(x) return y - def load_pretrained_model(self, data="imagenet", name="dla34", hash="ba72cf86"): - if name.endswith(".pth"): - model_weights = torch.load(data + name) - else: - model_url = get_model_url(data, name, hash) - model_weights = model_zoo.load_url(model_url) - num_classes = len(model_weights[list(model_weights.keys())[-1]]) + def load_model(self, pretrained=True, data="imagenet", name="dla34", hash="ba72cf86", num_classes=1000): + + if pretrained: + if name.endswith(".pth"): + model_weights = torch.load(data + name) + else: + model_url = get_model_url(data, name, hash) + model_weights = model_zoo.load_url(model_url) + num_classes = len(model_weights[list(model_weights.keys())[-1]]) + self.fc = nn.Conv2d( self.channels[-1], num_classes, @@ -387,15 +390,16 @@ def load_pretrained_model(self, data="imagenet", name="dla34", hash="ba72cf86"): padding=0, bias=True, ) - self.load_state_dict(model_weights) + + if pretrained: + self.load_state_dict(model_weights) def dla34(pretrained=True, **kwargs): # DLA-34 model = DLA( [1, 1, 1, 2, 2, 1], [16, 32, 64, 128, 256, 512], block=BasicBlock, **kwargs ) - if pretrained: - model.load_pretrained_model(data="imagenet", name="dla34", hash="ba72cf86") + model.load_model(data="imagenet", name="dla34", hash="ba72cf86", pretrained=pretrained) return model @@ -533,12 +537,13 @@ def __init__( last_level, head_conv, out_channel=0, + num_classes=None ): super(DLASeg, self).__init__() assert down_ratio in [2, 4, 8, 16] self.first_level = int(np.log2(down_ratio)) self.last_level = last_level - self.base = globals()[base_name](pretrained=pretrained) + self.base = globals()[base_name](pretrained=pretrained, num_classes=num_classes) channels = self.base.channels scales = [2 ** i for i in range(len(channels[self.first_level :]))] self.dla_up = DLAUp(self.first_level, channels[self.first_level :], scales) @@ -608,14 +613,15 @@ def forward(self, x): return [z] -def get_pose_net(num_layers, heads, head_conv=256, down_ratio=4): +def get_pose_net(pretrained, num_layers, heads, head_conv=256, down_ratio=4, num_classes=None): model = DLASeg( "dla{}".format(num_layers), heads, - pretrained=True, + pretrained=pretrained, down_ratio=down_ratio, final_kernel=1, last_level=5, head_conv=head_conv, + num_classes=num_classes, ) return model diff --git a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/pose_hrnet.py b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/pose_hrnet.py index 21e833d9b7..1361637052 100644 --- a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/pose_hrnet.py +++ b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/pose_hrnet.py @@ -555,7 +555,7 @@ def fill_fc_weights(layers): nn.init.constant_(m.bias, 0) -def get_pose_net(num_layers, heads, head_conv): +def get_pose_net(pretrained, num_layers, heads, head_conv): if num_layers == 32: cfg_dir = "../src/lib/models/networks/config/hrnet_w32.yaml" elif num_layers == 18: @@ -564,6 +564,6 @@ def get_pose_net(num_layers, heads, head_conv): cfg_dir = "../src/lib/models/networks/config/hrnet_w18.yaml" update_config(cfg, cfg_dir) model = PoseHighResolutionNet(cfg, heads) - model.init_weights(cfg.MODEL.PRETRAINED) + 
model.init_weights(cfg.MODEL.PRETRAINED if pretrained else "") return model diff --git a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/resnet_dcn.py b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/resnet_dcn.py index 588a09284e..c04bb55530 100644 --- a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/resnet_dcn.py +++ b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/resnet_dcn.py @@ -274,17 +274,17 @@ def forward(self, x): ret[head] = self.__getattr__(head)(x) return [ret] - def init_weights(self, num_layers): - if 1: + def init_weights(self, pretrained, num_layers): + if pretrained: url = model_urls["resnet{}".format(num_layers)] pretrained_state_dict = model_zoo.load_url(url) print("=> loading pretrained model {}".format(url)) self.load_state_dict(pretrained_state_dict, strict=False) - print("=> init deconv weights from normal distribution") - for name, m in self.deconv_layers.named_modules(): - if isinstance(m, nn.BatchNorm2d): - nn.init.constant_(m.weight, 1) - nn.init.constant_(m.bias, 0) + print("=> init deconv weights from normal distribution") + for name, m in self.deconv_layers.named_modules(): + if isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) resnet_spec = { @@ -296,9 +296,9 @@ def init_weights(self, num_layers): } -def get_pose_net(num_layers, heads, head_conv=256): +def get_pose_net(pretrained, num_layers, heads, head_conv=256): block_class, layers = resnet_spec[num_layers] model = PoseResNet(block_class, layers, heads, head_conv=head_conv) - model.init_weights(num_layers) + model.init_weights(pretrained, num_layers) return model diff --git a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/resnet_fpn_dcn.py b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/resnet_fpn_dcn.py index e2add58e20..591b9a9bd8 100644 --- a/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/resnet_fpn_dcn.py +++ b/src/opendr/perception/object_tracking_2d/fair_mot/algorithm/lib/models/networks/resnet_fpn_dcn.py @@ -282,8 +282,8 @@ def forward(self, x): ret[head] = self.__getattr__(head)(p1) return [ret] - def init_weights(self, num_layers): - if 1: + def init_weights(self, pretrained, num_layers): + if pretrained: url = model_urls["resnet{}".format(num_layers)] pretrained_state_dict = model_zoo.load_url(url) print("=> loading pretrained model {}".format(url)) @@ -326,9 +326,9 @@ def forward(self, x): } -def get_pose_net(num_layers, heads, head_conv=256): +def get_pose_net(pretrained, num_layers, heads, head_conv=256): block_class, layers = resnet_spec[num_layers] model = PoseResNet(block_class, layers, heads, head_conv=head_conv) - model.init_weights(num_layers) + model.init_weights(pretrained, num_layers) return model diff --git a/src/opendr/perception/object_tracking_2d/fair_mot/object_tracking_2d_fair_mot_learner.py b/src/opendr/perception/object_tracking_2d/fair_mot/object_tracking_2d_fair_mot_learner.py index a5eae1a456..02cdcdcbaf 100644 --- a/src/opendr/perception/object_tracking_2d/fair_mot/object_tracking_2d_fair_mot_learner.py +++ b/src/opendr/perception/object_tracking_2d/fair_mot/object_tracking_2d_fair_mot_learner.py @@ -77,6 +77,7 @@ def __init__( image_std=[0.289, 0.274, 0.278], frame_rate=30, min_box_area=100, + use_pretrained_backbone=True, ): # Pass the shared parameters on super's constructor so they can get 
initialized as class attributes super(ObjectTracking2DFairMotLearner, self).__init__( @@ -122,6 +123,7 @@ def __init__( self.image_std = image_std self.frame_rate = frame_rate self.min_box_area = min_box_area + self.use_pretrained_backbone = use_pretrained_backbone main_batch_size = self.batch_size // len(self.gpus) rest_batch_size = (self.batch_size - main_batch_size) @@ -658,7 +660,7 @@ def __create_model(self): self.heads = heads - self.model = create_model(self.backbone, heads, self.head_conv) + self.model = create_model(self.use_pretrained_backbone, self.backbone, heads, self.head_conv) self.model.to(self.device) self.model.ort_session = None self.model.heads_names = heads.keys() diff --git a/src/opendr/perception/object_tracking_2d/siamrpn/README.md b/src/opendr/perception/object_tracking_2d/siamrpn/README.md new file mode 100644 index 0000000000..5dd47f231c --- /dev/null +++ b/src/opendr/perception/object_tracking_2d/siamrpn/README.md @@ -0,0 +1,60 @@ +# SiamRPNLearner Module + +This class implements the SiamRPN generic object tracker based on its +[GluonCV](https://github.com/dmlc/gluon-cv) implementation. + +## Tracking datasets + +### Training datasets + +The following datasets are supported for training the SiamRPN tracker: + +1. COCO Detection dataset ([preprocessing scripts](https://github.com/foolwood/SiamMask/tree/master/data/coco)) +2. YouTube BB dataset ([preprocessing scripts](https://github.com/foolwood/SiamMask/tree/master/data/ytb_vos)) +3. ILSVRC-VID ([preprocessing scripts](https://github.com/foolwood/SiamMask/tree/master/data/vid)) +4. ILSVRC-DET ([preprocessing scripts](https://github.com/foolwood/SiamMask/tree/master/data/det)) + +The datasets need to be downloaded and preprocessed as indicated in the +[SiamMask](https://github.com/foolwood/SiamMask/tree/master/data) GitHub repository. + +The following data structure is expected: + +``` +data_root +├── ... +├── coco +│ ├── crop511 +│ ├── ... +├── Youtube_bb +│ ├── crop511 +│ ├── ... +├── vid +│ ├── crop511 +│ ├── ... +├── det +│ ├── crop511 +│ ├── ... +└── ... +``` + +#### Custom training datasets + +Support for custom datasets is implemented by inheriting the `opendr.engine.datasets.DatasetIterator` class as shown in +[otb_dataset.py](/src/opendr/perception/object_tracking_2d/datasets/otb_dataset.py). + +### Evaluation datasets + +The [OTB](http://cvlab.hanyang.ac.kr/tracker_benchmark/datasets.html) dataset is supported +for tracker evaluation. The dataset can be downloaded using the [otb.py](data_utils/otb.py) script or the +`SiamRPNLearner.download('otb2015')` method as shown in +[eval_demo.py](/projects/python/perception/object_tracking_2d/demos/siamrpn/eval_demo.py). + +The OpenDR SiamRPN model achieves a 66.8\% Success AUC on the OTB2015 dataset, running at ~132FPS +on an NVIDIA RTX 2070. 
+```shell +------------------------------------------------------- +| Tracker name | Success | FPS | +------------------------------------------------------- +| siamrpn_opendr | 0.668 | 132.2 | +------------------------------------------------------- +``` \ No newline at end of file diff --git a/src/opendr/perception/object_tracking_2d/siamrpn/__init__.py b/src/opendr/perception/object_tracking_2d/siamrpn/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/object_tracking_2d/siamrpn/data_utils/__init__.py b/src/opendr/perception/object_tracking_2d/siamrpn/data_utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/object_tracking_2d/siamrpn/data_utils/otb.py b/src/opendr/perception/object_tracking_2d/siamrpn/data_utils/otb.py new file mode 100644 index 0000000000..e7f1323a0d --- /dev/null +++ b/src/opendr/perception/object_tracking_2d/siamrpn/data_utils/otb.py @@ -0,0 +1,72 @@ +""" adapted from https://cv.gluon.ai/build/examples_datasets/otb2015.html +this script is used to prepare Otb2015 dataset for tracking, +which is Single Object Tracking benchmark""" + +import argparse +import zipfile +import os +import shutil +import time +from gluoncv.utils import download, makedirs + +otb50 = ['Basketball', 'Biker', 'Bird1', 'BlurBody', 'BlurCar2', 'BlurFace', 'BlurOwl', + 'Bolt', 'Box', 'Car1', 'Car4', 'CarDark', 'CarScale', 'ClifBar', 'Couple', 'Crowds', 'David', 'Deer', + 'Diving', 'DragonBaby', 'Dudek', 'Football', 'Freeman4', 'Girl', 'Human3', 'Human4', 'Human6', 'Human9', + 'Ironman', 'Jump', 'Jumping', 'Liquor', 'Matrix', 'MotorRolling', 'Panda', 'RedTeam', 'Shaking', 'Singer2', + 'Skating1', 'Skating2', 'Skiing', 'Soccer', 'Surfer', 'Sylvester', 'Tiger2', 'Trellis', 'Walking', + 'Walking2', 'Woman'] +otb100 = ['Bird2', 'BlurCar1', 'BlurCar3', 'BlurCar4', 'Board', 'Bolt2', 'Boy', + 'Car2', 'Car24', 'Coke', 'Coupon', 'Crossing', 'Dancer', 'Dancer2', 'David2', + 'David3', 'Dog', 'Dog1', 'Doll', 'FaceOcc1', 'FaceOcc2', 'Fish', 'FleetFace', 'Football1', + 'Freeman1', 'Freeman3', 'Girl2', 'Gym', 'Human2', 'Human5', 'Human7', 'Human8', 'Jogging', + 'KiteSurf', 'Lemming', 'Man', 'Mhyang', 'MountainBike', 'Rubik', 'Singer1', 'Skater', + 'Skater2', 'Subway', 'Suv', 'Tiger1', 'Toy', 'Trans', 'Twinnings', 'Vase'] + + +def parse_args(): + """Otb2015 dataset parameter.""" + parser = argparse.ArgumentParser( + description='Download Otb2015 and prepare for tracking') + parser.add_argument('--download-dir', type=str, default='~/data/otb/', + help='dataset directory on disk') + args = parser.parse_args() + args.download_dir = os.path.expanduser(args.download_dir) + return args + + +def download_otb(download_dir, overwrite=False): + """download otb2015 dataset and Unzip to download_dir""" + _DOWNLOAD_URLS = 'http://cvlab.hanyang.ac.kr/tracker_benchmark/seq/' + if not os.path.isdir(download_dir): + makedirs(download_dir) + for per_otb50 in otb50: + url = os.path.join(_DOWNLOAD_URLS, per_otb50 + '.zip') + filename = download(url, path=download_dir, overwrite=overwrite) + with zipfile.ZipFile(filename) as zf: + zf.extractall(path=download_dir) + for per_otb100 in otb100: + url = os.path.join(_DOWNLOAD_URLS, per_otb100 + '.zip') + filename = download(url, path=download_dir, overwrite=overwrite) + with zipfile.ZipFile(filename) as zf: + zf.extractall(path=download_dir) + + shutil.copytree(os.path.join(download_dir, 'Jogging'), os.path.join(download_dir, 'Jogging-1')) + os.rename(os.path.join(download_dir, 'Jogging'), 
os.path.join(download_dir, 'Jogging-2')) + shutil.copytree(os.path.join(download_dir, 'Skating2'), os.path.join(download_dir, 'Skating2-1')) + os.rename(os.path.join(download_dir, 'Skating2'), os.path.join(download_dir, 'Skating2-2')) + os.rename(os.path.join(download_dir, ' Human4'), os.path.join(download_dir, 'Human4-2')) + + +def main(args): + # download otb2015 dataset + download_otb(args.download_dir) + print('otb2015 dataset has already download completed') + + +if __name__ == '__main__': + since = time.time() + args = parse_args() + main(args) + time_elapsed = time.time() - since + print('Total complete in {:.0f}m {:.0f}s'.format( + time_elapsed // 60, time_elapsed % 60)) diff --git a/src/opendr/perception/object_tracking_2d/siamrpn/siamrpn_learner.py b/src/opendr/perception/object_tracking_2d/siamrpn/siamrpn_learner.py new file mode 100644 index 0000000000..f1a450ea54 --- /dev/null +++ b/src/opendr/perception/object_tracking_2d/siamrpn/siamrpn_learner.py @@ -0,0 +1,571 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# general imports +import os +import time +import json +import numpy as np +from multiprocessing import Pool +import cv2 +from tqdm import tqdm +from urllib.request import urlretrieve + +# gluoncv imports +import mxnet as mx +from mxnet import gluon, nd, autograd +from gluoncv import utils as gutils +from gluoncv import model_zoo +from gluoncv.model_zoo.siamrpn.siamrpn_tracker import SiamRPNTracker as build_tracker +from gluoncv.loss import SiamRPNLoss +from gluoncv.utils import LRScheduler, LRSequential, split_and_load +from gluoncv.model_zoo.siamrpn.siamrpn_tracker import get_axis_aligned_bbox +from gluoncv.data.tracking_data.track import TrkDataset +from gluoncv.utils.metrics.tracking import OPEBenchmark +from gluoncv.data.otb.tracking import OTBTracking + +# OpenDR engine imports +from opendr.engine.learners import Learner +from opendr.engine.data import Image +from opendr.engine.target import TrackingAnnotation +from opendr.engine.datasets import ExternalDataset +from opendr.engine.constants import OPENDR_SERVER_URL +from opendr.engine.datasets import DatasetIterator + +gutils.random.seed(0) +os.environ['MXNET_CUDNN_AUTOTUNE_DEFAULT'] = '0' + + +class SiamRPNLearner(Learner): + def __init__(self, device='cuda', n_epochs=50, num_workers=1, warmup_epochs=2, + lr=1e-3, weight_decay=0, momentum=0.9, cls_weight=1., loc_weight=1.2, + batch_size=32, temp_path=''): + """ + SiamRPN Tracker Learner + :param device: Either 'cpu' or 'cuda'. 
If a specific GPU is to be used, can be of the form 'cuda:#' + :type device: str, optional + :param n_epochs: Total number of epochs to train for + :type n_epochs: int, optional + :param num_workers: Number of threads used to load the train dataset or perform evaluation + :type num_workers: int, optional + :param warmup_epochs: Number of epochs during which the learning rate is annealed to `lr` + :type warmup_epochs: int, optional + :param lr: Initial learning rate, after warmup_epochs + :type lr: float, optional + :param weight_decay: Weight decay factor + :type weight_decay: float, optional + :param momentum: Optimizer momentum + :type momentum: float, optional + :param cls_weight: Weights the classification loss + :type cls_weight: float, optional + :param loc_weight: Weights the localization loss + :type loc_weight: float, optional + :param batch_size: Batch size for training + :type batch_size: int, optional + :param temp_path: Path to where relevant data and weights are downloaded + :type temp_path: str, optional + """ + self.loc_weight = loc_weight + self.cls_weight = cls_weight + self.warmup_epochs = warmup_epochs + self.num_workers = num_workers + self.n_epochs = n_epochs + backbone = 'siamrpn_alexnet_v2_otb15' + super(SiamRPNLearner, self).__init__(device=device, backbone=backbone, lr=lr, + batch_size=batch_size, temp_path=temp_path) + self.weight_decay = weight_decay + self.momentum = momentum + + if 'cuda' in self.device: + if mx.context.num_gpus() > 0: + if self.device == 'cuda': + self.ctx = mx.gpu(0) + else: + self.ctx = mx.gpu(int(self.device.split(':')[1])) + else: + self.ctx = mx.cpu() + else: + self.ctx = mx.cpu() + + self.__create_model() + self.tracker = build_tracker(self._model) + + def __create_model(self, pretrained=True): + """base model creation""" + self._model = model_zoo.get_model(self.backbone, ctx=self.ctx, pretrained=pretrained) + + def fit(self, dataset, log_interval=20, n_gpus=1, + val_dataset=None, logging_path='', silent=True, verbose=True): + """ + Train the tracker on a new dataset.
+ :param dataset: training dataset + :type dataset: ExternalDataset or supported DatasetIterator + :param log_interval: Train loss is printed after log_interval iterations + :type log_interval: int + :param n_gpus: Number of GPUs to train with if device is set to GPU + :type n_gpus: int + :param val_dataset: ignored + :type val_dataset: ExternalDataset, optional + :param logging_path: ignored + :type logging_path: str, optional + :param silent: ignored + :type silent: bool, optional + :param verbose: if set to True, additional information is printed to STDOUT, defaults to True + :type verbose: bool + :return: returns stats regarding the training process + :rtype: dict + """ + dataset = self.__prepare_training_dataset(dataset) + train_loader = gluon.data.DataLoader(dataset, + batch_size=self.batch_size, + last_batch='discard', + num_workers=self.num_workers) + + if self.device.startswith('cuda'): + if ':' in self.device: + _, gpu_no = self.device.split(':') + ctx = [mx.gpu(int(gpu_no))] + else: + ctx = [mx.gpu(i) for i in range(n_gpus)] + else: + ctx = [mx.cpu(0)] + + self._model = model_zoo.get_model(self.backbone, bz=self.batch_size, is_train=True, ctx=ctx, + pretrained=True) + + criterion = SiamRPNLoss(self.batch_size) + step_epoch = [10 * i for i in range(0, self.n_epochs, 10)] + num_batches = len(train_loader) + lr_scheduler = LRSequential([LRScheduler(mode='step', + base_lr=0.005, + target_lr=0.01, + nepochs=self.warmup_epochs, + iters_per_epoch=num_batches, + step_epoch=step_epoch, + ), + LRScheduler(mode='poly', + base_lr=0.01, + target_lr=0.005, + nepochs=self.n_epochs - self.warmup_epochs, + iters_per_epoch=num_batches, + step_epoch=[e - self.warmup_epochs for e in step_epoch], + power=0.02)]) + + optimizer_params = {'lr_scheduler': lr_scheduler, + 'wd': self.weight_decay, + 'momentum': self.momentum, + 'learning_rate': self.lr} + optimizer = gluon.Trainer(self._model.collect_params(), 'sgd', optimizer_params) + train_dict = { + 'loss_total': [], + 'loss_loc': [], + 'loss_cls': [] + } + + for epoch in range(self.n_epochs): + loss_total_val = 0 + loss_loc_val = 0 + loss_cls_val = 0 + batch_time = time.time() + for i, data in enumerate(train_loader): + template, search, label_cls, label_loc, label_loc_weight = self.__train_batch_fn(data, ctx) + cls_losses = [] + loc_losses = [] + total_losses = [] + + with autograd.record(): + for j in range(len(ctx)): + cls, loc = self._model(template[j], search[j]) + label_cls_temp = label_cls[j].reshape(-1).asnumpy() + pos_index = np.argwhere(label_cls_temp == 1).reshape(-1) + neg_index = np.argwhere(label_cls_temp == 0).reshape(-1) + if len(pos_index): + pos_index = nd.array(pos_index, ctx=ctx[j]) + else: + pos_index = nd.array(np.array([]), ctx=ctx[j]) + if len(neg_index): + neg_index = nd.array(neg_index, ctx=ctx[j]) + else: + neg_index = nd.array(np.array([]), ctx=ctx[j]) + cls_loss, loc_loss = criterion(cls, loc, label_cls[j], pos_index, neg_index, + label_loc[j], label_loc_weight[j]) + total_loss = self.cls_weight * cls_loss + self.loc_weight * loc_loss + cls_losses.append(cls_loss) + loc_losses.append(loc_loss) + total_losses.append(total_loss) + + mx.nd.waitall() + autograd.backward(total_losses) + optimizer.step(self.batch_size) + loss_total_val += sum([l.mean().asscalar() for l in total_losses]) / len(total_losses) + loss_loc_val += sum([l.mean().asscalar() for l in loc_losses]) / len(loc_losses) + loss_cls_val +=
sum([l.mean().asscalar() for l in cls_losses]) / len(cls_losses) + if i % log_interval == 0 and verbose: + print('Epoch %d iteration %04d/%04d: loc loss %.3f, cls loss %.3f, \ + training loss %.3f, batch time %.3f' % + (epoch, i, len(train_loader), loss_loc_val / (i + 1), loss_cls_val / (i + 1), + loss_total_val / (i + 1), time.time() - batch_time)) + batch_time = time.time() + mx.nd.waitall() + train_dict['loss_total'].append(loss_total_val) + train_dict['loss_loc'].append(loss_loc_val) + train_dict['loss_cls'].append(loss_cls_val) + return train_dict + + def eval(self, dataset): + """ + Evaluate the current model on the OTB dataset. Measures success and FPS. + :param dataset: Dataset for evaluation. + :type dataset: `ExternalDataset` + :return: returns stats regarding evaluation + :rtype: dict + """ + tracker_name = self.backbone + + dataset = self.__prepare_validation_dataset(dataset) + + self._model.collect_params().reset_ctx(self.ctx) + self.tracker = build_tracker(self._model) + + # iterate through dataset + fps = np.zeros(len(dataset)) + for v_idx, video in enumerate(dataset): + toc = 0 + pred_bboxes = [] + scores = [] + track_times = [] + for idx, (img, gt_bbox) in enumerate(video): + tic = cv2.getTickCount() + if idx == 0: + x_max, y_max, gt_w, gt_t = get_axis_aligned_bbox(np.array(gt_bbox)) + gt_bbox_ = [x_max - (gt_w - 1) / 2, y_max - (gt_t - 1) / 2, gt_w, gt_t] + self.tracker.init(img, gt_bbox_, self.ctx) + pred_bbox = gt_bbox_ + scores.append(None) + pred_bboxes.append(pred_bbox) + else: + outputs = self.tracker.track(img, self.ctx) + pred_bbox = outputs['bbox'] + pred_bboxes.append(pred_bbox) + scores.append(outputs['best_score']) + toc += cv2.getTickCount() - tic + track_times.append((cv2.getTickCount() - tic) / cv2.getTickFrequency()) + toc /= cv2.getTickFrequency() + + print('({:3d}) Video: {:12s} Time: {:5.1f}s Speed: {:3.1f}fps'.format + (v_idx + 1, video.name, toc, len(video) / toc)) + video.pred_trajs[tracker_name] = pred_bboxes + fps[v_idx] = len(video) / toc + + mean_fps = np.mean(fps) + benchmark = OPEBenchmark(dataset) + trackers = [tracker_name] + success_ret = {} + with Pool(processes=self.num_workers) as pool: + for ret in tqdm(pool.imap_unordered(benchmark.eval_success, trackers), + desc='eval success', total=len(trackers), ncols=100): + success_ret.update(ret) + precision_ret = {} + with Pool(processes=self.num_workers) as pool: + for ret in tqdm(pool.imap_unordered(benchmark.eval_precision, trackers), + desc='eval precision', total=len(trackers), ncols=100): + precision_ret.update(ret) + + tracker_auc = {} + for tracker_name in success_ret.keys(): + auc = np.mean(list(success_ret[tracker_name].values())) + tracker_auc[tracker_name] = auc + + tracker_name_len = max((max([len(x) for x in success_ret.keys()]) + 2), 12) + header = ("|{:^" + str(tracker_name_len) + "}|{:^9}|{:^9}|").format("Tracker name", "Success", "FPS") + formatter = "|{:^" + str(tracker_name_len) + "}|{:^9.3f}|{:^9.1f}|" + print('-' * len(header)) + print(header) + print('-' * len(header)) + success = tracker_auc[tracker_name] + print(formatter.format(tracker_name, success, mean_fps)) + print('-' * len(header)) + eval_dict = { + 'success': success, + 'fps': mean_fps + } + return eval_dict + + def infer(self, img, init_box=None): + """ + Performs inference on an input image and returns the resulting bounding box. 
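For intuition, the success score aggregated by `OPEBenchmark.eval_success` in `eval()` above corresponds to the area under the per-frame overlap (IoU) success curve; the following is a simplified numpy illustration of that metric, not the benchmark's actual implementation.

```python
# Simplified OTB-style success/AUC from per-frame IoU values; illustrative only.
import numpy as np

def success_auc(per_frame_iou, thresholds=np.linspace(0, 1, 21)):
    per_frame_iou = np.asarray(per_frame_iou, dtype=np.float64)
    # success rate at each overlap threshold = fraction of frames whose IoU exceeds it
    success_curve = [(per_frame_iou > t).mean() for t in thresholds]
    return float(np.mean(success_curve))  # AUC is reported as the mean of the curve

print(success_auc([0.9, 0.7, 0.4, 0.0]))  # higher is better; 1.0 means perfect overlap on every frame
```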
+ :param img: image to perform inference on + :type img: opendr.engine.data.Image + :param init_box: If provided, it is used to initialized the tracker on the contained object + :type init_box: TrackingAnnotation + :return: list of bounding boxes + :rtype: BoundingBoxList + """ + if isinstance(img, Image): + img = img.opencv() + + if isinstance(init_box, TrackingAnnotation) and init_box is not None: + # initialize tracker + gt_bbox_ = [init_box.left, init_box.top, init_box.width, init_box.height] + self.tracker.init(img, gt_bbox_, ctx=self.ctx) + pred_bbox = gt_bbox_ + else: + outputs = self.tracker.track(img, ctx=self.ctx) + pred_bbox = outputs['bbox'] + + pred_bbox = list(map(int, pred_bbox)) + return TrackingAnnotation(left=pred_bbox[0], top=pred_bbox[1], + width=pred_bbox[2], height=pred_bbox[3], name=0, id=0) + + def save(self, path, verbose=False): + """ + Method for saving the current model in the path provided. + :param path: path to folder where model will be saved + :type path: str + :param verbose: whether to print a success message or not, defaults to False + :type verbose: bool, optional + """ + os.makedirs(path, exist_ok=True) + + model_name = os.path.basename(path) + if verbose: + print(model_name) + metadata = {"model_paths": [], "framework": "mxnet", "format": "params", + "has_data": False, "inference_params": {}, "optimized": False, + "optimizer_info": {}, "backbone": self.backbone} + param_filepath = model_name + ".params" + metadata["model_paths"].append(param_filepath) + + self._model.save_parameters(os.path.join(path, metadata["model_paths"][0])) + if verbose: + print("Model parameters saved.") + + with open(os.path.join(path, model_name + '.json'), 'w', encoding='utf-8') as f: + json.dump(metadata, f, ensure_ascii=False, indent=4) + if verbose: + print("Model metadata saved.") + return True + + def load(self, path, verbose=False): + """ + Loads the model from the path provided, based on the metadata .json file included. + :param path: path of the directory where the model was saved + :type path: str + :param verbose: whether to print a success message or not, defaults to False + :type verbose: bool, optional + """ + model_name = os.path.basename(os.path.normpath(path)) + if verbose: + print("Model name:", model_name, "-->", os.path.join(path, model_name + ".json")) + with open(os.path.join(path, model_name + ".json")) as f: + metadata = json.load(f) + + self.backbone = metadata["backbone"] + self.__create_model(pretrained=False) + + self._model.load_parameters(os.path.join(path, metadata["model_paths"][0])) + self._model.collect_params().reset_ctx(self.ctx) + self._model.hybridize(static_alloc=True, static_shape=True) + if verbose: + print("Loaded parameters and metadata.") + return True + + def download(self, path=None, mode="pretrained", verbose=False, + url=OPENDR_SERVER_URL + "/perception/object_tracking_2d/siamrpn/", + overwrite=False): + """ + Downloads all files necessary for inference, evaluation and training. Valid mode options are: ["pretrained", + "video", "test_data", "otb2015"]. 
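A hypothetical end-to-end usage of the `download()`, `load()` and `save()` methods above; the import path and folder names are assumptions for illustration, not taken from the PR.

```python
# Hypothetical usage sketch; import path and folder names are illustrative assumptions.
from opendr.perception.object_tracking_2d import SiamRPNLearner

learner = SiamRPNLearner(device='cpu')
learner.download(path='./siamrpn_temp', mode='pretrained')   # fetches siamrpn_opendr.json and .params
learner.load('./siamrpn_temp/siamrpn_opendr')                # reads the metadata .json, then the .params
learner.save('./my_siamrpn')                                 # writes my_siamrpn.params and my_siamrpn.json
```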
+ :param path: folder to which files will be downloaded, if None self.temp_path will be used + :type path: str, optional + :param mode: one of: ["pretrained", "video", "test_data", "otb2015"], where "pretrained" downloads a pretrained + network, "video" downloads example inference data, "test_data" downloads a very small train/eval subset, and + "otb2015" downloads the OTB dataset + :type mode: str, optional + :param verbose: if True, additional information is printed on stdout + :type verbose: bool, optional + :param url: URL to file location on FTP server + :type url: str, optional + :param overwrite: if True, the downloaded files will be overwritten + :type overwrite:bool, optional + """ + valid_modes = ["pretrained", "video", "otb2015", "test_data"] + if mode not in valid_modes: + raise UserWarning("mode parameter not valid:", mode, ", file should be one of:", valid_modes) + + if path is None: + path = self.temp_path + + if not os.path.exists(path): + os.makedirs(path) + + if mode == "pretrained": + path = os.path.join(path, "siamrpn_opendr") + if not os.path.exists(path): + os.makedirs(path) + + if verbose: + print("Downloading pretrained model...") + + file_url = os.path.join(url, "pretrained", + "siamrpn_opendr", + "siamrpn_opendr.json") + if verbose: + print("Downloading metadata...") + file_path = os.path.join(path, "siamrpn_opendr.json") + if not os.path.exists(file_path) or overwrite: + urlretrieve(file_url, file_path) + + if verbose: + print("Downloading params...") + file_url = os.path.join(url, "pretrained", "siamrpn_opendr", + "siamrpn_opendr.params") + file_path = os.path.join(path, "siamrpn_opendr.params") + if not os.path.exists(file_path) or overwrite: + urlretrieve(file_url, file_path) + + elif mode == "video": + file_url = os.path.join(url, "video", "tc_Skiing_ce.mp4") + if verbose: + print("Downloading example video...") + file_path = os.path.join(path, "tc_Skiing_ce.mp4") + if not os.path.exists(file_path) or overwrite: + urlretrieve(file_url, file_path) + + elif mode == "test_data": + os.makedirs(os.path.join(path, "Basketball"), exist_ok=True) + os.makedirs(os.path.join(path, "Basketball", "img"), exist_ok=True) + # download annotation + file_url = os.path.join(url, "test_data", "OTBtest.json") + if verbose: + print("Downloading annotation...") + file_path = os.path.join(path, "OTBtest.json") + if not os.path.exists(file_path) or overwrite: + urlretrieve(file_url, file_path) + # download image + if verbose: + print("Downloading 100 images...") + for i in range(100): + file_url = os.path.join(url, "test_data", "Basketball", "img", f"{i+1:04d}.jpg") + file_path = os.path.join(path, "Basketball", "img", f"{i+1:04d}.jpg") + if not os.path.exists(file_path) or overwrite: + urlretrieve(file_url, file_path) + + else: + # mode == 'otb2015' + from .data_utils.otb import download_otb + if verbose: + print('Attempting to download OTB2015 (100 videos)...') + download_otb(os.path.join(path, "otb2015"), overwrite=overwrite) + file_url = os.path.join(url, "otb2015", "OTB2015.json") + if verbose: + print("Downloading annotation...") + file_path = os.path.join(path, "otb2015", "OTB2015.json") + if not os.path.exists(file_path) or overwrite: + urlretrieve(file_url, file_path) + + @staticmethod + def __train_batch_fn(data, ctx): + """split and load data in GPU""" + template = split_and_load(data[0], ctx_list=ctx, batch_axis=0) + search = split_and_load(data[1], ctx_list=ctx, batch_axis=0) + label_cls = split_and_load(data[2], ctx_list=ctx, batch_axis=0) + label_loc = 
split_and_load(data[3], ctx_list=ctx, batch_axis=0) + label_loc_weight = split_and_load(data[4], ctx_list=ctx, batch_axis=0) + return template, search, label_cls, label_loc, label_loc_weight + + def __prepare_training_dataset(self, dataset): + """ + Converts `ExternalDataset` or list of `ExternalDatasets` to appropriate format. + :param dataset: Training dataset(s) + :type dataset: `ExternalDataset` or list of `ExternalDatasets` + """ + if isinstance(dataset, list) or isinstance(dataset, tuple): + frame_range_map = { + 'vid': 100, + 'Youtube_bb': 3, + 'coco': 1, + 'det': 1, + } + num_use_map = { + 'vid': 100000, + 'Youtube_bb': -1, + 'coco': -1, + 'det': -1, + } + + dataset_paths = [] + dataset_names = [] + dataset_roots = [] + dataset_annos = [] + frame_ranges = [] + num_uses = [] + + for _dataset in dataset: + # check that every entry is an ExternalDataset + if not isinstance(_dataset, ExternalDataset): + raise TypeError("Only `ExternalDataset` types are supported.") + # get params + dataset_paths.append(_dataset.path) + dataset_names.append(_dataset.dataset_type) + dataset_roots.append(os.path.join(_dataset.dataset_type, 'crop511')) + dataset_annos.append(os.path.join(_dataset.dataset_type, + f'train{"2017" if _dataset.dataset_type == "coco" else ""}.json')) + frame_ranges.append(frame_range_map[_dataset.dataset_type]) + num_uses.append(num_use_map[_dataset.dataset_type]) + dataset = TrkDataset(dataset[0].path, + dataset_names=dataset_names, detaset_root=dataset_roots, + detaset_anno=dataset_annos, train_epoch=self.n_epochs, + dataset_frame_range=frame_ranges, dataset_num_use=num_uses) + return dataset + + if isinstance(dataset, ExternalDataset): + dataset_types = ['vid', 'Youtube_bb', 'coco', 'det'] + assert dataset.dataset_type in dataset_types, f"Unrecognized dataset_type," \ + f" acceptable values: {dataset_types}" + dataset = TrkDataset(data_path=dataset.path, + dataset_names=[dataset.dataset_type], detaset_root=[f'{dataset.dataset_type}/crop511'], + detaset_anno=[f'{dataset.dataset_type}/' + f'train{"2017" if dataset.dataset_type == "coco" else ""}.json'], + train_epoch=self.n_epochs) + return dataset + + if issubclass(type(dataset), DatasetIterator): + return dataset + + if not isinstance(dataset, ExternalDataset): + raise TypeError("Only `ExternalDataset` and modified `DatasetIterator` types are supported.") + + @staticmethod + def __prepare_validation_dataset(dataset): + """ + :param dataset: `ExternalDataset` object containing OTB2015 dataset root and type ('OTB2015') + :type dataset: ExternalDataset + """ + if not isinstance(dataset, ExternalDataset): + raise TypeError("Only `ExternalDataset` types are supported.") + dataset_types = ["OTB2015", "OTBtest"] + assert dataset.dataset_type in dataset_types, "Unrecognized dataset type, only OTB2015 and OTBtest are supported currently" + dataset = OTBTracking(dataset.dataset_type, dataset_root=dataset.path, load_img=False) + return dataset + + def reset(self): + """This method is not used in this implementation.""" + return NotImplementedError + + def optimize(self, target_device): + """This method is not used in this implementation.""" + return NotImplementedError diff --git a/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/ab3dmot.py b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/ab3dmot.py index 19b0e1d761..a90e970415 100644 --- a/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/ab3dmot.py +++ b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/ab3dmot.py @@ -15,20 +15,8 @@ import numpy as np from
opendr.engine.target import BoundingBox3DList, TrackingAnnotation3DList from scipy.optimize import linear_sum_assignment +from opendr.perception.object_tracking_3d.ab3dmot.algorithm.core import convert_3dbox_to_8corner, iou3D from opendr.perception.object_tracking_3d.ab3dmot.algorithm.kalman_tracker_3d import KalmanTracker3D -from opendr.perception.object_detection_3d.voxel_object_detection_3d.second_detector.core.box_np_ops import ( - center_to_corner_box3d, -) -from numba.cuda.cudadrv.error import CudaSupportError - -try: - from opendr.perception.object_detection_3d.voxel_object_detection_3d.\ - second_detector.core.non_max_suppression.nms_gpu import ( - rotate_iou_gpu_eval as iou3D, - ) -except (CudaSupportError, ValueError): - def iou3D(boxes, qboxes, criterion=-1): - return np.ones((boxes.shape[0], qboxes.shape[0])) class AB3DMOT(): @@ -46,9 +34,10 @@ def __init__( self.max_staleness = max_staleness self.min_updates = min_updates - self.frame = frame + self.frame = frame - 1 + self.starting_frame = frame - 1 self.tracklets = [] - self.last_tracklet_id = 1 + self.last_tracklet_id = 0 self.iou_threshold = iou_threshold self.state_dimensions = state_dimensions @@ -60,6 +49,8 @@ def __init__( def update(self, detections: BoundingBox3DList): + self.frame += 1 + if len(detections) > 0: predictions = np.zeros([len(self.tracklets), self.measurement_dimensions]) @@ -68,18 +59,16 @@ def update(self, detections: BoundingBox3DList): box = tracklet.predict().reshape(-1)[:self.measurement_dimensions] predictions[i] = [*box] - detection_corners = center_to_corner_box3d( - np.array([box.location for box in detections.boxes]), - np.array([box.dimensions for box in detections.boxes]), - np.array([box.rotation_y for box in detections.boxes]), - ) + detection_corners = [ + convert_3dbox_to_8corner(np.array([*box.location, box.rotation_y, *box.dimensions])) + for box in detections.boxes + ] if len(predictions) > 0: - prediction_corners = center_to_corner_box3d( - predictions[:, :3], - predictions[:, 4:], - predictions[:, 3], - ) + prediction_corners = [ + convert_3dbox_to_8corner(p) + for p in predictions + ] else: prediction_corners = np.zeros((0, 8, 3)) @@ -115,22 +104,22 @@ def update(self, detections: BoundingBox3DList): tracked_boxes.append(tracklet.tracking_bounding_box_3d(self.frame)) result = TrackingAnnotation3DList(tracked_boxes) - - self.frame += 1 - return result def reset(self): - self.frame = 0 + self.frame = self.starting_frame self.tracklets = [] - self.last_tracklet_id = 1 + self.last_tracklet_id = 0 def associate(detection_corners, prediction_corners, iou_threshold): - ious = iou3D(detection_corners, prediction_corners) + iou_matrix = np.zeros((len(detection_corners), len(prediction_corners)), dtype=np.float32) + for d, det in enumerate(detection_corners): + for t, trk in enumerate(prediction_corners): + iou_matrix[d, t] = iou3D(det, trk)[0] - detection_match_ids, prediction_match_ids = linear_sum_assignment(-ious) + detection_match_ids, prediction_match_ids = linear_sum_assignment(-iou_matrix) unmatched_detections = [] unmatched_predictions = [] @@ -148,7 +137,7 @@ def associate(detection_corners, prediction_corners, iou_threshold): detection_id = detection_match_ids[i] prediction_id = prediction_match_ids[i] - if ious[detection_id, prediction_id] < iou_threshold: + if iou_matrix[detection_id, prediction_id] < iou_threshold: unmatched_detections.append(detection_id) unmatched_predictions.append(prediction_id) else: diff --git 
a/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/core.py b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/core.py new file mode 100644 index 0000000000..3b382d5bb6 --- /dev/null +++ b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/core.py @@ -0,0 +1,127 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import List +import numba +import copy +import numpy as np +from scipy.spatial import ConvexHull + + +@numba.jit +def polygon_area(x, y): + return 0.5 * np.abs(np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1))) + + +@numba.jit +def corner_box3d_volume(corners: np.array): # [8, 3] -> [] + + result = ( + np.sqrt(np.sum((corners[0, :] - corners[1, :]) ** 2)) * + np.sqrt(np.sum((corners[1, :] - corners[2, :]) ** 2)) * + np.sqrt(np.sum((corners[0, :] - corners[4, :]) ** 2)) + ) + return result + + +def polygon_clip(subject_polygon, clip_polygon): # [(x, y)] -> [(x, y)] -> [(x, y)) + def is_inside(p, clip_polygon1, clip_polygon2): + return (clip_polygon2[0] - clip_polygon1[0]) * (p[1] - clip_polygon1[1]) > ( + clip_polygon2[1] - clip_polygon1[1] + ) * (p[0] - clip_polygon1[0]) + + def intersection(clip_polygon1, clip_polygon2): + dc = [clip_polygon1[0] - clip_polygon2[0], clip_polygon1[1] - clip_polygon2[1]] + dp = [s[0] - e[0], s[1] - e[1]] + n1 = clip_polygon1[0] * clip_polygon2[1] - clip_polygon1[1] * clip_polygon2[0] + n2 = s[0] * e[1] - s[1] * e[0] + n3 = 1.0 / (dc[0] * dp[1] - dc[1] * dp[0]) + return [(n1 * dp[0] - n2 * dc[0]) * n3, (n1 * dp[1] - n2 * dc[1]) * n3] + + outputList = subject_polygon + cp1 = clip_polygon[-1] + + for clip_vertex in clip_polygon: + cp2 = clip_vertex + inputList = outputList + outputList = [] + s = inputList[-1] + + for subjectVertex in inputList: + e = subjectVertex + if is_inside(e, cp1, cp2): + if not is_inside(s, cp1, cp2): + outputList.append(intersection(cp1, cp2)) + outputList.append(e) + elif is_inside(s, cp1, cp2): + outputList.append(intersection(cp1, cp2)) + s = e + cp1 = cp2 + if len(outputList) == 0: + return None + return outputList + + +@numba.jit +def convex_hull_intersection( + polygon1: List[tuple], polygon2: List[tuple] +): # [(x, y)] -> [(x, y)] -> ([(x, y), []]) + inter_p = polygon_clip(polygon1, polygon2) + if inter_p is not None: + hull_inter = ConvexHull(inter_p) + return inter_p, hull_inter.volume + else: + return None, 0.0 + + +def iou3D(corners1, corners2): # [8, 3] -> [8, 3] -> ([], []) + # corner points are in counter clockwise order + rect1 = [(corners1[i, 0], corners1[i, 2]) for i in range(3, -1, -1)] + rect2 = [(corners2[i, 0], corners2[i, 2]) for i in range(3, -1, -1)] + area1 = polygon_area(np.array(rect1)[:, 0], np.array(rect1)[:, 1]) + area2 = polygon_area(np.array(rect2)[:, 0], np.array(rect2)[:, 1]) + _, inter_area = convex_hull_intersection(rect1, rect2) + iou_2d = inter_area / (area1 + area2 - inter_area) + y_max = min(corners1[0, 1], corners2[0, 1]) + y_min = max(corners1[4, 1], corners2[4, 1]) + inter_vol = inter_area * max(0.0, 
y_max - y_min) + vol1 = corner_box3d_volume(corners1) + vol2 = corner_box3d_volume(corners2) + iou = inter_vol / (vol1 + vol2 - inter_vol) + return iou, iou_2d + + +@numba.jit +def rotation_matrix_y(t): # [] -> [3, 3] + c = np.cos(t) + s = np.sin(t) + return np.array([[c, 0, s], [0, 1, 0], [-s, 0, c]]) + + +def convert_3dbox_to_8corner(bbox3d_input): # [7] -> [8, 3] + bbox3d = copy.copy(bbox3d_input) + rot_matrix = rotation_matrix_y(bbox3d[3]) + + l, w, h = bbox3d[4:7] + + x_corners = [l / 2, l / 2, -l / 2, -l / 2, l / 2, l / 2, -l / 2, -l / 2] + y_corners = [0, 0, 0, 0, -h, -h, -h, -h] + z_corners = [w / 2, -w / 2, -w / 2, w / 2, w / 2, -w / 2, -w / 2, w / 2] + + corners_3d = np.dot(rot_matrix, np.vstack([x_corners, y_corners, z_corners])) + corners_3d[0, :] = corners_3d[0, :] + bbox3d[0] + corners_3d[1, :] = corners_3d[1, :] + bbox3d[1] + corners_3d[2, :] = corners_3d[2, :] + bbox3d[2] + + return np.transpose(corners_3d) diff --git a/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/evaluate.py b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/evaluate.py index d28d8d9951..c649445302 100644 --- a/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/evaluate.py +++ b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/evaluate.py @@ -284,46 +284,47 @@ def _load_data( for boxList in input_seq_data: input_seq_boxes += boxList.boxes - f_data = [[] for x in range(input_seq_boxes[-1].frame + 1)] + # f_data = [[] for x in range(input_seq_boxes[-1].frame + 1)] + f_data = [[] for x in range(len(input_seq_data))] - for TrackingAnnotation3D in input_seq_boxes: + for trackingAnnotation3D in input_seq_boxes: # KITTI tracking benchmark data format: # (frame,tracklet_id,objectType,truncation,occlusion,alpha,x1,y1,x2,y2,h,w,l,X,Y,Z,ry) - if not any([s for s in classes if s == TrackingAnnotation3D.name.lower()]): + if not any([s for s in classes if s == trackingAnnotation3D.name.lower()]): continue # get fields from table - t_data.frame = int(TrackingAnnotation3D.frame) - t_data.track_id = int(TrackingAnnotation3D.id) - t_data.obj_type = TrackingAnnotation3D.name.lower() # object type [car, pedestrian, cyclist, ...] + t_data.frame = int(trackingAnnotation3D.frame) + t_data.track_id = int(trackingAnnotation3D.id) + t_data.obj_type = trackingAnnotation3D.name.lower() # object type [car, pedestrian, cyclist, ...] 
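The new `convert_3dbox_to_8corner`/`iou3D` helpers above replace the previous GPU-only rotated-IoU path; below is a small illustrative check, assuming the toolkit's pinned dependencies (numba, scipy) are installed and using made-up box values.

```python
# Illustrative check of the new CPU 3D-IoU helpers; box values are made up.
import numpy as np
from opendr.perception.object_tracking_3d.ab3dmot.algorithm.core import (
    convert_3dbox_to_8corner, iou3D,
)

# layout read by convert_3dbox_to_8corner: [x, y, z, rotation_y, l, w, h]
box_a = np.array([0.0, 1.5, 10.0, 0.0, 4.0, 1.8, 1.6])
box_b = np.array([0.5, 1.5, 10.2, 0.1, 4.0, 1.8, 1.6])

corners_a = convert_3dbox_to_8corner(box_a)    # (8, 3) corner representation
corners_b = convert_3dbox_to_8corner(box_b)
iou_3d, iou_bev = iou3D(corners_a, corners_b)  # full 3D IoU and bird's-eye-view IoU
print(round(float(iou_3d), 3), round(float(iou_bev), 3))  # strongly overlapping boxes -> values close to 1
```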
t_data.truncation = int( - TrackingAnnotation3D.truncated + trackingAnnotation3D.truncated ) # truncation [-1,0,1,2] t_data.occlusion = int( - TrackingAnnotation3D.occluded + trackingAnnotation3D.occluded ) # occlusion [-1,0,1,2] - t_data.obs_angle = float(TrackingAnnotation3D.alpha) # observation angle [rad] - t_data.x1 = float(TrackingAnnotation3D.bbox2d[0]) # left [px] - t_data.y1 = float(TrackingAnnotation3D.bbox2d[1]) # top [px] - t_data.x2 = float(TrackingAnnotation3D.bbox2d[2]) # right [px] - t_data.y2 = float(TrackingAnnotation3D.bbox2d[3]) # bottom [px] - t_data.h = float(TrackingAnnotation3D.dimensions[0]) # height [m] - t_data.w = float(TrackingAnnotation3D.dimensions[1]) # width [m] - t_data.length = float(TrackingAnnotation3D.dimensions[2]) # length [m] - t_data.X = float(TrackingAnnotation3D.location[0]) # X [m] - t_data.Y = float(TrackingAnnotation3D.location[1]) # Y [m] - t_data.Z = float(TrackingAnnotation3D.location[2]) # Z [m] - t_data.yaw = float(TrackingAnnotation3D.rotation_y) # yaw angle [rad] - t_data.score = float(TrackingAnnotation3D.confidence) + t_data.obs_angle = float(trackingAnnotation3D.alpha) # observation angle [rad] + t_data.x1 = float(trackingAnnotation3D.bbox2d[0]) # left [px] + t_data.y1 = float(trackingAnnotation3D.bbox2d[1]) # top [px] + t_data.x2 = float(trackingAnnotation3D.bbox2d[2]) # right [px] + t_data.y2 = float(trackingAnnotation3D.bbox2d[3]) # bottom [px] + t_data.h = float(trackingAnnotation3D.dimensions[0]) # height [m] + t_data.w = float(trackingAnnotation3D.dimensions[1]) # width [m] + t_data.length = float(trackingAnnotation3D.dimensions[2]) # length [m] + t_data.X = float(trackingAnnotation3D.location[0]) # X [m] + t_data.Y = float(trackingAnnotation3D.location[1]) # Y [m] + t_data.Z = float(trackingAnnotation3D.location[2]) # Z [m] + t_data.yaw = float(trackingAnnotation3D.rotation_y) # yaw angle [rad] + t_data.score = float(trackingAnnotation3D.confidence) # do not consider objects marked as invalid - if t_data.track_id is -1 and t_data.obj_type != "dontcare": + if t_data.track_id == -1 and t_data.obj_type != "dontcare": continue idx = t_data.frame # check if length for frame data is sufficient if idx >= len(f_data): - raise ValueError("Frame " + str(idx) + "is out of range") + raise ValueError("Frame " + str(idx) + " is out of range") id_frame = (t_data.frame, t_data.track_id) if id_frame in id_frame_cache and not loading_groundtruth: diff --git a/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/kalman_tracker_3d.py b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/kalman_tracker_3d.py index 5f73a56a78..502b4bf94c 100644 --- a/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/kalman_tracker_3d.py +++ b/src/opendr/perception/object_tracking_3d/ab3dmot/algorithm/kalman_tracker_3d.py @@ -135,7 +135,7 @@ def tracking_bounding_box_3d(self, frame): return TrackingAnnotation3D( self.name, self.truncated, self.occluded, self.alpha, self.bbox2d, - self.kalman_filter.x[4:].reshape(-1), + self.kalman_filter.x[4:7].reshape(-1), self.kalman_filter.x[:3].reshape(-1), float(self.kalman_filter.x[3]), self.id, diff --git a/src/opendr/perception/object_tracking_3d/ab3dmot/object_tracking_3d_ab3dmot_learner.py b/src/opendr/perception/object_tracking_3d/ab3dmot/object_tracking_3d_ab3dmot_learner.py index 067410f992..1d72586cde 100644 --- a/src/opendr/perception/object_tracking_3d/ab3dmot/object_tracking_3d_ab3dmot_learner.py +++ b/src/opendr/perception/object_tracking_3d/ab3dmot/object_tracking_3d_ab3dmot_learner.py @@ 
-49,6 +49,7 @@ def __init__( self.covariance_matrix = covariance_matrix self.process_uncertainty_matrix = process_uncertainty_matrix self.iou_threshold = iou_threshold + self.model = None self.infers_count = 0 self.infers_time = 0 diff --git a/src/opendr/perception/object_tracking_3d/datasets/kitti_tracking.py b/src/opendr/perception/object_tracking_3d/datasets/kitti_tracking.py index 6332b8889c..88e231077b 100644 --- a/src/opendr/perception/object_tracking_3d/datasets/kitti_tracking.py +++ b/src/opendr/perception/object_tracking_3d/datasets/kitti_tracking.py @@ -22,8 +22,18 @@ from zipfile import ZipFile from urllib.request import urlretrieve from opendr.engine.constants import OPENDR_SERVER_URL -from opendr.engine.target import BoundingBox3D, BoundingBox3DList, TrackingAnnotation3D, TrackingAnnotation3DList +from opendr.engine.target import ( + BoundingBox3D, + BoundingBox3DList, + TrackingAnnotation3D, + TrackingAnnotation3DList, +) +from opendr.engine.data import PointCloudWithCalibration from opendr.engine.datasets import DatasetIterator +import numpy as np +from skimage import io + +from opendr.perception.object_detection_3d.datasets.kitti import parse_calib class KittiTrackingDatasetIterator(DatasetIterator): @@ -122,167 +132,17 @@ def __load_data(self): data = [] - def load_file(file_path, format, return_format, remove_dontcare=False): - - results = {} - max_frame = -1 - - with open(file_path) as f: - lines = [x.strip() for x in f.readlines()] - - for line in lines: - fields = line.split(" ") - frame = int(float(fields[0])) - - if format == "tracking": - if return_format == "tracking": - box = TrackingAnnotation3D( - name=fields[2], - truncated=int(float(fields[3])), - occluded=int(float(fields[4])), - alpha=float(fields[5]), - bbox2d=[ - float(fields[6]), - float(fields[7]), - float(fields[8]), - float(fields[9]), - ], - dimensions=[ - float(fields[10]), - float(fields[11]), - float(fields[12]), - ], - location=[ - float(fields[13]), - float(fields[14]), - float(fields[15]), - ], - rotation_y=float(fields[16]), - score=0 if len(fields) <= 17 else fields[17], - frame=int(float(fields[0])), - id=int(float(fields[1])), - ) - elif return_format == "detection": - box = BoundingBox3D( - name=fields[2], - truncated=int(float(fields[3])), - occluded=int(float(fields[4])), - alpha=float(fields[5]), - bbox2d=[ - float(fields[6]), - float(fields[7]), - float(fields[8]), - float(fields[9]), - ], - dimensions=[ - float(fields[10]), - float(fields[11]), - float(fields[12]), - ], - location=[ - float(fields[13]), - float(fields[14]), - float(fields[15]), - ], - rotation_y=float(fields[16]), - score=0 if len(fields) <= 17 else fields[17], - ) - else: - raise ValueError("return_format should be tracking or detection") - elif format == "detection": - if return_format == "tracking": - box = TrackingAnnotation3D( - name=fields[1], - truncated=int(float(fields[2])), - occluded=int(float(fields[3])), - alpha=float(fields[4]), - bbox2d=[ - float(fields[5]), - float(fields[6]), - float(fields[7]), - float(fields[8]), - ], - dimensions=[ - float(fields[9]), - float(fields[10]), - float(fields[11]), - ], - location=[ - float(fields[12]), - float(fields[13]), - float(fields[14]), - ], - rotation_y=float(fields[15]), - score=0 if len(fields) <= 15 else fields[16], - frame=int(float(fields[0])), - id=-1, - ) - elif return_format == "detection": - box = BoundingBox3D( - name=fields[1], - truncated=int(float(fields[2])), - occluded=int(float(fields[3])), - alpha=float(fields[4]), - bbox2d=[ - float(fields[5]), 
- float(fields[6]), - float(fields[7]), - float(fields[8]), - ], - dimensions=[ - float(fields[9]), - float(fields[10]), - float(fields[11]), - ], - location=[ - float(fields[12]), - float(fields[13]), - float(fields[14]), - ], - rotation_y=float(fields[15]), - score=0 if len(fields) <= 15 else fields[16], - ) - else: - raise ValueError("format should be tracking or detection") - - if frame not in results: - results[frame] = [] - - if not (remove_dontcare and box.name == "DontCare"): - results[frame].append(box) - max_frame = max(max_frame, frame) - - if return_format == "tracking": - - result = [] - - for frame in range(max_frame): - if frame in results: - result.append(TrackingAnnotation3DList(results[frame])) - else: - result.append(TrackingAnnotation3DList([])) - - return result - elif return_format == "detection": - result = [] - - for frame in range(max_frame): - if frame in results: - result.append(BoundingBox3DList(results[frame])) - else: - result.append(BoundingBox3DList([])) - - return result - else: - raise ValueError("return_format should be tracking or detection") - for input_file, ground_truth_file in zip( self.inputs_files, self.ground_truths_files ): - input = load_file(os.path.join(self.inputs_path, input_file), self.inputs_format, "detection", remove_dontcare=True) - ground_truth = load_file(os.path.join(self.ground_truths_path, ground_truth_file), "tracking", "tracking") + input, _ = load_tracking_file( + os.path.join(self.inputs_path, input_file), self.inputs_format, "detection", remove_dontcare=True + ) + ground_truth, _ = load_tracking_file( + os.path.join(self.ground_truths_path, ground_truth_file), "tracking", "tracking" + ) data.append((input, ground_truth)) @@ -290,3 +150,230 @@ def load_file(file_path, format, return_format, remove_dontcare=False): def __len__(self): return len(self.data) + + +class LabeledTrackingPointCloudsDatasetIterator(DatasetIterator): + def __init__( + self, + lidar_path, + label_path, + calib_path, + image_path=None, + labels_format="tracking", # detection, tracking + num_point_features=4, + ): + super().__init__() + + self.lidar_path = lidar_path + self.label_path = label_path + self.calib_path = calib_path + self.image_path = image_path + self.num_point_features = num_point_features + + self.lidar_files = sorted(os.listdir(self.lidar_path)) + # self.label_files = sorted(os.listdir(self.label_path)) + # self.calib_files = sorted(os.listdir(self.calib_path)) + self.image_files = ( + sorted(os.listdir(self.image_path)) + if self.image_path is not None + else None + ) + + self.labels, self.max_id = load_tracking_file( + self.label_path, "tracking", labels_format, + ) + self.calib = parse_calib(self.calib_path) + + def __getitem__(self, idx): + points = np.fromfile( + os.path.join(self.lidar_path, self.lidar_files[idx]), + dtype=np.float32, + count=-1, + ).reshape([-1, self.num_point_features]) + target = self.labels[idx] if len(self.labels) > idx else TrackingAnnotation3DList([]) + + image_shape = ( + None + if self.image_files is None + else ( + np.array( + io.imread( + os.path.join(self.image_path, self.image_files[idx]) + ).shape[:2], + dtype=np.int32, + ) + ) + ) + + result = ( + PointCloudWithCalibration(points, self.calib, image_shape), + target, + ) + + return result + + def __len__(self): + return len(self.lidar_files) + + +def load_tracking_file( + file_path, format, return_format, remove_dontcare=False +): + + results = {} + max_frame = -1 + max_id = 0 + + with open(file_path) as f: + lines = [x.strip() for x in f.readlines()] + 
+ for line in lines: + fields = line.split(" ") + frame = int(float(fields[0])) + + if format == "tracking": + if return_format == "tracking": + box = TrackingAnnotation3D( + name=fields[2], + truncated=int(float(fields[3])), + occluded=int(float(fields[4])), + alpha=float(fields[5]), + bbox2d=[ + float(fields[6]), + float(fields[7]), + float(fields[8]), + float(fields[9]), + ], + dimensions=[ + float(fields[12]), + float(fields[10]), + float(fields[11]), + ], + location=[ + float(fields[13]), + float(fields[14]), + float(fields[15]), + ], + rotation_y=float(fields[16]), + score=0 if len(fields) <= 17 else fields[17], + frame=int(float(fields[0])), + id=int(float(fields[1])), + ) + elif return_format == "detection": + box = BoundingBox3D( + name=fields[2], + truncated=int(float(fields[3])), + occluded=int(float(fields[4])), + alpha=float(fields[5]), + bbox2d=[ + float(fields[6]), + float(fields[7]), + float(fields[8]), + float(fields[9]), + ], + dimensions=[ + float(fields[12]), + float(fields[10]), + float(fields[11]), + ], + location=[ + float(fields[13]), + float(fields[14]), + float(fields[15]), + ], + rotation_y=float(fields[16]), + score=0 if len(fields) <= 17 else fields[17], + ) + else: + raise ValueError( + "return_format should be tracking or detection" + ) + elif format == "detection": + if return_format == "tracking": + box = TrackingAnnotation3D( + name=fields[1], + truncated=int(float(fields[2])), + occluded=int(float(fields[3])), + alpha=float(fields[4]), + bbox2d=[ + float(fields[5]), + float(fields[6]), + float(fields[7]), + float(fields[8]), + ], + dimensions=[ + float(fields[11]), + float(fields[9]), + float(fields[10]), + ], + location=[ + float(fields[12]), + float(fields[13]), + float(fields[14]), + ], + rotation_y=float(fields[15]), + score=0 if len(fields) <= 15 else fields[16], + frame=int(float(fields[0])), + id=-1, + ) + elif return_format == "detection": + box = BoundingBox3D( + name=fields[1], + truncated=int(float(fields[2])), + occluded=int(float(fields[3])), + alpha=float(fields[4]), + bbox2d=[ + float(fields[5]), + float(fields[6]), + float(fields[7]), + float(fields[8]), + ], + dimensions=[ + float(fields[11]), + float(fields[9]), + float(fields[10]), + ], + location=[ + float(fields[12]), + float(fields[13]), + float(fields[14]), + ], + rotation_y=float(fields[15]), + score=0 if len(fields) <= 15 else fields[16], + ) + else: + raise ValueError("format should be tracking or detection") + + if frame not in results: + results[frame] = [] + max_frame = max(max_frame, frame) + + if not (remove_dontcare and box.name == "DontCare"): + results[frame].append(box) + + if isinstance(box, TrackingAnnotation3D): + max_id = max(max_id, box.id) + + if return_format == "tracking": + + result = [] + + for frame in range(max_frame): + if frame in results: + result.append(TrackingAnnotation3DList(results[frame])) + else: + result.append(TrackingAnnotation3DList([])) + + return result, max_id + elif return_format == "detection": + result = [] + + for frame in range(max_frame): + if frame in results: + result.append(BoundingBox3DList(results[frame])) + else: + result.append(BoundingBox3DList([])) + + return result, max_id + else: + raise ValueError("return_format should be tracking or detection") diff --git a/src/opendr/perception/panoptic_segmentation/README.md b/src/opendr/perception/panoptic_segmentation/README.md index 1fc4b77ea2..7f5b602dd3 100644 --- a/src/opendr/perception/panoptic_segmentation/README.md +++ b/src/opendr/perception/panoptic_segmentation/README.md @@ 
-36,7 +36,7 @@ Please note that the original repository is heavily based on ## Example Usage -More code snippets can be found in [example_usage.py](../../../../projects/perception/panoptic_segmentation/efficient_ps/example_usage.py) with the corresponding [readme](../../../../projects/perception/panoptic_segmentation/efficient_ps/README.md). +More code snippets can be found in [example_usage.py](../../../../projects/python/perception/panoptic_segmentation/efficient_ps/example_usage.py) with the corresponding [readme](../../../../projects/python/perception/panoptic_segmentation/efficient_ps/README.md). **Prepare the downloaded Cityscapes dataset** (see the [datasets' readme](./datasets/README.md) as well) ```python diff --git a/src/opendr/perception/panoptic_segmentation/efficient_ps/algorithm/EfficientPS b/src/opendr/perception/panoptic_segmentation/efficient_ps/algorithm/EfficientPS index d03deab54e..e1c92c301b 160000 --- a/src/opendr/perception/panoptic_segmentation/efficient_ps/algorithm/EfficientPS +++ b/src/opendr/perception/panoptic_segmentation/efficient_ps/algorithm/EfficientPS @@ -1 +1 @@ -Subproject commit d03deab54edc5da15ed63318b3d1b14fb9712441 +Subproject commit e1c92c301b8d2a9c582797ab3cad203909f2fa9d diff --git a/src/opendr/perception/panoptic_segmentation/efficient_ps/dependencies.ini b/src/opendr/perception/panoptic_segmentation/efficient_ps/dependencies.ini index 1c7a1de4c2..02c0616514 100644 --- a/src/opendr/perception/panoptic_segmentation/efficient_ps/dependencies.ini +++ b/src/opendr/perception/panoptic_segmentation/efficient_ps/dependencies.ini @@ -2,6 +2,7 @@ python= torch==1.9.0 torchvision==0.10.0 + protobuf<=3.20.0 tqdm mmcv==0.5.9 future diff --git a/src/opendr/perception/panoptic_segmentation/efficient_ps/efficient_ps_learner.py b/src/opendr/perception/panoptic_segmentation/efficient_ps/efficient_ps_learner.py index 469bd016c1..4036a82c12 100644 --- a/src/opendr/perception/panoptic_segmentation/efficient_ps/efficient_ps_learner.py +++ b/src/opendr/perception/panoptic_segmentation/efficient_ps/efficient_ps_learner.py @@ -306,17 +306,18 @@ def infer(self, warnings.warn('The current model has not been trained.') self.model.eval() - # Build the data pipeline - test_pipeline = Compose(self._cfg.test_pipeline[1:]) - device = next(self.model.parameters()).device - - # Convert to the format expected by the mmdetection API single_image_mode = False if isinstance(batch, Image): batch = [batch] single_image_mode = True + + # Convert to the format expected by the mmdetection API mmdet_batch = [] + device = next(self.model.parameters()).device for img in batch: + # Change the processing size according to the input image + self._cfg.test_pipeline[1:][0]['img_scale'] = batch[0].data.shape[1:] + test_pipeline = Compose(self._cfg.test_pipeline[1:]) # Convert from OpenDR convention (CHW/RGB) to the expected format (HWC/BGR) img_ = img.convert('channels_last', 'bgr') mmdet_img = {'filename': None, 'img': img_, 'img_shape': img_.shape, 'ori_shape': img_.shape} @@ -455,15 +456,15 @@ def download(path: str, mode: str='model', trained_on: str='cityscapes') -> str: """ if mode == 'model': models = { - 'cityscapes': f'{OPENDR_SERVER_URL}perception/panoptic_segmentation/models/model_cityscapes.pth', - 'kitti': f'{OPENDR_SERVER_URL}perception/panoptic_segmentation/models/model_kitti.pth' + 'cityscapes': f'{OPENDR_SERVER_URL}perception/panoptic_segmentation/efficient_ps/models/model_cityscapes.pth', + 'kitti': 
f'{OPENDR_SERVER_URL}perception/panoptic_segmentation/efficient_ps/models/model_kitti.pth' } if trained_on not in models.keys(): raise ValueError(f'Could not find model weights pre-trained on {trained_on}. ' f'Valid options are {list(models.keys())}') url = models[trained_on] elif mode == 'test_data': - url = f'{OPENDR_SERVER_URL}perception/panoptic_segmentation/test_data/test_data.zip' + url = f'{OPENDR_SERVER_URL}perception/panoptic_segmentation/efficient_ps/test_data.zip' else: raise ValueError('Invalid mode. Valid options are ["model", "test_data"]') @@ -481,8 +482,12 @@ def update_to(b=1, bsize=1, total=None): return update_to - with tqdm(unit='B', unit_scale=True, unit_divisor=1024, miniters=1, desc=f'Downloading {filename}') as pbar: - urllib.request.urlretrieve(url, filename, pbar_hook(pbar)) + if os.path.exists(filename) and os.path.isfile(filename): + print(f'File already downloaded: {filename}') + else: + with tqdm(unit='B', unit_scale=True, unit_divisor=1024, miniters=1, desc=f'Downloading {filename}') \ + as pbar: + urllib.request.urlretrieve(url, filename, pbar_hook(pbar)) return filename @staticmethod diff --git a/src/opendr/perception/pose_estimation/__init__.py b/src/opendr/perception/pose_estimation/__init__.py index d0d7c3cc14..66e9725a6a 100644 --- a/src/opendr/perception/pose_estimation/__init__.py +++ b/src/opendr/perception/pose_estimation/__init__.py @@ -1,6 +1,8 @@ from opendr.perception.pose_estimation.lightweight_open_pose.lightweight_open_pose_learner import \ LightweightOpenPoseLearner +from opendr.perception.pose_estimation.hr_pose_estimation.high_resolution_learner import \ + HighResolutionPoseEstimationLearner from opendr.perception.pose_estimation.lightweight_open_pose.utilities import draw, get_bbox -__all__ = ['LightweightOpenPoseLearner', 'draw', 'get_bbox'] +__all__ = ['LightweightOpenPoseLearner', 'draw', 'get_bbox', 'HighResolutionPoseEstimationLearner'] diff --git a/src/opendr/perception/pose_estimation/dependencies.ini b/src/opendr/perception/pose_estimation/dependencies.ini index f4628452dd..6371274c20 100644 --- a/src/opendr/perception/pose_estimation/dependencies.ini +++ b/src/opendr/perception/pose_estimation/dependencies.ini @@ -1,9 +1,10 @@ [runtime] # 'python' and 'python-dependencies' keys expect a value in the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format -python-dependencies=cython +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +python-dependencies=Cython python=torch==1.9.0 torchvision==0.10.0 + protobuf<=3.20.0 tensorboardX>=2.0 opencv-python==4.5.1.48 matplotlib>=2.2.2 diff --git a/src/opendr/perception/pose_estimation/hr_pose_estimation/README.md b/src/opendr/perception/pose_estimation/hr_pose_estimation/README.md new file mode 100644 index 0000000000..e9a89a99ca --- /dev/null +++ b/src/opendr/perception/pose_estimation/hr_pose_estimation/README.md @@ -0,0 +1,11 @@ +# OpenDR Pose Estimation - High Resolution Open Pose + +This folder contains the OpenDR Learner and Target classes implemented for the high resolution pose estimation task. + + +# Sources + +Large parts of the Learner and utilities.py code are taken from [Daniil-Osokin/lightweight-human-pose-estimation.pytorch]( +https://github.com/Daniil-Osokin/lightweight-human-pose-estimation.pytorch) with modifications to make them compatible +with OpenDR specifications. 
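With the `__init__.py` change above, the new high-resolution learner is exported next to the existing lightweight one; a minimal construction sketch using only parameters visible in its constructor (shown further below), with illustrative values:

```python
# Minimal construction sketch of the newly exported learner; parameter values are illustrative.
from opendr.perception.pose_estimation import HighResolutionPoseEstimationLearner

learner = HighResolutionPoseEstimationLearner(
    device='cpu',
    first_pass_height=360,         # target height of the rough heatmap pass
    second_pass_height=540,        # target height of the refined pose-estimation pass
    percentage_arround_crop=0.3,   # extra padding kept around the detected person crop
    heatmap_threshold=0.1,         # PAF activations below this are zeroed before cropping
)
```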
+ diff --git a/src/opendr/perception/pose_estimation/hr_pose_estimation/__init__.py b/src/opendr/perception/pose_estimation/hr_pose_estimation/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/opendr/perception/pose_estimation/hr_pose_estimation/high_resolution_learner.py b/src/opendr/perception/pose_estimation/hr_pose_estimation/high_resolution_learner.py new file mode 100644 index 0000000000..083af21255 --- /dev/null +++ b/src/opendr/perception/pose_estimation/hr_pose_estimation/high_resolution_learner.py @@ -0,0 +1,603 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.""" + +# General imports +import torchvision.transforms +import os +import cv2 +import torch +import json +import numpy as np +from tqdm import tqdm + +from urllib.request import urlretrieve + +# OpenDR engine imports +from opendr.engine.data import Image +from opendr.engine.target import Pose +from opendr.engine.constants import OPENDR_SERVER_URL + +# OpenDR lightweight_open_pose imports +from opendr.perception.pose_estimation.lightweight_open_pose.lightweight_open_pose_learner import \ + LightweightOpenPoseLearner +from opendr.perception.pose_estimation.lightweight_open_pose.algorithm.modules.load_state import \ + load_state +from opendr.perception.pose_estimation.lightweight_open_pose.algorithm.modules.keypoints import \ + extract_keypoints, group_keypoints +from opendr.perception.pose_estimation.lightweight_open_pose.algorithm.val import \ + convert_to_coco_format, run_coco_eval, normalize, pad_width + + +class HighResolutionPoseEstimationLearner(LightweightOpenPoseLearner): + + def __init__(self, device='cuda', backbone='mobilenet', + temp_path='temp', mobilenet_use_stride=True, mobilenetv2_width=1.0, shufflenet_groups=3, + num_refinement_stages=2, batches_per_iter=1, base_height=256, + first_pass_height=360, second_pass_height=540, percentage_arround_crop=0.3, heatmap_threshold=0.1, + experiment_name='default', num_workers=8, weights_only=True, output_name='detections.json', + multiscale=False, scales=None, visualize=False, + img_mean=np.array([128, 128, 128], np.float32), img_scale=np.float32(1 / 256), pad_value=(0, 0, 0), + half_precision=False): + + super(HighResolutionPoseEstimationLearner, self).__init__(device=device, backbone=backbone, temp_path=temp_path, + mobilenet_use_stride=mobilenet_use_stride, + mobilenetv2_width=mobilenetv2_width, + shufflenet_groups=shufflenet_groups, + num_refinement_stages=num_refinement_stages, + batches_per_iter=batches_per_iter, + base_height=base_height, + experiment_name=experiment_name, + num_workers=num_workers, weights_only=weights_only, + output_name=output_name, multiscale=multiscale, + scales=scales, visualize=visualize, img_mean=img_mean, + img_scale=img_scale, pad_value=pad_value, + half_precision=half_precision) + + self.first_pass_height = first_pass_height + self.second_pass_height = second_pass_height + self.perc = percentage_arround_crop + self.threshold = heatmap_threshold + + def 
__first_pass(self, img): + """ + This method generates a rough heatmap of the input image in order to specify the approximate location + of humans in the picture. + + :param img: input image for heatmap generation + :type img: numpy.ndarray + + :return: returns the Part Affinity Fields (PAFs) of the humans inside the image + :rtype: numpy.ndarray + """ + + if 'cuda' in self.device: + tensor_img = torch.from_numpy(img).permute(2, 0, 1).unsqueeze(0).float().cuda() + tensor_img = tensor_img.cuda() + if self.half: + tensor_img = tensor_img.half() + else: + tensor_img = torch.from_numpy(img).permute(2, 0, 1).unsqueeze(0).float().cpu() + + stages_output = self.model(tensor_img) + + stage2_pafs = stages_output[-1] + pafs = np.transpose(stage2_pafs.squeeze().cpu().data.numpy(), (1, 2, 0)) + return pafs + + def __second_pass(self, img, net_input_height_size, max_width, stride, upsample_ratio, + pad_value=(0, 0, 0), + img_mean=np.array([128, 128, 128], np.float32), img_scale=np.float32(1 / 256)): + """ + This method detects the keypoints and estimates the pose of humans using the cropped image from the + previous step (__first_pass). + + :param img: input image for heatmap generation + :type img: numpy.ndarray + :param net_input_height_size: the height to which the input image will be resized for inference + :type net_input_height_size: int + :param max_width: this parameter is the maximum width that the resized image should have. It is introduced to + avoid cropping images with abnormal ratios, e.g. (30, 800) + :type max_width: int + :param upsample_ratio: Defines the amount of upsampling to be performed on the heatmaps and PAFs when resizing, + defaults to 4 + :type upsample_ratio: int, optional + + :returns: the heatmaps of human figures, the part affinity fields (pafs), the scale of the resized image compared + to the initial one and the padding around the image + :rtype: heatmaps, pafs -> numpy.ndarray + scale -> float + pad -> list + """ + + height, width, _ = img.shape + scale = net_input_height_size / height + img_ratio = width / height + if img_ratio > 6: + scale = max_width / width + + scaled_img = cv2.resize(img, (0, 0), fx=scale, fy=scale, interpolation=cv2.INTER_LINEAR) + scaled_img = normalize(scaled_img, img_mean, img_scale) + min_dims = [net_input_height_size, max(scaled_img.shape[1], net_input_height_size)] + padded_img, pad = pad_width(scaled_img, stride, pad_value, min_dims) + + if 'cuda' in self.device: + tensor_img = torch.from_numpy(padded_img).permute(2, 0, 1).unsqueeze(0).float().cuda() + tensor_img = tensor_img.cuda() + if self.half: + tensor_img = tensor_img.half() + else: + tensor_img = torch.from_numpy(padded_img).permute(2, 0, 1).unsqueeze(0).float().cpu() + + stages_output = self.model(tensor_img) + + stage2_heatmaps = stages_output[-2] + heatmaps = np.transpose(stage2_heatmaps.squeeze().cpu().data.numpy(), (1, 2, 0)) + heatmaps = heatmaps.astype(np.float32) + heatmaps = cv2.resize(heatmaps, (0, 0), fx=upsample_ratio, fy=upsample_ratio, interpolation=cv2.INTER_CUBIC) + + stage2_pafs = stages_output[-1] + pafs = np.transpose(stage2_pafs.squeeze().cpu().data.numpy(), (1, 2, 0)) + pafs = pafs.astype(np.float32) + pafs = cv2.resize(pafs, (0, 0), fx=upsample_ratio, fy=upsample_ratio, interpolation=cv2.INTER_CUBIC) + + return heatmaps, pafs, scale, pad + + @staticmethod + def __pooling(img, kernel): # Pooling on input image for dimension reduction + """This method applies a pooling filter on an input image in order to resize it to a fixed shape + + :param img: input image for resizing
+ :rtype img: engine.data.Image class object + :param kernel: the kernel size of the pooling filter + :type kernel: int + """ + pool_img = torchvision.transforms.ToTensor()(img) + pool_img = pool_img.unsqueeze(0) + pool_img = torch.nn.functional.avg_pool2d(pool_img, kernel) + pool_img = pool_img.squeeze(0).permute(1, 2, 0).cpu().float().numpy() + return pool_img + + def fit(self, dataset, val_dataset=None, logging_path='', logging_flush_secs=30, + silent=False, verbose=True, epochs=None, use_val_subset=True, val_subset_size=250, + images_folder_name="train2017", annotations_filename="person_keypoints_train2017.json", + val_images_folder_name="val2017", val_annotations_filename="person_keypoints_val2017.json"): + """This method is not used in this implementation.""" + + raise NotImplementedError + + def optimize(self, do_constant_folding=False): + """This method is not used in this implementation.""" + + raise NotImplementedError + + def reset(self): + """This method is not used in this implementation.""" + return NotImplementedError + + def save(self, path, verbose=False): + """This method is not used in this implementation.""" + return NotImplementedError + + def eval(self, dataset, silent=False, verbose=True, use_subset=True, subset_size=250, upsample_ratio=4, + images_folder_name="val2017", annotations_filename="person_keypoints_val2017.json"): + """ + This method is used to evaluate a trained model on an evaluation dataset. + + :param dataset: object that holds the evaluation dataset. + :type dataset: ExternalDataset class object or DatasetIterator class object + :param silent: if set to True, disables all printing of evaluation progress reports and other + information to STDOUT, defaults to 'False' + :type silent: bool, optional + :param verbose: if set to True, enables the maximum verbosity, defaults to 'True' + :type verbose: bool, optional + :param use_subset: If set to True, a subset of the validation dataset is created and used in + evaluation, defaults to 'True' + :type use_subset: bool, optional + :param subset_size: Controls the size of the validation subset, defaults to '250' + :type subset_size: int, optional + param upsample_ratio: Defines the amount of upsampling to be performed on the heatmaps and PAFs + when resizing,defaults to 4 + :type upsample_ratio: int, optional + :param images_folder_name: Folder name that contains the dataset images. This folder should be contained + in the dataset path provided. Note that this is a folder name, not a path, defaults to 'val2017' + :type images_folder_name: str, optional + :param annotations_filename: Filename of the annotations json file. 
This file should be contained in the + dataset path provided, defaults to 'person_keypoints_val2017.json' + :type annotations_filename: str, optional + + :returns: returns stats regarding evaluation + :rtype: dict + """ + + data = super(HighResolutionPoseEstimationLearner, # NOQA + self)._LightweightOpenPoseLearner__prepare_val_dataset(dataset, use_subset=use_subset, + subset_name="val_subset.json", + subset_size=subset_size, + images_folder_default_name=images_folder_name, + annotations_filename=annotations_filename, + verbose=verbose and not silent) + # Model initialization if needed + if self.model is None and self.checkpoint_load_iter != 0: + # No model loaded, initializing new + self.init_model() + # User set checkpoint_load_iter, so they want to load a checkpoint + # Try to find the checkpoint_load_iter checkpoint + checkpoint_name = "checkpoint_iter_" + str(self.checkpoint_load_iter) + ".pth" + checkpoints_folder = os.path.join(self.parent_dir, '{}_checkpoints'.format(self.experiment_name)) + full_path = os.path.join(checkpoints_folder, checkpoint_name) + try: + checkpoint = torch.load(full_path, map_location=torch.device(self.device)) + except FileNotFoundError as e: + e.strerror = "File " + checkpoint_name + " not found inside checkpoints_folder, " \ + "provided checkpoint_load_iter (" + \ + str(self.checkpoint_load_iter) + \ + ") doesn't correspond to a saved checkpoint.\nNo such file or directory." + raise e + if not silent and verbose: + print("Loading checkpoint:", full_path) + + load_state(self.model, checkpoint) + elif self.model is None: + raise AttributeError("self.model is None. Please load a model or set checkpoint_load_iter.") + + self.model = self.model.eval() # Change model state to evaluation + self.model.to(self.device) + if "cuda" in self.device: + self.model = self.model.to(self.device) + if self.half: + self.model.half() + + if self.multiscale: + self.scales = [0.5, 1.0, 1.5, 2.0] + + coco_result = [] + num_keypoints = Pose.num_kpts + + pbar_eval = None + if not silent: + pbar_desc = "Evaluation progress" + pbar_eval = tqdm(desc=pbar_desc, total=len(data), bar_format="{l_bar}%s{bar}{r_bar}" % '\x1b[38;5;231m') + + img_height = data[0]['img'].shape[0] + + if img_height in (1080, 1440): + offset = 200 + elif img_height == 720: + offset = 50 + else: + offset = 0 + + for sample in data: + file_name = sample['file_name'] + img = sample['img'] + h, w, _ = img.shape + max_width = w + kernel = int(h / self.first_pass_height) + if kernel > 0: + pool_img = self.__pooling(img, kernel) + + else: + pool_img = img + + # ------- Heatmap Generation ------- + avg_pafs = self.__first_pass(pool_img) + avg_pafs = avg_pafs.astype(np.float32) + + pafs_map = cv2.blur(avg_pafs, (5, 5)) + pafs_map[pafs_map < self.threshold] = 0 + + heatmap = pafs_map.sum(axis=2) + heatmap = heatmap * 100 + heatmap = heatmap.astype(np.uint8) + heatmap = cv2.blur(heatmap, (5, 5)) + + contours, hierarchy = cv2.findContours(heatmap, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) + count = [] + coco_keypoints = [] + + if len(contours) > 0: + for x in contours: + count.append(x) + xdim = [] + ydim = [] + + for j in range(len(count)): # Loop for every human (every contour) + for i in range(len(count[j])): + xdim.append(count[j][i][0][0]) + ydim.append(count[j][i][0][1]) + + h, w, _ = pool_img.shape + xmin = int(np.floor(min(xdim))) * int((w / heatmap.shape[1])) * kernel + xmax = int(np.floor(max(xdim))) * int((w / heatmap.shape[1])) * kernel + ymin = int(np.floor(min(ydim))) * int((h / heatmap.shape[0])) * kernel + 
ymax = int(np.floor(max(ydim))) * int((h / heatmap.shape[0])) * kernel + + extra_pad_x = int(self.perc * (xmax - xmin)) # Adding an extra pad around cropped image + extra_pad_y = int(self.perc * (ymax - ymin)) + + if xmin - extra_pad_x > 0: + xmin = xmin - extra_pad_x + if xmax + extra_pad_x < img.shape[1]: + xmax = xmax + extra_pad_x + if ymin - extra_pad_y > 0: + ymin = ymin - extra_pad_y + if ymax + extra_pad_y < img.shape[0]: + ymax = ymax + extra_pad_y + + if (xmax - xmin) > 40 and (ymax - ymin) > 40: + crop_img = img[ymin:ymax, xmin:xmax] + else: + crop_img = img[offset:img.shape[0], offset:img.shape[1]] + + h, w, _ = crop_img.shape + + # ------- Second pass of the image, inference for pose estimation ------- + avg_heatmaps, avg_pafs, scale, pad = self.__second_pass(crop_img, self.second_pass_height, max_width, + self.stride, upsample_ratio) + total_keypoints_num = 0 + all_keypoints_by_type = [] + for kpt_idx in range(18): + total_keypoints_num += extract_keypoints(avg_heatmaps[:, :, kpt_idx], all_keypoints_by_type, + total_keypoints_num) + + pose_entries, all_keypoints = group_keypoints(all_keypoints_by_type, avg_pafs) + + for kpt_id in range(all_keypoints.shape[0]): + all_keypoints[kpt_id, 0] = (all_keypoints[kpt_id, 0] * self.stride / upsample_ratio - pad[1]) / scale + all_keypoints[kpt_id, 1] = (all_keypoints[kpt_id, 1] * self.stride / upsample_ratio - pad[0]) / scale + + for i in range(all_keypoints.shape[0]): + for j in range(all_keypoints.shape[1]): + if j == 0: # Adjust offset if needed for evaluation on our HR datasets + all_keypoints[i][j] = round((all_keypoints[i][j] + xmin) - offset) + if j == 1: # Adjust offset if needed for evaluation on our HR datasets + all_keypoints[i][j] = round((all_keypoints[i][j] + ymin) - offset) + + current_poses = [] + for n in range(len(pose_entries)): + if len(pose_entries[n]) == 0: + continue + pose_keypoints = np.ones((num_keypoints, 2), dtype=np.int32) * -1 + for kpt_id in range(num_keypoints): + if pose_entries[n][kpt_id] != -1.0: # keypoint was found + pose_keypoints[kpt_id, 0] = int(all_keypoints[int(pose_entries[n][kpt_id]), 0]) + pose_keypoints[kpt_id, 1] = int(all_keypoints[int(pose_entries[n][kpt_id]), 1]) + pose = Pose(pose_keypoints, pose_entries[n][18]) + current_poses.append(pose) + + coco_keypoints, scores = convert_to_coco_format(pose_entries, all_keypoints) + + image_id = int(file_name[0:file_name.rfind('.')]) + + for idx in range(len(coco_keypoints)): + coco_result.append({ + 'image_id': image_id, + 'category_id': 1, # person + 'keypoints': coco_keypoints[idx], + 'score': scores[idx] + }) + + if self.visualize: + for keypoints in coco_keypoints: + for idx in range(len(keypoints) // 3): + cv2.circle(img, (int(keypoints[idx * 3] + offset), int(keypoints[idx * 3 + 1]) + offset), + 3, (255, 0, 255), -1) + cv2.imshow('keypoints', img) + key = cv2.waitKey() + if key == 27: # esc + return + if not silent: + pbar_eval.update(1) + + with open(self.output_name, 'w') as f: + json.dump(coco_result, f, indent=4) + if len(coco_result) != 0: + if use_subset: + result = run_coco_eval(os.path.join(dataset.path, "val_subset.json"), + self.output_name, verbose=not silent) + else: + result = run_coco_eval(os.path.join(dataset.path, annotations_filename), + self.output_name, verbose=not silent) + return {"average_precision": result.stats[0:5], "average_recall": result.stats[5:]} + else: + if not silent and verbose: + print("Evaluation ended with no detections.") + return {"average_precision": [0.0 for _ in range(5)], "average_recall": [0.0 for 
_ in range(5)]} + + def infer(self, img, upsample_ratio=4, stride=8, track=True, smooth=True, + multiscale=False): + """ + This method is used to perform pose estimation on an image. + + :param img: image to run inference on + :rtype img: engine.data.Image class object + :param upsample_ratio: Defines the amount of upsampling to be performed on the heatmaps and PAFs + when resizing,defaults to 4 + :type upsample_ratio: int, optional + :param stride: Defines the stride value for creating a padded image + :type stride: int,optional + :param track: If True, infer propagates poses ids from previous frame results to track poses, + defaults to 'True' + :type track: bool, optional + :param smooth: If True, smoothing is performed on pose keypoints between frames, defaults to 'True' + :type smooth: bool, optional + :param multiscale: Specifies whether evaluation will run in the predefined multiple scales setup or not. + :type multiscale: bool,optional + + :return: Returns a list of engine.target.Pose objects, where each holds a pose, or returns an empty list + if no detections were made. + :rtype: list of engine.target.Pose objects + """ + current_poses = [] + + offset = 0 + + num_keypoints = Pose.num_kpts + + if not isinstance(img, Image): + img = Image(img) + + # Bring image into the appropriate format for the implementation + img = img.convert(format='channels_last', channel_order='bgr') + + h, w, _ = img.shape + max_width = w + + kernel = int(h / self.first_pass_height) + if kernel > 0: + pool_img = self.__pooling(img, kernel) + else: + pool_img = img + + # ------- Heatmap Generation ------- + avg_pafs = self.__first_pass(pool_img) + avg_pafs = avg_pafs.astype(np.float32) + pafs_map = cv2.blur(avg_pafs, (5, 5)) + + pafs_map[pafs_map < self.threshold] = 0 + + heatmap = pafs_map.sum(axis=2) + heatmap = heatmap * 100 + heatmap = heatmap.astype(np.uint8) + heatmap = cv2.blur(heatmap, (5, 5)) + + contours, hierarchy = cv2.findContours(heatmap, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) + count = [] + + if len(contours) > 0: + for x in contours: + count.append(x) + xdim = [] + ydim = [] + + for j in range(len(count)): # Loop for every human (every contour) + for i in range(len(count[j])): + xdim.append(count[j][i][0][0]) + ydim.append(count[j][i][0][1]) + + h, w, _ = pool_img.shape + xmin = int(np.floor(min(xdim))) * int((w / heatmap.shape[1])) * kernel + xmax = int(np.floor(max(xdim))) * int((w / heatmap.shape[1])) * kernel + ymin = int(np.floor(min(ydim))) * int((h / heatmap.shape[0])) * kernel + ymax = int(np.floor(max(ydim))) * int((h / heatmap.shape[0])) * kernel + + extra_pad_x = int(self.perc * (xmax - xmin)) # Adding an extra pad around cropped image + extra_pad_y = int(self.perc * (ymax - ymin)) + + if xmin - extra_pad_x > 0: + xmin = xmin - extra_pad_x + if xmax + extra_pad_x < img.shape[1]: + xmax = xmax + extra_pad_x + if ymin - extra_pad_y > 0: + ymin = ymin - extra_pad_y + if ymax + extra_pad_y < img.shape[0]: + ymax = ymax + extra_pad_y + + if (xmax - xmin) > 40 and (ymax - ymin) > 40: + crop_img = img[ymin:ymax, xmin:xmax] + else: + crop_img = img[offset:img.shape[0], offset:img.shape[1]] + + h, w, _ = crop_img.shape + + # ------- Second pass of the image, inference for pose estimation ------- + avg_heatmaps, avg_pafs, scale, pad = self.__second_pass(crop_img, self.second_pass_height, + max_width, stride, upsample_ratio) + + total_keypoints_num = 0 + all_keypoints_by_type = [] + for kpt_idx in range(18): + total_keypoints_num += extract_keypoints(avg_heatmaps[:, :, kpt_idx], 
all_keypoints_by_type, + total_keypoints_num) + + pose_entries, all_keypoints = group_keypoints(all_keypoints_by_type, avg_pafs) + + for kpt_id in range(all_keypoints.shape[0]): + all_keypoints[kpt_id, 0] = (all_keypoints[kpt_id, 0] * stride / upsample_ratio - pad[1]) / scale + all_keypoints[kpt_id, 1] = (all_keypoints[kpt_id, 1] * stride / upsample_ratio - pad[0]) / scale + + for i in range(all_keypoints.shape[0]): + for j in range(all_keypoints.shape[1]): + if j == 0: # Adjust offset if needed for evaluation on our HR datasets + all_keypoints[i][j] = round((all_keypoints[i][j] + xmin) - offset) + if j == 1: # Adjust offset if needed for evaluation on our HR datasets + all_keypoints[i][j] = round((all_keypoints[i][j] + ymin) - offset) + + current_poses = [] + for n in range(len(pose_entries)): + if len(pose_entries[n]) == 0: + continue + pose_keypoints = np.ones((num_keypoints, 2), dtype=np.int32) * -1 + for kpt_id in range(num_keypoints): + if pose_entries[n][kpt_id] != -1.0: # keypoint was found + pose_keypoints[kpt_id, 0] = int(all_keypoints[int(pose_entries[n][kpt_id]), 0]) + pose_keypoints[kpt_id, 1] = int(all_keypoints[int(pose_entries[n][kpt_id]), 1]) + pose = Pose(pose_keypoints, pose_entries[n][18]) + current_poses.append(pose) + + return current_poses + + def download(self, path=None, mode="pretrained", verbose=False, + url=OPENDR_SERVER_URL + "perception/pose_estimation/lightweight_open_pose/", + image_resolution=1080): + """ + Download utility for various Lightweight Open Pose components. Downloads files depending on mode and + saves them in the path provided. It supports downloading: + 1) the default mobilenet pretrained model + 2) mobilenet, mobilenetv2 and shufflenet weights needed for training + 3) a test dataset with a single COCO image and its annotation + :param path: Local path to save the files, defaults to self.temp_path if None + :type path: str, path, optional + :param mode: What file to download, can be one of "pretrained", "weights", "test_data", defaults to "pretrained" + :type mode: str, optional + :param verbose: Whether to print messages in the console, defaults to False + :type verbose: bool, optional + :param url: URL of the FTP server, defaults to OpenDR FTP URL + :type url: str, optional + :param image_resolution: Resolution of the test images to download + :type image_resolution: int, optional + """ + valid_modes = ["weights", "pretrained", "test_data"] + if mode not in valid_modes: + raise UserWarning("mode parameter not valid:", mode, ", file should be one of:", valid_modes) + + if path is None: + path = self.temp_path + + if not os.path.exists(path): + os.makedirs(path) + + if mode in ("pretrained", "weights"): + super(HighResolutionPoseEstimationLearner, self).download(path=path, mode=mode, verbose=verbose, url=url) + elif mode == "test_data": + if verbose: + print("Downloading test data...") + if not os.path.exists(os.path.join(self.temp_path, "dataset")): + os.makedirs(os.path.join(self.temp_path, "dataset")) + if not os.path.exists(os.path.join(self.temp_path, "dataset", "image")): + os.makedirs(os.path.join(self.temp_path, "dataset", "image")) + # Path for high resolution data + url = OPENDR_SERVER_URL + "perception/pose_estimation/high_resolution_pose_estimation/" + # Download annotation file + file_url = os.path.join(url, "dataset", "annotation.json") + urlretrieve(file_url, os.path.join(self.temp_path, "dataset", "annotation.json")) + # Download test image + if image_resolution in (1080, 1440): + file_url = os.path.join(url, "dataset", 
"image", "000000000785_" + str(image_resolution) + ".jpg") + urlretrieve(file_url, os.path.join(self.temp_path, "dataset", "image", "000000000785_1080.jpg")) + else: + raise UserWarning("There are no data for this image resolution (only 1080 and 1440 are supported).") + + if verbose: + print("Test data download complete.") diff --git a/src/opendr/perception/pose_estimation/lightweight_open_pose/lightweight_open_pose_learner.py b/src/opendr/perception/pose_estimation/lightweight_open_pose/lightweight_open_pose_learner.py index d46fff3564..2305154c4b 100644 --- a/src/opendr/perception/pose_estimation/lightweight_open_pose/lightweight_open_pose_learner.py +++ b/src/opendr/perception/pose_estimation/lightweight_open_pose/lightweight_open_pose_learner.py @@ -90,9 +90,9 @@ def __init__(self, lr=4e-5, epochs=280, batch_size=80, device='cuda', backbone=' self.backbone = backbone.lower() self.half = half_precision - supportedBackbones = ["mobilenet", "mobilenetv2", "shufflenet"] - if self.backbone not in supportedBackbones: - raise ValueError(self.backbone + " not a valid backbone. Supported backbones:" + str(supportedBackbones)) + supported_backbones = ["mobilenet", "mobilenetv2", "shufflenet"] + if self.backbone not in supported_backbones: + raise ValueError(self.backbone + " not a valid backbone. Supported backbones:" + str(supported_backbones)) if self.backbone == "mobilenet": self.use_stride = mobilenet_use_stride else: @@ -127,6 +127,9 @@ def __init__(self, lr=4e-5, epochs=280, batch_size=80, device='cuda', backbone=' self.ort_session = None # ONNX runtime inference session self.model_train_state = True + if self.device == "cpu": + torch.set_flush_denormal(True) + def fit(self, dataset, val_dataset=None, logging_path='', logging_flush_secs=30, silent=False, verbose=True, epochs=None, use_val_subset=True, val_subset_size=250, images_folder_name="train2017", annotations_filename="person_keypoints_train2017.json", @@ -307,11 +310,11 @@ def fit(self, dataset, val_dataset=None, logging_path='', logging_flush_secs=30, batch_per_iter_idx = 0 pbar = None - pbarDesc = "" + pbar_desc = "" batch_index = 0 if not silent: - pbarDesc = "Epoch #" + str(epochId) + " progress" - pbar = tqdm(desc=pbarDesc, total=batches, bar_format="{l_bar}%s{bar}{r_bar}" % '\x1b[38;5;231m') + pbar_desc = "Epoch #" + str(epochId) + " progress" + pbar = tqdm(desc=pbar_desc, total=batches, bar_format="{l_bar}%s{bar}{r_bar}" % '\x1b[38;5;231m') for batch_data in train_loader: if batch_per_iter_idx == 0: optimizer.zero_grad() @@ -396,7 +399,7 @@ def fit(self, dataset, val_dataset=None, logging_path='', logging_flush_secs=30, eval_results_list.append(eval_results) if not silent: # Re-initialize outer tqdm - pbar = tqdm(desc=pbarDesc, initial=batch_index, total=batches, + pbar = tqdm(desc=pbar_desc, initial=batch_index, total=batches, bar_format="{l_bar}%s{bar}{r_bar}" % '\x1b[38;5;231m') if logging: file_writer.add_scalar(tag="Average Precision @IoU=0.5:0.95, area = all", @@ -530,8 +533,8 @@ def eval(self, dataset, silent=False, verbose=True, use_subset=True, subset_size pbar_eval = None if not silent: - pbarDesc = "Evaluation progress" - pbar_eval = tqdm(desc=pbarDesc, total=len(data), bar_format="{l_bar}%s{bar}{r_bar}" % '\x1b[38;5;231m') + pbar_desc = "Evaluation progress" + pbar_eval = tqdm(desc=pbar_desc, total=len(data), bar_format="{l_bar}%s{bar}{r_bar}" % '\x1b[38;5;231m') for sample in data: file_name = sample['file_name'] img = sample['img'] diff --git a/src/opendr/perception/skeleton_based_action_recognition/__init__.py 
b/src/opendr/perception/skeleton_based_action_recognition/__init__.py index 0dc3b48187..d981079b96 100644 --- a/src/opendr/perception/skeleton_based_action_recognition/__init__.py +++ b/src/opendr/perception/skeleton_based_action_recognition/__init__.py @@ -1,7 +1,21 @@ -from opendr.perception.skeleton_based_action_recognition.spatio_temporal_gcn_learner import SpatioTemporalGCNLearner -from opendr.perception.skeleton_based_action_recognition.progressive_spatio_temporal_gcn_learner \ - import ProgressiveSpatioTemporalGCNLearner -from opendr.perception.skeleton_based_action_recognition.algorithm.datasets.ntu_gendata import NTU60_CLASSES -from opendr.perception.skeleton_based_action_recognition.algorithm.datasets.kinetics_gendata import KINETICS400_CLASSES +from opendr.perception.skeleton_based_action_recognition.continual_stgcn_learner import CoSTGCNLearner +from opendr.perception.skeleton_based_action_recognition.spatio_temporal_gcn_learner import ( + SpatioTemporalGCNLearner, +) +from opendr.perception.skeleton_based_action_recognition.progressive_spatio_temporal_gcn_learner import ( + ProgressiveSpatioTemporalGCNLearner, +) +from opendr.perception.skeleton_based_action_recognition.algorithm.datasets.ntu_gendata import ( + NTU60_CLASSES, +) +from opendr.perception.skeleton_based_action_recognition.algorithm.datasets.kinetics_gendata import ( + KINETICS400_CLASSES, +) -__all__ = ['SpatioTemporalGCNLearner', 'ProgressiveSpatioTemporalGCNLearner', 'NTU60_CLASSES', 'KINETICS400_CLASSES'] +__all__ = [ + "CoSTGCNLearner", + "SpatioTemporalGCNLearner", + "ProgressiveSpatioTemporalGCNLearner", + "NTU60_CLASSES", + "KINETICS400_CLASSES", +] diff --git a/src/opendr/perception/skeleton_based_action_recognition/algorithm/models/co_agcn.py b/src/opendr/perception/skeleton_based_action_recognition/algorithm/models/co_agcn.py new file mode 100644 index 0000000000..2f67a95df4 --- /dev/null +++ b/src/opendr/perception/skeleton_based_action_recognition/algorithm/models/co_agcn.py @@ -0,0 +1,212 @@ +""" +Modified based on: https://github.com/open-mmlab/mmskeleton +""" +from collections import OrderedDict +import continual as co +import torch +from torch import nn +from opendr.perception.skeleton_based_action_recognition.algorithm.models.co_base import ( + CoModelBase, + CoSpatioTemporalBlock, + init_weights, +) +import numpy as np + + +class AdaptiveGraphConvolutionMod(nn.Module): + def __init__(self, in_channels, out_channels, A, bn_momentum=0.1, coff_embedding=4): + super(AdaptiveGraphConvolutionMod, self).__init__() + self.inter_c = out_channels // coff_embedding + self.graph_attn = nn.Parameter(torch.from_numpy(A.astype(np.float32))) + nn.init.constant_(self.graph_attn, 1) + self.A = nn.Parameter(torch.from_numpy(A.astype(np.float32)), requires_grad=False) + self.num_subset = 3 + self.g_conv = nn.ModuleList() + self.a_conv = nn.ModuleList() + self.b_conv = nn.ModuleList() + for i in range(self.num_subset): + self.g_conv.append(nn.Conv2d(in_channels, out_channels, 1)) + self.a_conv.append(nn.Conv2d(in_channels, self.inter_c, 1)) + self.b_conv.append(nn.Conv2d(in_channels, self.inter_c, 1)) + init_weights(self.g_conv[i], bs=self.num_subset) + init_weights(self.a_conv[i]) + init_weights(self.b_conv[i]) + + if in_channels != out_channels: + self.gcn_residual = nn.Sequential( + nn.Conv2d(in_channels, out_channels, 1), + nn.BatchNorm2d(out_channels, momentum=bn_momentum), + ) + init_weights(self.gcn_residual[0], bs=1) + init_weights(self.gcn_residual[1], bs=1) + else: + self.gcn_residual = lambda x: x + + 
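The module being set up here follows the adaptive graph convolution (AGCN) idea: the fixed skeleton adjacency A is complemented by a learnable offset (graph_attn) and a data-dependent term computed from two 1x1-convolution embeddings of the input, which the forward pass just below builds with einsum. A stripped-down sketch of the data-dependent adjacency follows; it collapses the time axis that the actual layer keeps per timestep, so treat it as an approximation rather than the patch's implementation:

import torch
import torch.nn.functional as F

def data_dependent_adjacency(theta, phi, inter_c):
    # theta, phi: (N, inter_c, T, V) embeddings from two 1x1 convolutions
    # similarity between every pair of joints, normalised over the last axis
    logits = torch.einsum("nctv,nctw->nvw", theta, phi) / inter_c  # (N, V, V)
    return F.softmax(logits, dim=-1)

# The layer adds this matrix to the (learnable) skeleton adjacency before
# aggregating joint features, letting the graph adapt to each input sample.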
self.bn = nn.BatchNorm2d(out_channels, momentum=bn_momentum) + init_weights(self.bn, bs=1e-6) + self.relu = nn.ReLU() + self.soft = nn.Softmax(-2) + + def forward(self, x): + N, C, T, V = x.size() + A = self.A + self.graph_attn + hidden = None + for i in range(self.num_subset): + A1 = self.a_conv[i](x).permute(0, 3, 1, 2).contiguous() + A2 = self.b_conv[i](x) + # Modified attention within timestep + A1 = self.soft(torch.einsum("nvct,nctw->nvwt", A1, A2) / self.inter_c) # N V V T + A1 = A1 + A[i].unsqueeze(0).unsqueeze(-1) + z = self.g_conv[i](torch.einsum("nctv,nvwt->nctv", x, A1)) + hidden = z + hidden if hidden is not None else z + hidden = self.bn(hidden) + hidden += self.gcn_residual(x) + return self.relu(hidden) + + +def CoAdaptiveGraphConvolution(in_channels, out_channels, A, bn_momentum=0.1): + return co.forward_stepping(AdaptiveGraphConvolutionMod(in_channels, out_channels, A, bn_momentum)) + + +class CoAGcnMod(CoModelBase): + def __init__( + self, + num_point=25, + num_person=2, + in_channels=3, + graph_type="ntu", + sequence_len: int = 300, + num_classes: int = 60, + loss_name="cross_entropy", + ): + CoModelBase.__init__( + self, num_point, num_person, in_channels, graph_type, sequence_len, num_classes, loss_name + ) + + # Shapes: num_channels, num_frames, num_vertices, num_skeletons + (C_in, T, _, _) = self.input_shape + A = self.graph.A + + # Pass in precise window-sizes to compensate propperly in BatchNorm modules + self.layers = co.Sequential( + OrderedDict( + [ + ( + "layer1", + CoSpatioTemporalBlock( + C_in, + 64, + A, + CoGraphConv=CoAdaptiveGraphConvolution, + padding=0, + window_size=T, + residual=False, + ), + ), + ( + "layer2", + CoSpatioTemporalBlock( + 64, + 64, + A, + CoGraphConv=CoAdaptiveGraphConvolution, + padding=0, + window_size=T - 1 * 8, + ), + ), + ( + "layer3", + CoSpatioTemporalBlock( + 64, + 64, + A, + CoGraphConv=CoAdaptiveGraphConvolution, + padding=0, + window_size=T - 2 * 8, + ), + ), + ( + "layer4", + CoSpatioTemporalBlock( + 64, + 64, + A, + CoGraphConv=CoAdaptiveGraphConvolution, + padding=0, + window_size=T - 3 * 8, + ), + ), + ( + "layer5", + CoSpatioTemporalBlock( + 64, + 128, + A, + CoGraphConv=CoAdaptiveGraphConvolution, + padding=0, + window_size=T - 4 * 8, + stride=1, + ), + ), + ( + "layer6", + CoSpatioTemporalBlock( + 128, + 128, + A, + CoGraphConv=CoAdaptiveGraphConvolution, + padding=0, + window_size=(T - 4 * 8) - 1 * 8, + ), + ), + ( + "layer7", + CoSpatioTemporalBlock( + 128, + 128, + A, + CoGraphConv=CoAdaptiveGraphConvolution, + padding=0, + window_size=(T - 4 * 8) - 2 * 8, + ), + ), + ( + "layer8", + CoSpatioTemporalBlock( + 128, + 256, + A, + CoGraphConv=CoAdaptiveGraphConvolution, + padding=0, + window_size=(T - 4 * 8) - 3 * 8, + stride=1, + ), + ), + ( + "layer9", + CoSpatioTemporalBlock( + 256, + 256, + A, + CoGraphConv=CoAdaptiveGraphConvolution, + padding=0, + window_size=((T - 4 * 8) - 3 * 8) - 1 * 8, + ), + ), + ( + "layer10", + CoSpatioTemporalBlock( + 256, + 256, + A, + CoGraphConv=CoAdaptiveGraphConvolution, + padding=0, + window_size=((T - 4 * 8) - 3 * 8) - 2 * 8, + ), + ), + ] + ) + ) + + # Other layers defined in CoModelBase.on_init_end + CoModelBase.on_init_end(self) diff --git a/src/opendr/perception/skeleton_based_action_recognition/algorithm/models/co_base.py b/src/opendr/perception/skeleton_based_action_recognition/algorithm/models/co_base.py new file mode 100644 index 0000000000..2227607ee5 --- /dev/null +++ b/src/opendr/perception/skeleton_based_action_recognition/algorithm/models/co_base.py @@ -0,0 +1,485 @@ 
+""" +Modified based on: https://github.com/LukasHedegaard/continual-skeletons +""" + +import math +from collections import OrderedDict +from typing import Sequence + +import continual as co +import numpy as np +import torch +import torch.nn as nn +from torch import Tensor +import torch.nn.functional as F +from numpy import prod +from torch.nn.modules.batchnorm import _BatchNorm +from torch.nn.modules.conv import _ConvNd +from logging import getLogger +import pytorch_lightning as pl + +from opendr.perception.skeleton_based_action_recognition.algorithm.graphs.nturgbd import NTUGraph +from opendr.perception.skeleton_based_action_recognition.algorithm.graphs.kinetics import ( + KineticsGraph, +) + +logger = getLogger(__name__) + + +class CoModelBase(pl.LightningModule, co.Sequential): + def __init__( + self, + num_point=25, + num_person=2, + in_channels=3, + graph_type="ntu", + sequence_len: int = 300, + num_classes=60, + loss_name="cross_entropy", + forward_mode: str = "clip", # choices=["clip", "frame"] + predict_after_frames: int = 0, + continual_temporal_fill: str = "zeros", # choices=["zeros", "replicate"] + pool_size: int = -1, + pool_padding: int = -1, + ): + pl.LightningModule.__init__(self) + self.forward_mode = forward_mode + self.predict_after_frames = predict_after_frames + self.continual_temporal_fill = continual_temporal_fill + self.pool_size = pool_size + self.pool_padding = pool_padding + self.num_point = num_point + self.num_person = num_person + self.in_channels = in_channels + self.graph_type = graph_type + self.sequence_len = sequence_len + self.num_classes = num_classes + self.loss_name = loss_name + self.input_shape = (in_channels, sequence_len, num_point, num_person) + if graph_type == "ntu" or num_point == 25: + self.graph = NTUGraph() + elif graph_type == "openpose" or num_point == 18: + self.graph = KineticsGraph() + + co.Sequential.__init__(self) + + def on_init_end(self): + # Shapes from Dataset: + # num_channels, num_frames, num_vertices, num_skeletons + (C_in, T, V, S) = self.input_shape + + def reshape1_fn(x): + return x.permute(0, 3, 2, 1).contiguous().view(-1, S * V * C_in) + + reshape1 = co.Lambda(reshape1_fn) + data_bn = nn.BatchNorm1d(S * C_in * V) + + def reshape2_fn(x): + return x.view(-1, S, V, C_in).permute(0, 1, 3, 2).contiguous().view(-1, C_in, V) + + reshape2 = co.Lambda(reshape2_fn) + + spatial_pool = co.Lambda(lambda x: x.view(-1, S, 256, V).mean(3).mean(1)) + + pool_size = self.pool_size + if pool_size == -1: + pool_size = math.ceil( + (T - self.receptive_field + 2 * self.padding[0] + 1) / self.stride[0] + ) + + pool_padding = self.pool_padding + if pool_padding == -1: + pool_padding = pool_size - math.ceil( + (T - self.receptive_field + self.padding[0] + 1) / self.stride[0] + ) + pool = co.AvgPool1d(pool_size, stride=1, padding=max(0, pool_padding)) + + fc = co.Linear(256, self.num_classes, channel_dim=1) + + squeeze = co.Lambda(lambda x: x.squeeze(-1), takes_time=True, forward_step_only_fn=lambda x: x) + + # Initialize weights + init_weights(data_bn, bs=1) + init_weights(fc, bs=self.num_classes) + + # Add blocks sequentially + co.Sequential.__init__( + self, + OrderedDict( + [ + ("reshape1", reshape1), + ("data_bn", data_bn), + ("reshape2", reshape2), + ("layers", self.layers), + ("spatial_pool", spatial_pool), + ("pool", pool), + ("fc", fc), + ("squeeze", squeeze), + ] + ), + ) + + if self.forward_mode == "frame": + self.call_mode = "forward_steps" # Set continual forward mode + + logger.info(f"Input shape (C, T, V, S) = {self.input_shape}") + 
logger.info(f"Receptive field {self.receptive_field}") + logger.info(f"Init frames {self.receptive_field - 2 * self.padding[0] - 1}") + logger.info(f"Pool size {pool_size}") + logger.info(f"Stride {self.stride[0]}") + logger.info(f"Padding {self.padding[0]}") + logger.info(f"Using Continual {self.call_mode}") + + if self.forward_mode == "frame": + (num_channels, num_frames, num_vertices, num_skeletons) = self.input_shape + + # A new output is created every `self.stride` frames. + self.input_shape = (num_channels, self.stride, num_vertices, num_skeletons) + + def warm_up(self, dummy, sample: Sequence[int], *args, **kwargs): + # Called prior to profiling + + if self.forward_mode == "clip": + return + + self.clean_state() + + N, C, T, S, V = sample.shape + + self._current_input_shape = (N, C, S, V) + + init_frames = self.receptive_field - self.padding - 1 + init_data = torch.randn((N, C, init_frames, S, V)).to(device=self.device) + for i in range(init_frames): + self.forward_step(init_data[:, :, i]) + + def clean_state_on_shape_change(self, shape): + if getattr(self, "_current_input_shape", None) != shape: + self._current_input_shape = shape + self.clean_state() + + def forward(self, input): + if self.forward_mode == "clip": + ret = super().forward(input) + else: + assert self.forward_mode == "frame" + N, C, T, S, V = input.shape + self.clean_state_on_shape_change((N, C, S, V)) + + if not self.profile_model: + self.clean_state() + + ret = super().forward_steps(input, update_state=True, pad_end=False) + + if len(getattr(ret, "shape", (0,))) == 3: + ret = ret[:, :, 0] # the rest may be end-padding + return ret + + def forward_step(self, input, update_state=True): + self.clean_state_on_shape_change(input.shape) + return super().forward_step(input, update_state) + + def forward_steps(self, input: Tensor, pad_end=False, update_state=True): + N, C, T, S, V = input.shape + self.clean_state_on_shape_change((N, C, S, V)) + return super().forward_steps(input, pad_end, update_state) + + def map_state_dict( + self, + state_dict: "OrderedDict[str, Tensor]", + strict: bool = True, + ) -> "OrderedDict[str, Tensor]": + def map_key(k: str): + # Handle "layers.layer2.0.1.gcn.g_conv.0.weight" -> "layers.layer2.gcn.g_conv.0.weight" + k = k.replace("0.1.", "") + + # Handle "layers.layer8.0.0.residual.t_conv.weight" ->layers.layer8.residual.t_conv.weight' + k = k.replace("0.0.residual", "residual") + return k + + long_keys = nn.Module.state_dict(self, keep_vars=True).keys() + + if len(long_keys - state_dict.keys()): + short2long = {map_key(k): k for k in long_keys} + state_dict = OrderedDict( + [(short2long[k], v) for k, v in state_dict.items() if strict or k in short2long] + ) + return state_dict + + def map_loaded_weights(self, file, loaded_state_dict): + return self.map_state_dict(loaded_state_dict) + + def training_step(self, batch, batch_idx): + x, y = _unpack_batch(batch) + x = self.forward(x) + loss = getattr(F, self.loss_name, F.cross_entropy)(x, y) + self.log("train/loss", loss) + self.log("train/acc", _accuracy(x, y)) + return loss + + def validation_step(self, batch, batch_idx): + x, y = _unpack_batch(batch) + x = self.forward(x) + loss = getattr(F, self.loss_name, F.cross_entropy)(x, y) + self.log("val/loss", loss) + self.log("val/acc", _accuracy(x, y)) + return loss + + def test_step(self, batch, batch_idx): + x, y = _unpack_batch(batch) + x = self.forward(x) + loss = getattr(F, self.loss_name, F.cross_entropy)(x, y) + self.log("test/loss", loss) + self.log("test/acc", _accuracy(x, y)) + return loss + 
+ +def _unpack_batch(batch): + if len(batch) == 3: + x, y, _ = batch + return (x, y) + + +def _accuracy(x: Tensor, y: Tensor): + return torch.sum(x.argmax(dim=1) == y) / len(y) + + +class GraphConvolution(nn.Module): + def __init__(self, in_channels, out_channels, A, bn_momentum=0.1, *args, **kwargs): + super(GraphConvolution, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.graph_attn = nn.Parameter(torch.from_numpy(A.astype(np.float32))) + nn.init.constant_(self.graph_attn, 1) + self.A = nn.Parameter(torch.from_numpy(A.astype(np.float32)), requires_grad=False) + self.num_subset = 3 + self.g_conv = nn.ModuleList() + for i in range(self.num_subset): + self.g_conv.append(nn.Conv2d(in_channels, out_channels, 1)) + init_weights(self.g_conv[i], bs=self.num_subset) + + if in_channels != out_channels: + self.gcn_residual = nn.Sequential( + nn.Conv2d(in_channels, out_channels, 1), + nn.BatchNorm2d(out_channels, momentum=bn_momentum), + ) + init_weights(self.gcn_residual[0], bs=1) + init_weights(self.gcn_residual[1], bs=1) + else: + self.gcn_residual = lambda x: x + + self.bn = nn.BatchNorm2d(out_channels, momentum=bn_momentum) + init_weights(self.bn, bs=1e-6) + self.relu = nn.ReLU() + + def forward(self, x): + N, C, T, V = x.size() + A = self.A * self.graph_attn + sum_ = None + for i in range(self.num_subset): + x_a = x.view(N, C * T, V) + z = self.g_conv[i](torch.matmul(x_a, A[i]).view(N, C, T, V)) + sum_ = z + sum_ if sum_ is not None else z + sum_ = self.bn(sum_) + sum_ += self.gcn_residual(x) + return self.relu(sum_) + + +def CoGraphConvolution(in_channels, out_channels, A, bn_momentum=0.1): + return co.forward_stepping(GraphConvolution(in_channels, out_channels, A, bn_momentum)) + + +class TemporalConvolution(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size=9, + stride=1, + padding=4, + ): + super(TemporalConvolution, self).__init__() + + self.padding = padding + self.t_conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size=(kernel_size, 1), + padding=(self.padding, 0), + stride=(stride, 1), + ) + self.bn = nn.BatchNorm2d(out_channels) + init_weights(self.t_conv, bs=1) + init_weights(self.bn, bs=1) + + def forward(self, x): + x = self.bn(self.t_conv(x)) + return x + + +def CoTemporalConvolution( + in_channels, + out_channels, + kernel_size=9, + padding=0, + stride=1, +) -> co.Sequential: + + if padding == "equal": + padding = int((kernel_size - 1) / 2) + + t_conv = co.Conv2d( + in_channels, + out_channels, + kernel_size=(kernel_size, 1), + padding=(padding, 0), + stride=(stride, 1), + ) + + bn = nn.BatchNorm2d(out_channels) + + init_weights(t_conv, bs=1) + init_weights(bn, bs=1) + + seq = [] + seq.append(("t_conv", t_conv)) + seq.append(("bn", bn)) + return co.Sequential(OrderedDict(seq)) + + +class SpatioTemporalBlock(nn.Module): + def __init__( + self, + in_channels, + out_channels, + A, + stride=1, + residual=True, + temporal_kernel_size=9, + temporal_padding=-1, + GraphConv=GraphConvolution, + TempConv=TemporalConvolution, + ): + super(SpatioTemporalBlock, self).__init__() + equal_padding = int((temporal_kernel_size - 1) / 2) + if temporal_padding < 0: + temporal_padding = equal_padding + self.residual_shrink = None + else: + assert temporal_padding <= equal_padding + self.residual_shrink = equal_padding - temporal_padding + self.gcn = GraphConv(in_channels, out_channels, A) + self.tcn = TempConv( + out_channels, + out_channels, + stride=stride, + kernel_size=temporal_kernel_size, + 
padding=temporal_padding, + ) + self.relu = nn.ReLU() + if not residual: + self.residual = zero + elif (in_channels == out_channels) and (stride == 1): + self.residual = unity + else: + self.residual = TempConv( + in_channels, out_channels, kernel_size=1, stride=stride, padding=0 + ) + + def forward(self, x): + z = self.tcn(self.gcn(x)) + + if self.residual_shrink: + # Centered residuals: + # If temporal zero-padding is removed, the feature-map shrinks at every temporal conv + # The residual should shrink correspondingly, i.e. (kernel_size - 1) / 2) on each side + r = self.residual(x[:, :, self.residual_shrink:-self.residual_shrink]) + else: + r = self.residual(x) + + return self.relu(z + r) + + +def CoSpatioTemporalBlock( + in_channels, + out_channels, + A, + stride=1, + residual=True, + window_size=1, + padding=0, + CoGraphConv=CoGraphConvolution, + CoTempConv=CoTemporalConvolution, +): + window_size = int(window_size) # Currently unused. Could be used BN momentum + + gcn = CoGraphConv(in_channels, out_channels, A, bn_momentum=0.1) + tcn = CoTempConv( + out_channels, + out_channels, + stride=stride, + padding=padding, + ) + relu = torch.nn.ReLU() + + if not residual: + return co.Sequential(OrderedDict([("gcn", gcn), ("tcn", tcn), ("relu", relu)])) + + if (in_channels == out_channels) and (stride == 1): + return co.Sequential( + co.Residual( + co.Sequential(OrderedDict([("gcn", gcn), ("tcn", tcn)])), + residual_shrink=True, + ), + relu, + ) + + residual = CoTempConv(in_channels, out_channels, kernel_size=1, stride=stride) + + residual_shrink = tcn.receptive_field - 2 * tcn.padding[0] != 1 + + delay = tcn.delay // stride + if residual_shrink: + delay = delay // 2 + + return co.Sequential( + co.BroadcastReduce( + co.Sequential( + OrderedDict( + [ + ("residual", residual), + ("align", co.Delay(delay, auto_shrink=residual_shrink)), + ] + ) + ), + co.Sequential(OrderedDict([("gcn", gcn), ("tcn", tcn)])), + auto_delay=False, + ), + relu, + ) + + +def init_weights(module_, bs=1): + if isinstance(module_, _ConvNd): + nn.init.constant_(module_.bias, 0) + if bs == 1: + nn.init.kaiming_normal_(module_.weight, mode="fan_out") + else: + nn.init.normal_( + module_.weight, + 0, + math.sqrt(2.0 / (prod(module_.weight.size()) * bs)), + ) + elif isinstance(module_, _BatchNorm): + nn.init.constant_(module_.weight, bs) + nn.init.constant_(module_.bias, 0) + elif isinstance(module_, nn.Linear): + nn.init.normal_(module_.weight, 0, math.sqrt(2.0 / bs)) + + +def zero(x): + return 0 + + +def unity(x): + return x diff --git a/src/opendr/perception/skeleton_based_action_recognition/algorithm/models/co_stgcn.py b/src/opendr/perception/skeleton_based_action_recognition/algorithm/models/co_stgcn.py new file mode 100644 index 0000000000..de31f4f9e9 --- /dev/null +++ b/src/opendr/perception/skeleton_based_action_recognition/algorithm/models/co_stgcn.py @@ -0,0 +1,49 @@ +""" +Modified based on: https://github.com/open-mmlab/mmskeleton +""" +from collections import OrderedDict +import continual as co + +from opendr.perception.skeleton_based_action_recognition.algorithm.models.co_base import ( + CoModelBase, + CoSpatioTemporalBlock, +) + + +class CoStGcnMod(CoModelBase): + def __init__( + self, + num_point=25, + num_person=2, + in_channels=3, + graph_type="ntu", + sequence_len: int = 300, + num_classes: int = 60, + loss_name="cross_entropy", + ): + CoModelBase.__init__( + self, num_point, num_person, in_channels, graph_type, sequence_len, num_classes, loss_name + ) + + # Shapes: num_channels, num_frames, num_vertices, 
num_skeletons + (C_in, T, _, _) = self.input_shape + A = self.graph.A + + # Pass in precise window-sizes to compensate propperly in BatchNorm modules + # fmt: off + self.layers = co.Sequential(OrderedDict([ + ("layer1", CoSpatioTemporalBlock(C_in, 64, A, padding=0, window_size=T, residual=False)), + ("layer2", CoSpatioTemporalBlock(64, 64, A, padding=0, window_size=T - 1 * 8)), + ("layer3", CoSpatioTemporalBlock(64, 64, A, padding=0, window_size=T - 2 * 8)), + ("layer4", CoSpatioTemporalBlock(64, 64, A, padding=0, window_size=T - 3 * 8)), + ("layer5", CoSpatioTemporalBlock(64, 128, A, padding=0, window_size=T - 4 * 8, stride=1)), + ("layer6", CoSpatioTemporalBlock(128, 128, A, padding=0, window_size=(T - 4 * 8) / 2 - 1 * 8)), + ("layer7", CoSpatioTemporalBlock(128, 128, A, padding=0, window_size=(T - 4 * 8) / 2 - 2 * 8)), + ("layer8", CoSpatioTemporalBlock(128, 256, A, padding=0, window_size=(T - 4 * 8) / 2 - 3 * 8, stride=1)), + ("layer9", CoSpatioTemporalBlock(256, 256, A, padding=0, window_size=((T - 4 * 8) / 2 - 3 * 8) / 2 - 1 * 8)), + ("layer10", CoSpatioTemporalBlock(256, 256, A, padding=0, window_size=((T - 4 * 8) / 2 - 3 * 8) / 2 - 2 * 8)), + ])) + # fmt: on + + # Other layers defined in CoModelBase.on_init_end + CoModelBase.on_init_end(self) diff --git a/src/opendr/perception/skeleton_based_action_recognition/algorithm/models/co_str.py b/src/opendr/perception/skeleton_based_action_recognition/algorithm/models/co_str.py new file mode 100644 index 0000000000..6c4803bcbc --- /dev/null +++ b/src/opendr/perception/skeleton_based_action_recognition/algorithm/models/co_str.py @@ -0,0 +1,589 @@ +""" +Modified based on: https://github.com/open-mmlab/mmskeleton +""" +from collections import OrderedDict +import continual as co +import math +import torch +from torch import nn +import torch.nn.functional as F +import numpy as np +from opendr.perception.skeleton_based_action_recognition.algorithm.models.co_base import ( + CoModelBase, + CoSpatioTemporalBlock, +) + + +class CoSTrMod(CoModelBase): + def __init__( + self, + num_point=25, + num_person=2, + in_channels=3, + graph_type="ntu", + sequence_len: int = 300, + num_classes: int = 60, + loss_name="cross_entropy", + ): + CoModelBase.__init__( + self, num_point, num_person, in_channels, graph_type, sequence_len, num_classes, loss_name + ) + + # Shapes: num_channels, num_frames, num_vertices, num_skeletons + (C_in, T, V, S) = self.input_shape + A = self.graph.A + + def CoGcnUnitAttention(in_channels, out_channels, A, bn_momentum=0.1): + return co.forward_stepping( + GcnUnitAttention(in_channels, out_channels, A, bn_momentum, num_point=V) + ) + + # Pass in precise window-sizes to compensate propperly in BatchNorm modules + self.layers = co.Sequential( + OrderedDict( + [ + ( + "layer1", + CoSpatioTemporalBlock(C_in, 64, A, padding=0, window_size=T, residual=False), + ), + ("layer2", CoSpatioTemporalBlock(64, 64, A, padding=0, window_size=T - 1 * 8)), + ("layer3", CoSpatioTemporalBlock(64, 64, A, padding=0, window_size=T - 2 * 8)), + ( + "layer4", + CoSpatioTemporalBlock( + 64, + 64, + A, + CoGraphConv=CoGcnUnitAttention, + padding=0, + window_size=T - 3 * 8, + ), + ), + ( + "layer5", + CoSpatioTemporalBlock( + 64, + 128, + A, + CoGraphConv=CoGcnUnitAttention, + padding=0, + window_size=T - 4 * 8, + stride=1, + ), + ), + ( + "layer6", + CoSpatioTemporalBlock( + 128, + 128, + A, + CoGraphConv=CoGcnUnitAttention, + padding=0, + window_size=(T - 4 * 8) / 2 - 1 * 8, + ), + ), + ( + "layer7", + CoSpatioTemporalBlock( + 128, + 128, + A, + 
CoGraphConv=CoGcnUnitAttention, + padding=0, + window_size=(T - 4 * 8) / 2 - 2 * 8, + ), + ), + ( + "layer8", + CoSpatioTemporalBlock( + 128, + 256, + A, + CoGraphConv=CoGcnUnitAttention, + padding=0, + window_size=(T - 4 * 8) / 2 - 3 * 8, + stride=1, + ), + ), + ( + "layer9", + CoSpatioTemporalBlock( + 256, + 256, + A, + CoGraphConv=CoGcnUnitAttention, + padding=0, + window_size=((T - 4 * 8) / 2 - 3 * 8) / 2 - 1 * 8, + ), + ), + ( + "layer10", + CoSpatioTemporalBlock( + 256, + 256, + A, + CoGraphConv=CoGcnUnitAttention, + padding=0, + window_size=((T - 4 * 8) / 2 - 3 * 8) / 2 - 2 * 8, + ), + ), + ] + ) + ) + + # Other layers defined in CoModelBase.on_init_end + CoModelBase.on_init_end(self) + + +class SpatialAttention(nn.Module): + """ + This class implements Spatial Attention. + Function adapted from: https://github.com/leaderj1001/Attention-Augmented-Conv2d + """ + + def __init__( + self, + in_channels, + kernel_size, + dk, + dv, + Nh, + complete, + relative, + layer, + A, + more_channels, + drop_connect, + adjacency, + num, + num_point, + shape=25, + stride=1, + last_graph=False, + data_normalization=True, + skip_conn=True, + visualization=True, + ): + super(SpatialAttention, self).__init__() + self.in_channels = in_channels + self.complete = complete + self.kernel_size = 1 + self.dk = dk + self.dv = dv + self.num = num + self.layer = layer + self.more_channels = more_channels + self.drop_connect = drop_connect + self.visualization = visualization + self.data_normalization = data_normalization + self.skip_conn = skip_conn + self.adjacency = adjacency + self.Nh = Nh + self.num_point = num_point + self.A = A[0] + A[1] + A[2] + if self.adjacency: + self.mask = nn.Parameter(torch.ones(self.A.size())) + self.shape = shape + self.relative = relative + self.last_graph = last_graph + self.stride = stride + self.padding = (self.kernel_size - 1) // 2 + + assert self.Nh != 0, "integer division or modulo by zero, Nh >= 1" + assert ( + self.dk % self.Nh == 0 + ), "dk should be divided by Nh. (example: out_channels: 20, dk: 40, Nh: 4)" + assert ( + self.dv % self.Nh == 0 + ), "dv should be divided by Nh. (example: out_channels: 20, dv: 4, Nh: 4)" + assert stride in [1, 2], str(stride) + " Up to 2 strides are allowed." 
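The assertions above encode the usual multi-head attention constraint: the key and value widths dk and dv are divided evenly across the Nh heads, so each head operates on dk // Nh and dv // Nh channels (as the error messages suggest, e.g. dk = 40 with Nh = 4 gives 10 key channels per head). A minimal shape sketch of that split, mirroring split_heads_2d further down in this file but not copied from it:

import torch

def split_heads(x, num_heads):
    # x: (B, channels, T, V) feature map with channels divisible by num_heads
    B, channels, T, V = x.shape
    assert channels % num_heads == 0, "channels must be divisible by the number of heads"
    return x.reshape(B, num_heads, channels // num_heads, T, V)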
+ + if self.more_channels: + + self.qkv_conv = nn.Conv2d( + self.in_channels, + (2 * self.dk + self.dv) * self.Nh // self.num, + kernel_size=self.kernel_size, + stride=stride, + padding=self.padding, + ) + else: + self.qkv_conv = nn.Conv2d( + self.in_channels, + 2 * self.dk + self.dv, + kernel_size=self.kernel_size, + stride=stride, + padding=self.padding, + ) + if self.more_channels: + + self.attn_out = nn.Conv2d(self.dv * self.Nh // self.num, self.dv, kernel_size=1, stride=1) + else: + self.attn_out = nn.Conv2d(self.dv, self.dv, kernel_size=1, stride=1) + + if self.relative: + # Two parameters are initialized in order to implement relative positional encoding + # One weight repeated over the diagonal + # V^2-V+1 paramters in positions outside the diagonal + if self.more_channels: + self.key_rel = nn.Parameter( + torch.randn( + ((self.num_point ** 2) - self.num_point, self.dk // self.num), + requires_grad=True, + ) + ) + else: + self.key_rel = nn.Parameter( + torch.randn( + ((self.num_point ** 2) - self.num_point, self.dk // Nh), + requires_grad=True, + ) + ) + if self.more_channels: + self.key_rel_diagonal = nn.Parameter( + torch.randn((1, self.dk // self.num), requires_grad=True) + ) + else: + self.key_rel_diagonal = nn.Parameter( + torch.randn((1, self.dk // self.Nh), requires_grad=True) + ) + + def forward(self, x): + # Input x + # (batch_size, channels, 1, joints) + B, _, T, V = x.size() + + # flat_q, flat_k, flat_v + # (batch_size, Nh, dvh or dkh, joints) + # dvh = dv / Nh, dkh = dk / Nh + # q, k, v obtained by doing 2D convolution on the input (q=XWq, k=XWk, v=XWv) + flat_q, flat_k, flat_v, q, k, v = self.compute_flat_qkv(x, self.dk, self.dv, self.Nh) + + logits = torch.matmul(flat_q.transpose(2, 3), flat_k) + + # In this version, the adjacency matrix is weighted and added to the attention logits of transformer to add + # information of the original skeleton structure + if self.adjacency: + logits = logits.reshape(-1, V, V) + M, V, V = logits.shape + A = self.A + A *= self.mask + A = A.unsqueeze(0).expand(M, V, V) + logits = logits + A + logits = logits.reshape(B, self.Nh, V, V) + + # Relative positional encoding is used or not + if self.relative: + rel_logits = self.relative_logits(q) + logits_sum = torch.add(logits, rel_logits) + + # Calculate weights + if self.relative: + weights = F.softmax(logits_sum, dim=-1) + else: + weights = F.softmax(logits, dim=-1) + + # Drop connect implementation to avoid overfitting + if self.drop_connect and self.training: + mask = torch.bernoulli((0.5) * torch.ones(B * self.Nh * V, device=x.device)) + mask = mask.reshape(B, self.Nh, V).unsqueeze(2).expand(B, self.Nh, V, V) + weights = weights * mask + weights = weights / (weights.sum(3, keepdim=True) + 1e-8) + + # attn_out + # (batch, Nh, joints, dvh) + # weights*V + # (batch, Nh, joints, joints)*(batch, Nh, joints, dvh)=(batch, Nh, joints, dvh) + attn_out = torch.matmul(weights, flat_v.transpose(2, 3)) + + if not self.more_channels: + attn_out = torch.reshape(attn_out, (B, self.Nh, T, V, self.dv // self.Nh)) + else: + attn_out = torch.reshape(attn_out, (B, self.Nh, T, V, self.dv // self.num)) + + attn_out = attn_out.permute(0, 1, 4, 2, 3) + + # combine_heads_2d, combine heads only after having calculated each Z separately + # (batch, Nh*dv, 1, joints) + attn_out = self.combine_heads_2d(attn_out) + + # Multiply for W0 (batch, out_channels, 1, joints) with out_channels=dv + attn_out = self.attn_out(attn_out) + return attn_out + + def compute_flat_qkv(self, x, dk, dv, Nh): + qkv = self.qkv_conv(x) + # 
T=1 in this case, because we are considering each frame separately + N, _, T, V = qkv.size() + + # if self.more_channels=True, to each head is assigned dk*self.Nh//self.num channels + if self.more_channels: + q, k, v = torch.split( + qkv, + [ + dk * self.Nh // self.num, + dk * self.Nh // self.num, + dv * self.Nh // self.num, + ], + dim=1, + ) + else: + q, k, v = torch.split(qkv, [dk, dk, dv], dim=1) + q = self.split_heads_2d(q, Nh) + k = self.split_heads_2d(k, Nh) + v = self.split_heads_2d(v, Nh) + + dkh = dk // Nh + q = q * (dkh ** -0.5) + if self.more_channels: + flat_q = torch.reshape(q, (N, Nh, dk // self.num, T * V)) + flat_k = torch.reshape(k, (N, Nh, dk // self.num, T * V)) + flat_v = torch.reshape(v, (N, Nh, dv // self.num, T * V)) + else: + flat_q = torch.reshape(q, (N, Nh, dkh, T * V)) + flat_k = torch.reshape(k, (N, Nh, dkh, T * V)) + flat_v = torch.reshape(v, (N, Nh, dv // self.Nh, T * V)) + return flat_q, flat_k, flat_v, q, k, v + + def split_heads_2d(self, x, Nh): + B, channels, T, V = x.size() + ret_shape = (B, Nh, channels // Nh, T, V) + split = torch.reshape(x, ret_shape) + return split + + def combine_heads_2d(self, x): + batch, Nh, dv, T, V = x.size() + ret_shape = (batch, Nh * dv, T, V) + return torch.reshape(x, ret_shape) + + def relative_logits(self, q): + B, Nh, dk, T, V = q.size() + q = torch.transpose(q, 2, 4).transpose(2, 3) + q_first = q.unsqueeze(4).expand((B, Nh, T, V, V - 1, dk)) + q_first = torch.reshape(q_first, (B * Nh * T, -1, dk)) + + # q used to multiply for the embedding of the parameter on the diagonal + q = torch.reshape(q, (B * Nh * T, V, dk)) + # key_rel_diagonal: (1, dk) -> (V, dk) + param_diagonal = self.key_rel_diagonal.expand((V, dk)) + rel_logits = self.relative_logits_1d(q_first, q, self.key_rel, param_diagonal, T, V, Nh) + return rel_logits + + def relative_logits_1d(self, q_first, q, rel_k, param_diagonal, T, V, Nh): + # compute relative logits along one dimension + # (B*Nh*1,V^2-V, self.dk // Nh)*(V^2 - V, self.dk // Nh) + + # (B*Nh*1, V^2-V) + rel_logits = torch.einsum("bmd,md->bm", q_first, rel_k) + # (B*Nh*1, V) + rel_logits_diagonal = torch.einsum("bmd,md->bm", q, param_diagonal) + + # reshapes to obtain Srel + rel_logits = self.rel_to_abs(rel_logits, rel_logits_diagonal) + + rel_logits = torch.reshape(rel_logits, (-1, Nh, V, V)) + return rel_logits + + def rel_to_abs(self, rel_logits, rel_logits_diagonal): + B, L = rel_logits.size() + B, V = rel_logits_diagonal.size() + + # (B, V-1, V) -> (B, V, V) + rel_logits = torch.reshape(rel_logits, (B, V - 1, V)) + row_pad = torch.zeros(B, 1, V).to(rel_logits) + rel_logits = torch.cat((rel_logits, row_pad), dim=1) + + # concat the other embedding on the left + # (B, V, V) -> (B, V, V+1) -> (B, V+1, V) + rel_logits_diagonal = torch.reshape(rel_logits_diagonal, (B, V, 1)) + rel_logits = torch.cat((rel_logits_diagonal, rel_logits), dim=2) + rel_logits = torch.reshape(rel_logits, (B, V + 1, V)) + + # slice + flat_sliced = rel_logits[:, :V, :] + final_x = torch.reshape(flat_sliced, (B, V, V)) + return final_x + + +def conv_init(module): + # he_normal + n = module.out_channels + for k in module.kernel_size: + n = n * k + module.weight.data.normal_(0, math.sqrt(2.0 / n)) + + +class GcnUnitAttention(nn.Module): + def __init__( + self, + in_channels, + out_channels, + A, + num=4, + dv_factor=0.25, + dk_factor=0.25, + Nh=8, + complete=True, + relative=False, + only_attention=True, + layer=0, + more_channels=False, + drop_connect=True, + data_normalization=True, + skip_conn=True, + adjacency=False, + 
num_point=25, + padding=0, + kernel_size=1, + stride=1, + bn_flag=True, + t_dilation=1, + last_graph=False, + visualization=True, + *args, + **kwargs, + ): + super().__init__() + self.relu = nn.ReLU() + self.visualization = visualization + self.in_channels = in_channels + self.more_channels = more_channels + self.drop_connect = drop_connect + self.data_normalization = data_normalization + self.skip_conn = skip_conn + self.num_point = num_point + self.adjacency = adjacency + # print("Nh ", Nh) + # print("Dv ", dv_factor) + # print("Dk ", dk_factor) + + self.last_graph = last_graph + if not only_attention: + self.out_channels = out_channels - int((out_channels) * dv_factor) + else: + self.out_channels = out_channels + self.data_bn = nn.BatchNorm1d(self.in_channels * self.num_point) + self.bn = nn.BatchNorm2d(out_channels) + self.only_attention = only_attention + self.bn_flag = bn_flag + self.layer = layer + + self.A = nn.Parameter(torch.from_numpy(A.astype(np.float32))) + + # Each Conv2d unit implements 2d convolution to weight every single partition (filter size 1x1) + # There is a convolutional unit for each partition + # This is done only in the case in which Spatial Transformer and Graph Convolution are concatenated + + if not self.only_attention: + self.g_convolutions = nn.ModuleList( + [ + nn.Conv2d( + in_channels, + self.out_channels, + kernel_size=(kernel_size, 1), + padding=(padding, 0), + stride=(stride, 1), + dilation=(t_dilation, 1), + ) + for i in range(self.A.size()[0]) + ] + ) + for conv in self.g_convolutions: + conv_init(conv) + + self.attention_conv = SpatialAttention( + in_channels=self.in_channels, + kernel_size=1, + dk=int(out_channels * dk_factor), + dv=int(out_channels * dv_factor), + Nh=Nh, + complete=complete, + relative=relative, + stride=stride, + layer=self.layer, + A=self.A, + num=num, + more_channels=self.more_channels, + drop_connect=self.drop_connect, + data_normalization=self.data_normalization, + skip_conn=self.skip_conn, + adjacency=self.adjacency, + visualization=self.visualization, + num_point=self.num_point, + ) + else: + self.attention_conv = SpatialAttention( + in_channels=self.in_channels, + kernel_size=1, + dk=int(out_channels * dk_factor), + dv=int(out_channels), + Nh=Nh, + complete=complete, + relative=relative, + stride=stride, + last_graph=self.last_graph, + layer=self.layer, + A=self.A, + num=num, + more_channels=self.more_channels, + drop_connect=self.drop_connect, + data_normalization=self.data_normalization, + skip_conn=self.skip_conn, + adjacency=self.adjacency, + visualization=self.visualization, + num_point=self.num_point, + ) + + def forward(self, x): + # N: number of samples, equal to the batch size + # C: number of channels, in our case 3 (coordinates x, y, z) + # T: number of frames + # V: number of nodes + N, C, T, V = x.size() + x_sum = x + if self.data_normalization: + x = x.permute(0, 1, 3, 2).reshape(N, C * V, T) + x = self.data_bn(x) + x = x.reshape(N, C, V, T).permute(0, 1, 3, 2) + + # Learnable parameter + A = self.A + + # N, T, C, V > NT, C, 1, V + xa = x.permute(0, 2, 1, 3).reshape(-1, C, 1, V) + + # Spatial Transformer + attn_out = self.attention_conv(xa) + # N, T, C, V > N, C, T, V + attn_out = attn_out.reshape(N, T, -1, V).permute(0, 2, 1, 3) + + if not self.only_attention: + + # For each partition multiplies for the input and applies convolution 1x1 to the result to weight each partition + for i, partition in enumerate(A): + # print(partition) + # NCTxV + xp = x.reshape(-1, V) + # (NCTxV)*(VxV) + xp = 
xp.mm(partition.float()) + # NxCxTxV + xp = xp.reshape(N, C, T, V) + + if i == 0: + y = self.g_convolutions[i](xp) + else: + y = y + self.g_convolutions[i](xp) + + # Concatenate on the channel dimension the two convolutions + y = torch.cat((y, attn_out), dim=1) + else: + if self.skip_conn and self.in_channels == self.out_channels: + y = attn_out + x_sum + else: + y = attn_out + if self.bn_flag: + y = self.bn(y) + + y = self.relu(y) + + return y diff --git a/src/opendr/perception/skeleton_based_action_recognition/continual_stgcn_learner.py b/src/opendr/perception/skeleton_based_action_recognition/continual_stgcn_learner.py new file mode 100644 index 0000000000..389b668540 --- /dev/null +++ b/src/opendr/perception/skeleton_based_action_recognition/continual_stgcn_learner.py @@ -0,0 +1,699 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import continual as co +import json +import os +import pickle +import torch +import onnxruntime as ort +import pytorch_lightning as pl +import torch.nn.functional as F + +from functools import partial +from pathlib import Path +from opendr.engine.target import Category +from opendr.engine.learners import Learner +from opendr.engine.helper.io import bump_version +from opendr.engine.datasets import Dataset +from opendr.engine.constants import OPENDR_SERVER_URL +from opendr.engine.datasets import ExternalDataset, DatasetIterator +from urllib.request import urlretrieve + +from logging import getLogger +from typing import Any, Union, Dict, List + +from opendr.perception.skeleton_based_action_recognition.spatio_temporal_gcn_learner import ( + SpatioTemporalGCNLearner, +) +from opendr.perception.skeleton_based_action_recognition.algorithm.datasets.feeder import ( + Feeder, +) +from opendr.perception.skeleton_based_action_recognition.algorithm.models.co_stgcn import ( + CoStGcnMod, +) +from opendr.perception.skeleton_based_action_recognition.algorithm.models.co_agcn import ( + CoAGcnMod, +) +from opendr.perception.skeleton_based_action_recognition.algorithm.models.co_str import ( + CoSTrMod, +) + + +_MODEL_NAMES = {"costgcn", "costr", "coagcn"} + +logger = getLogger(__name__) + + +class CoSTGCNLearner(Learner): + def __init__( + self, + lr=1e-3, + iters=10, # Epochs + batch_size=64, + optimizer="adam", + lr_schedule="", + backbone="costgcn", + network_head="classification", + checkpoint_after_iter=0, + checkpoint_load_iter=0, + temp_path="", + device="cuda", + loss="cross_entropy", + weight_decay=1e-5, + momentum=0.9, + drop_last=True, + pin_memory=False, + num_workers=0, + seed=123, + num_classes=60, + num_point=25, + num_person=2, + in_channels=3, + graph_type="ntu", + sequence_len: int = 300, + *args, + **kwargs, + ): + """Initialise the CoSTGCNLearnerLearner + This learner wraps the Continual version of X3D, which makes predictions frame-by-frame rather than by clip. + + Args: + lr (float, optional): Learning rate during optimization. Defaults to 1e-3. + iters (int, optional): Number of epochs to train for. 
Defaults to 10.
+            optimizer (str, optional): Name of optimizer to use ("sgd" or "adam"). Defaults to "adam".
+            lr_schedule (str, optional): Unused parameter. Defaults to "".
+            network_head (str, optional): Head of network (only "classification" is currently available).
+                Defaults to "classification".
+            checkpoint_after_iter (int, optional): Unused parameter. Defaults to 0.
+            checkpoint_load_iter (int, optional): Unused parameter. Defaults to 0.
+            temp_path (str, optional): Path in which to store temporary files. Defaults to "".
+            device (str, optional): Name of computational device ("cpu" or "cuda"). Defaults to "cuda".
+            loss (str): Name of loss in torch.nn.functional to use. Defaults to "cross_entropy".
+            weight_decay (float, optional): Weight decay used for optimization. Defaults to 1e-5.
+            momentum (float, optional): Momentum used for optimization. Defaults to 0.9.
+            drop_last (bool, optional): Drop last data point if a batch cannot be filled. Defaults to True.
+            pin_memory (bool, optional): Pin memory in dataloader. Defaults to False.
+            num_workers (int, optional): Number of workers in dataloader. Defaults to 0.
+            seed (int, optional): Random seed. Defaults to 123.
+            num_classes (int, optional): Number of classes to predict among. Defaults to 60.
+            sequence_len (int, optional): Length of the input skeleton sequence. Defaults to 300.
+        """
+        super(CoSTGCNLearner, self).__init__(
+            lr=lr,
+            iters=iters,
+            batch_size=batch_size,
+            optimizer=optimizer,
+            lr_schedule=lr_schedule,
+            backbone=backbone,
+            network_head=network_head,
+            temp_path=temp_path,
+            checkpoint_after_iter=checkpoint_after_iter,
+            checkpoint_load_iter=checkpoint_load_iter,
+            device=device,
+            threshold=0.0,
+        )
+
+        self.weight_decay = weight_decay
+        self.momentum = momentum
+        self.drop_last = drop_last
+        self.pin_memory = pin_memory
+        self.num_workers = num_workers
+        self.seed = seed
+        self.num_classes = num_classes
+        self.loss = loss
+        self._ort_session = None
+        self._ort_state = None
+        self.num_point = num_point
+        self.num_person = num_person
+        self.in_channels = in_channels
+        self.graph_type = graph_type
+        self.sequence_len = sequence_len
+
+        if self.graph_type is None:
+            raise ValueError(
+                self.graph_type + " is not a valid graph type. Supported graphs: ntu, openpose"
+            )
+        if self.backbone is None or self.backbone not in _MODEL_NAMES:
+            raise ValueError(
+                self.backbone + f" is not a valid backbone name.
Supported methods: {_MODEL_NAMES}" + ) + + pl.seed_everything(self.seed) + self.init_model() + + def init_model(self) -> Union[CoStGcnMod, CoAGcnMod, CoSTrMod]: + """Initialise model with random parameters + + Returns: + Union[CoStGcnMod, CoAGcnMod, CoSTrMod]: model + """ + Model = { + "costgcn": CoStGcnMod, + "coagcn": CoAGcnMod, + "costr": CoSTrMod, + }[self.backbone] + + self.model = Model( + self.num_point, + self.num_person, + self.in_channels, + self.graph_type, + self.sequence_len, + self.num_classes, + self.loss, + ).to(device=self.device) + return self.model + + def download( + self, + dataset_name="nturgbd_cv", + experiment_name="stgcn_nturgbd", + path=None, + method_name="costgcn", + mode="pretrained", + verbose=True, + url=OPENDR_SERVER_URL + "perception/skeleton_based_action_recognition/", + file_name="costgcn_ntu60_xview_joint.ckpt", + ): + if path is None: + path = self.temp_path + + if mode == "pretrained": + file_url = os.path.join(url, 'pretrained_models', method_name, file_name) + target_path = Path(path) / method_name / dataset_name / file_name + if target_path.exists(): + return str(target_path) + target_path.parent.mkdir(exist_ok=True, parents=True) + if verbose: + print(f"Downloading {file_name} to {str(target_path)}") + urlretrieve(file_url, str(target_path)) + return target_path + + # For dataset downloads, reuse the code from SpatioTemporalGCNLearner + class _DownloadConfig: + def __init__(self, parent_dir: str, dataset_name: str, experiment_name: str): + self.parent_dir = parent_dir + self.dataset_name = dataset_name + self.experiment_name = experiment_name + + # Use download method from SpatioTemporalGCNLearner + return SpatioTemporalGCNLearner.download( + _DownloadConfig(self.temp_path, dataset_name, experiment_name), + path, + method_name, + mode, + verbose, + url, + file_name, + ) + + @staticmethod + def _prepare_dataset( + dataset, + data_filename="train_joints.npy", + labels_filename="train_labels.pkl", + skeleton_data_type="joint", + phase="train", + verbose=True, + ): + if isinstance(dataset, ExternalDataset): + if ( + dataset.dataset_type.lower() != "nturgbd" and + dataset.dataset_type.lower() != "kinetics" + ): + raise UserWarning('dataset_type must be "NTURGBD or Kinetics"') + # Get data and labels path + data_path = os.path.join(dataset.path, data_filename) + labels_path = os.path.join(dataset.path, labels_filename) + if phase == "train": + if dataset.dataset_type.lower() == "nturgbd": + random_choose = False + random_move = False + window_size = -1 + elif dataset.dataset_type.lower() == "kinetics": + random_choose = True + random_move = True + window_size = 150 + else: + random_choose = False + random_move = False + window_size = -1 + + if verbose: + print("Dataset path is set. Loading feeder...") + return Feeder( + data_path=data_path, + label_path=labels_path, + random_choose=random_choose, + random_move=random_move, + window_size=window_size, + skeleton_data_type=skeleton_data_type, + data_name=dataset.dataset_type.lower(), + ) + elif isinstance(dataset, DatasetIterator): + return dataset + + def infer(self, batch: torch.Tensor) -> List[Category]: + """Run inference on a batch of data + + Args: + batch (torch.Tensor): batch of skeletons for a single time-step. + The batch should have shape (C, V, S), (C, T, V, S), or (B, C, T, V, S). 
+ Here, B is the batch size, C is the number of input channels, V is the + number of vertices, and S is the number of skeletons + + Returns: + List[target.Category]: List of output categories + """ + # Cast to torch tensor + batch = batch.to(device=self.device, dtype=torch.float) + if len(batch.shape) == 3: + batch = batch.unsqueeze(0) # (C, V, S) -> (B, C, V, S) + if len(batch.shape) == 4: + batch = batch.unsqueeze(2) # (B, C, V, S) -> (B, C, T, V, S) + + if self._ort_session is not None and self._ort_state is not None: + batch = batch.squeeze(2) # (B, C, T, V, S) -> (B, C, V, S) + inputs = { + "input": batch.cpu().detach().numpy(), + **self._ort_state, + } + results, *next_state = self._ort_session.run(None, inputs) + results = torch.tensor(results) + self._ort_state = {k: v for k, v in zip(self._ort_state.keys(), next_state)} + else: + self.model.eval() + results = self.model.forward_steps(batch) + if results is None: + print("Warming model up prior to inference") + _ = self.model.forward_steps( + batch.repeat(1, 1, self.model.receptive_field, 1, 1) + ) + results = self.model.forward_steps(batch) + + results = [ + Category(prediction=int(r.argmax(dim=0)), confidence=F.softmax(r, dim=-1)) + for r in results + ] + return results + + def _load_model_weights(self, weights_path: Union[str, Path]): + """Load pretrained model weights + + Args: + weights_path (Union[str, Path]): Path to model weights file. + Type of file must be one of {".pyth", ".pth", ".onnx"} + """ + weights_path = Path(weights_path) + + assert weights_path.is_file() and weights_path.suffix in { + ".pyth", + ".pth", + ".onnx", + ".ckpt", + }, ( + f"weights_path ({str(weights_path)}) should be a .pth or .onnx file." + "Pretrained weights can be downloaded using `self.download(...)`" + ) + if weights_path.suffix == ".onnx": + return self._load_onnx(weights_path) + + logger.debug(f"Loading model weights from {str(weights_path)}") + + # Check for configuration mismatches, loading only matching weights + new_model_state = self.model.state_dict() + loaded_state_dict = torch.load(weights_path, map_location=torch.device(self.device)) + # As found in some pretrained models + if "model_state" in loaded_state_dict: + loaded_state_dict = loaded_state_dict["model_state"] + # As found in PyTorch Lightning checkpoints + if "state_dict" in loaded_state_dict: + loaded_state_dict = loaded_state_dict["state_dict"] + + loaded_state_dict = self.model.map_state_dict(loaded_state_dict) + + def size_ok(k): + return new_model_state[k].size() == loaded_state_dict[k].size() + + to_load = {k: v for k, v in loaded_state_dict.items() if size_ok(k)} + self.model.load_state_dict(to_load, strict=False) + + names_not_loaded = set(new_model_state.keys()) - set(to_load.keys()) + if len(names_not_loaded) > 0: + logger.warning(f"Some model weight could not be loaded: {names_not_loaded}") + self.model.to(self.device) + + return self + + def save(self, path: Union[str, Path]): + """Save model weights and metadata to path. + + Args: + path (Union[str, Path]): Directory in which to save model weights and meta data. + + Returns: + self + """ + assert hasattr( + self, "model" + ), "Cannot save model because no model was found. Did you forget to call `__init__`?" 
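# --------------------------------------------------------------------------------------
# Usage sketch (editorial addition, not part of the patch): frame-by-frame inference with
# CoSTGCNLearner via the download()/load()/infer() methods above. The checkpoint name, the
# "./tmp" directory and the random tensor are illustrative assumptions; it also assumes the
# pretrained checkpoint is reachable on the OpenDR server. Real input must follow the
# (B, C, T, V, S) skeleton layout documented in infer().
import torch
from opendr.perception.skeleton_based_action_recognition.continual_stgcn_learner import CoSTGCNLearner

learner = CoSTGCNLearner(backbone="costgcn", device="cpu", temp_path="./tmp")
ckpt_path = learner.download(mode="pretrained", file_name="costgcn_ntu60_xview_joint.ckpt")
learner.load(ckpt_path)

frame = torch.randn(1, 3, 1, 25, 2)  # one time-step: batch=1, 3 channels, 25 joints, 2 persons
predictions = learner.infer(frame)   # list of engine.target.Category, one per batch item
print(predictions[0])
# --------------------------------------------------------------------------------------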
+ + root_path = Path(path) + root_path.mkdir(parents=True, exist_ok=True) + name = f"{self.backbone}" + ext = ".onnx" if self._ort_session else ".pth" + weights_path = bump_version(root_path / f"model_{name}{ext}") + meta_path = bump_version(root_path / f"{name}.json") + + logger.info(f"Saving model weights to {str(weights_path)}") + if self._ort_session: + self._save_onnx(weights_path) + else: + torch.save(self.model.state_dict(), weights_path) + + logger.info(f"Saving meta-data to {str(meta_path)}") + meta_data = { + "model_paths": weights_path.name, + "framework": "pytorch", + "format": "pth", + "has_data": False, + "inference_params": { + "backbone": self.backbone, + "network_head": self.network_head, + "num_classes": self.num_classes, + "num_point": self.num_point, + "num_person": self.num_person, + "in_channels": self.in_channels, + "graph_type": self.graph_type, + "sequence_len": self.sequence_len, + }, + "optimized": bool(self._ort_session), + "optimizer_info": { + "lr": self.lr, + "iters": self.iters, + "batch_size": self.batch_size, + "optimizer": self.optimizer, + "checkpoint_after_iter": self.checkpoint_after_iter, + "checkpoint_load_iter": self.checkpoint_load_iter, + "loss": self.loss, + "weight_decay": self.weight_decay, + "momentum": self.momentum, + "drop_last": self.drop_last, + "pin_memory": self.pin_memory, + "num_workers": self.num_workers, + "seed": self.seed, + }, + } + with open(str(meta_path), "w", encoding="utf-8") as f: + json.dump(meta_data, f, sort_keys=True, indent=4) + + return self + + def load(self, path: Union[str, Path]): + """Load model. + + Args: + path (Union[str, Path]): Path to metadata file in json format or path to model weights + + Returns: + self + """ + path = Path(path) + + # Allow direct loading of weights, omitting the metadata file + if path.suffix in {".pyth", ".pth", ".onnx", ".ckpt"}: + self._load_model_weights(path) + return self + if path.is_dir(): + path = path / f"{self.backbone}.json" + assert ( + path.is_file() and path.suffix == ".json" + ), "The provided metadata path should be a .json file" + + logger.debug(f"Loading CoSTGCNLearner metadata from {str(path)}") + with open(path, "r") as f: + meta_data = json.load(f) + + inference_params = meta_data["inference_params"] + optimizer_info = meta_data["optimizer_info"] + + self.__init__( + lr=optimizer_info["lr"], + iters=optimizer_info["iters"], + batch_size=optimizer_info["batch_size"], + optimizer=optimizer_info["optimizer"], + device=getattr(self, "device", "cpu"), + backbone=inference_params["backbone"], + network_head=inference_params["network_head"], + loss=optimizer_info["loss"], + checkpoint_after_iter=optimizer_info["checkpoint_after_iter"], + checkpoint_load_iter=optimizer_info["checkpoint_load_iter"], + weight_decay=optimizer_info["weight_decay"], + momentum=optimizer_info["momentum"], + drop_last=optimizer_info["drop_last"], + pin_memory=optimizer_info["pin_memory"], + num_workers=optimizer_info["num_workers"], + seed=optimizer_info["seed"], + num_classes=inference_params["num_classes"], + num_point=inference_params["num_point"], + num_person=inference_params["num_person"], + in_channels=inference_params["in_channels"], + graph_type=inference_params["graph_type"], + sequence_len=inference_params["sequence_len"], + ) + + weights_path = path.parent / meta_data["model_paths"] + self._load_model_weights(weights_path) + + return self + + def reset(self): + pass + + def fit( + self, + dataset: Dataset, + val_dataset: Dataset = None, + epochs: int = None, + steps: int = None, + 
*args, + **kwargs, + ): + """Fit the model to a dataset + + Args: + dataset (Dataset): Training dataset + val_dataset (Dataset, optional): Validation dataset. + If none is given, validation steps are skipped. Defaults to None. + epochs (int, optional): Number of epochs. If none is supplied, self.iters will be used. Defaults to None. + steps (int, optional): Number of training steps to conduct. If none, this is determined by epochs. Defaults to None. + """ + train_dataloader = torch.utils.data.DataLoader( + dataset, + batch_size=self.batch_size, + num_workers=self.num_workers, + shuffle=True, + pin_memory=self.pin_memory, + drop_last=self.drop_last, + ) + val_dataloader = ( + torch.utils.data.DataLoader( + val_dataset, + batch_size=self.batch_size, + num_workers=self.num_workers, + shuffle=False, + pin_memory=self.pin_memory, + drop_last=self.drop_last, + ) + if val_dataset + else None + ) + + optimisation_metric = "val/loss" if val_dataset else "train/loss" + + # Patch model optimizer + assert self.optimizer in { + "adam", + "sgd", + }, f"Invalid optimizer '{self.optimizer}'. Must be 'adam' or 'sgd'." + if self.optimizer == "adam": + Optimizer = partial( + torch.optim.Adam, + lr=self.lr, + betas=(self.momentum, 0.999), + weight_decay=self.weight_decay, + ) + else: # self.optimizer == "sgd": + Optimizer = partial( + torch.optim.Adam, + lr=self.lr, + momentum=self.momentum, + weight_decay=self.weight_decay, + ) + + def configure_optimizers(): + # nonlocal Optimizer, optimisation_metric + optimizer = Optimizer(self.model.parameters()) + scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, patience=10) + return { + "optimizer": optimizer, + "lr_scheduler": scheduler, + "monitor": optimisation_metric, + } + + self.model.configure_optimizers = configure_optimizers + + self.trainer = pl.Trainer( + max_epochs=epochs or self.iters, + gpus=1 if "cuda" in self.device else 0, + callbacks=[ + pl.callbacks.ModelCheckpoint( + save_top_k=1, + verbose=True, + monitor=optimisation_metric, + mode="min", + prefix="", + ) + ], + logger=_experiment_logger(), + ) + self.trainer.limit_train_batches = steps or self.trainer.limit_train_batches + self.trainer.limit_val_batches = steps or self.trainer.limit_val_batches + + self.trainer.fit(self.model, train_dataloader, val_dataloader) + self.model.to(self.device) + + def eval(self, dataset: Dataset, steps: int = None) -> Dict[str, Any]: + """Evaluate the model on the dataset + + Args: + dataset (Dataset): Dataset on which to evaluate model + steps (int, optional): Number of validation batches to evaluate. + If None, all batches are evaluated. Defaults to None. + + Returns: + Dict[str, Any]: Evaluation statistics + """ + test_dataloader = torch.utils.data.DataLoader( + dataset, + batch_size=self.batch_size, + num_workers=self.num_workers, + shuffle=False, + pin_memory=self.pin_memory, + drop_last=False, + ) + + if not hasattr(self, "trainer"): + self.trainer = pl.Trainer( + gpus=1 if "cuda" in self.device else 0, + logger=_experiment_logger(), + ) + self.trainer.limit_test_batches = steps or self.trainer.limit_test_batches + results = self.trainer.test(self.model, test_dataloader) + results = { + "accuracy": results[-1]["test/acc"], + "loss": results[-1]["test/loss"], + } + return results + + def optimize(self, do_constant_folding=False): + """Optimize model execution. + This is accomplished by saving to the ONNX format and loading the optimized model. + + Args: + do_constant_folding (bool, optional): Whether to optimize constants. Defaults to False. 
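# --------------------------------------------------------------------------------------
# Optimization sketch (editorial addition): exporting the continual model to ONNX and then
# serving infer() through onnxruntime, as implemented in optimize()/_save_onnx()/_load_onnx().
# Paths follow the convention used in optimize(); batch_size=1 is chosen here because the
# ONNX example input is built from the learner's batch_size. Whether the export succeeds for
# a given backbone depends on the installed continual-inference/onnx versions, so treat this
# as a sketch under those assumptions rather than a guaranteed recipe.
import torch
from opendr.perception.skeleton_based_action_recognition.continual_stgcn_learner import CoSTGCNLearner

learner = CoSTGCNLearner(backbone="costgcn", device="cpu", temp_path="./tmp", batch_size=1)
learner.optimize()  # writes ./tmp/weights/costgcn_weights.onnx plus a costgcn_state.pickle
print(learner.infer(torch.randn(1, 3, 1, 25, 2))[0])  # now routed through the onnxruntime session
# --------------------------------------------------------------------------------------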
+ """ + + if getattr(self.model, "ort_session", None): + logger.info("Model is already optimized. Skipping redundant optimization") + return + + path = Path(self.temp_path or os.getcwd()) / "weights" / f"{self.backbone}_weights.onnx" + if not path.exists(): + self._save_onnx(path, do_constant_folding) + self._load_onnx(path) + + @property + def _example_input(self): + return torch.randn( + self.batch_size, self.in_channels, self.sequence_len, self.num_point, self.num_person + ).to(device=self.device) + + @_example_input.setter + def _example_input(self): + raise ValueError( + "_example_input is set through 'num_point', 'num_person', and 'in_channels' in constructor" + ) + + def _save_onnx(self, path: Union[str, Path], do_constant_folding=False, verbose=False): + """Save model in the ONNX format + + Args: + path (Union[str, Path]): Directory in which to save ONNX model + do_constant_folding (bool, optional): Whether to optimize constants. Defaults to False. + """ + path.parent.mkdir(exist_ok=True, parents=True) + + model = self.model.to(device="cpu") + model.eval() + + # Prepare state + state0 = None + with torch.no_grad(): + for i in range(model.receptive_field): + _, state0 = model._forward_step(self._example_input[:, :, i], state0) + _, state0 = model._forward_step(self._example_input[:, :, -1], state0) + state0 = co.utils.flatten(state0) + + # Export to ONNX + logger.info(f"Saving model to ONNX format at {str(path)}") + co.onnx.export( + model, + (self._example_input[:, :, -1], *state0), + path, + input_names=["input"], + output_names=["output"], + do_constant_folding=do_constant_folding, + verbose=verbose, + opset_version=12, + ) + + # Save default state and name mappings for later use + state_path = path.parent / f"{self.backbone}_state.pickle" + logger.info(f"Saving ONNX model states at {str(state_path)}") + omodel = co.onnx.OnnxWrapper(self.model) + state = {k: v.detach().numpy() for k, v in zip(omodel.state_input_names, state0)} + with open(state_path, "wb") as f: + pickle.dump(state, f) + + def _load_onnx(self, path: Union[str, Path]): + """Loads ONNX model into an onnxruntime inference session. 
+ + Args: + path (Union[str, Path]): Path to ONNX model + """ + onnx_path = path + state_path = path.parent / f"{self.backbone}_state.pickle" + + logger.info(f"Loading ONNX runtime inference session from {str(onnx_path)}") + self._ort_session = ort.InferenceSession(str(onnx_path)) + + logger.info(f"Loading ONNX state from {str(state_path)}") + with open(state_path, "rb") as f: + self._ort_state = pickle.load(f) + + +def _experiment_logger(): + return pl.loggers.TensorBoardLogger(save_dir=Path(os.getcwd()) / "logs", name="costgcn") diff --git a/src/opendr/perception/skeleton_based_action_recognition/dependencies.ini b/src/opendr/perception/skeleton_based_action_recognition/dependencies.ini index 6b8af3805a..087612c7b2 100644 --- a/src/opendr/perception/skeleton_based_action_recognition/dependencies.ini +++ b/src/opendr/perception/skeleton_based_action_recognition/dependencies.ini @@ -1,12 +1,16 @@ [runtime] # 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 torchvision==0.10.0 + protobuf<=3.20.0 tensorboardX>=2.0 matplotlib>=2.2.2 tqdm + pandas onnx==1.8.0 onnxruntime==1.3.0 + continual-inference>=1.0.2 + pytorch_lightning==1.2.3 opendr=opendr-toolkit-engine diff --git a/src/opendr/perception/speech_recognition/edgespeechnets/dependencies.ini b/src/opendr/perception/speech_recognition/edgespeechnets/dependencies.ini index 981ac6776e..e25e9929c0 100644 --- a/src/opendr/perception/speech_recognition/edgespeechnets/dependencies.ini +++ b/src/opendr/perception/speech_recognition/edgespeechnets/dependencies.ini @@ -1,9 +1,9 @@ [runtime] # 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 librosa==0.8.0 - numpy>=1.19 + numpy>=1.19,<=1.23.5 numba==0.53.0 linux=libsndfile1 diff --git a/src/opendr/perception/speech_recognition/edgespeechnets/edgespeechnets_learner.py b/src/opendr/perception/speech_recognition/edgespeechnets/edgespeechnets_learner.py index a3802ca2a0..ad07e26067 100644 --- a/src/opendr/perception/speech_recognition/edgespeechnets/edgespeechnets_learner.py +++ b/src/opendr/perception/speech_recognition/edgespeechnets/edgespeechnets_learner.py @@ -26,7 +26,7 @@ from opendr.engine.learners import Learner from opendr.engine.target import Category from opendr.perception.speech_recognition.edgespeechnets.algorithm.audioutils import get_mfcc -import opendr.perception.speech_recognition.edgespeechnets.algorithm.models as models +from opendr.perception.speech_recognition.edgespeechnets.algorithm import models as models class EdgeSpeechNetsLearner(Learner): diff --git a/src/opendr/perception/speech_recognition/matchboxnet/dependencies.ini b/src/opendr/perception/speech_recognition/matchboxnet/dependencies.ini index 0fad5d3ead..ca591d5aed 100644 --- a/src/opendr/perception/speech_recognition/matchboxnet/dependencies.ini +++ b/src/opendr/perception/speech_recognition/matchboxnet/dependencies.ini @@ -3,7 +3,7 @@ # https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 librosa==0.8.0 - numpy>=1.19 + numpy>=1.19,<=1.23.5 numba==0.53.0 linux=libsndfile1 diff --git a/src/opendr/perception/speech_recognition/quadraticselfonn/dependencies.ini 
b/src/opendr/perception/speech_recognition/quadraticselfonn/dependencies.ini index 981ac6776e..e25e9929c0 100644 --- a/src/opendr/perception/speech_recognition/quadraticselfonn/dependencies.ini +++ b/src/opendr/perception/speech_recognition/quadraticselfonn/dependencies.ini @@ -1,9 +1,9 @@ [runtime] # 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 librosa==0.8.0 - numpy>=1.19 + numpy>=1.19,<=1.23.5 numba==0.53.0 linux=libsndfile1 diff --git a/src/opendr/planning/end_to_end_planning/README.md b/src/opendr/planning/end_to_end_planning/README.md index a8e285992d..2c764c4f96 100644 --- a/src/opendr/planning/end_to_end_planning/README.md +++ b/src/opendr/planning/end_to_end_planning/README.md @@ -3,21 +3,28 @@ This folder contains the OpenDR Learner class for end-to-end planning tasks. This method uses reinforcement learning to train an agent that is able to generate local motion plans for a quadrotor UAV equipped with a depth camera. -### Simulation environment setup +### Using non-dynamic simulation environment -The environment includes an Ardupilot controlled quadrotor in Webots simulation. +The end-to-end planning agent is interacting with gym environment which communicates with Webots. +The environment is provided with the [webots world](../../../../src/opendr/planning/end_to_end_planning/envs/webots/worlds/train-no-dynamic-random-obstacles.wbt) +that needs to be opened with Webots version 2022b in order to demonstrate the end-to-end planner. + +### Using Ardupilot simulation environment + +The environment includes an optional Ardupilot controlled quadrotor in Webots for dynamic simulation. +The environment required to be initiated with argument `no_dynamics=False` For the installation of Ardupilot instructions are available [here](https://github.com/ArduPilot/ardupilot). -The required files to complete Ardupilot setup can be downloaded by running [`download_ardupilot_files.py`](src/opendr/planning/end_to_end_planning/download_ardupilot_files.py) script. +The required files to complete Ardupilot setup can be downloaded by running [download_ardupilot_files.py](../../../../src/opendr/planning/end_to_end_planning/download_ardupilot_files.py) script. The downloaded files (zipped as `ardupilot.zip`) should be replaced under the installation of Ardupilot. In order to run Ardupilot in Webots 2021a, controller codes should be replaced. (For older versions of Webots, these files can be skipped.) The world file for the environment is provided under `/ardupilot/libraries/SITL/examples/webots/worlds/` for training and testing. Install `mavros` package for ROS communication with Ardupilot. Instructions are available [here](https://github.com/mavlink/mavros/blob/master/mavros/README.md#installation). -Source installation is recomended. +Source installation is recommended. -### Running the environment +### Running Ardupilot environment The following steps should be executed to have a ROS communication between Gym environment and simulation. - Start the Webots and open the provided world file. @@ -30,4 +37,4 @@ The simulation time should stop at first time step and wait for Ardupilot softwa - `take_off` which takes off the quadrotor. - `range_image` which converts the depth image into array format to be input for the learner. 
-After these steps the [AgiEnv](src/opendr/planning/end_to_end_planning/envs/agi_env.py) gym environment can send action comments to the simulated drone and receive depth image and pose information from simulation. +After these steps the [UAVDepthPlanningEnv](../../../../src/opendr/planning/end_to_end_planning/envs/UAV_depth_planning_env.py) gym environment can send action comments to the simulated drone and receive depth image and pose information from simulation. diff --git a/src/opendr/planning/end_to_end_planning/__init__.py b/src/opendr/planning/end_to_end_planning/__init__.py index 3f5a5c45e9..9d31e9071e 100644 --- a/src/opendr/planning/end_to_end_planning/__init__.py +++ b/src/opendr/planning/end_to_end_planning/__init__.py @@ -1,4 +1,6 @@ from opendr.planning.end_to_end_planning.e2e_planning_learner import EndToEndPlanningRLLearner -from opendr.planning.end_to_end_planning.envs.agi_env import AgiEnv +import os +if os.environ.get("ROS_DISTRO") == "melodic" or os.environ.get("ROS_DISTRO") == "noetic": + from opendr.planning.end_to_end_planning.envs.UAV_depth_planning_env import UAVDepthPlanningEnv -__all__ = ['EndToEndPlanningRLLearner', 'AgiEnv'] +__all__ = ['EndToEndPlanningRLLearner', 'UAVDepthPlanningEnv'] diff --git a/src/opendr/planning/end_to_end_planning/dependencies.ini b/src/opendr/planning/end_to_end_planning/dependencies.ini index 81de02b377..83adfdde48 100644 --- a/src/opendr/planning/end_to_end_planning/dependencies.ini +++ b/src/opendr/planning/end_to_end_planning/dependencies.ini @@ -1,5 +1,4 @@ [compilation] -linux=libeigen3-dev python=vcstool rosdep rospkg @@ -9,8 +8,6 @@ python=vcstool empy gym==0.20.0 stable-baselines3==1.1.0 -[runtime] -# 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format -python=stable-baselines3 -linux=ros-noetic-webots-ros + scipy + +opendr=opendr-toolkit-engine diff --git a/src/opendr/planning/end_to_end_planning/e2e_planning_learner.py b/src/opendr/planning/end_to_end_planning/e2e_planning_learner.py index a0086a5ff2..5707523bd9 100644 --- a/src/opendr/planning/end_to_end_planning/e2e_planning_learner.py +++ b/src/opendr/planning/end_to_end_planning/e2e_planning_learner.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. import numpy as np -import rospy import gym import os from pathlib import Path @@ -21,31 +20,35 @@ from stable_baselines3 import PPO from stable_baselines3.common.monitor import Monitor from stable_baselines3.common.vec_env import DummyVecEnv -from stable_baselines3.common.results_plotter import load_results, ts2xy +from stable_baselines3.common.callbacks import CheckpointCallback from opendr.engine.learners import LearnerRL from opendr.engine.constants import OPENDR_SERVER_URL -__all__ = ["rospy", ] - class EndToEndPlanningRLLearner(LearnerRL): - def __init__(self, env, lr=3e-4, n_steps=1024, iters=int(5e4), batch_size=64, checkpoint_after_iter=500, + def __init__(self, env=None, lr=3e-4, n_steps=1024, iters=int(1e5), batch_size=64, checkpoint_after_iter=500, temp_path='', device='cuda'): """ Specifies a proximal policy optimization (PPO) agent that can be trained for end to end planning for obstacle avoidance. - Internally uses Stable-Baselines (https://github.com/hill-a/stable-baselines). + Internally uses Stable-Baselines 3 (https://github.com/DLR-RM/stable-baselines3.git). 
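# --------------------------------------------------------------------------------------
# Usage sketch (editorial addition): running the pretrained planner without a simulator.
# With env=None the constructor loads the PPO model shipped under $OPENDR_HOME, as described
# above, so this assumes OPENDR_HOME is set and the pretrained model file is present. The
# observation dict below only mimics the UAVDepthPlanningEnv observation space (a 64x64
# depth image plus a 3-D vector towards the target); it illustrates the infer() call rather
# than a full simulation run.
import numpy as np
from opendr.planning.end_to_end_planning import EndToEndPlanningRLLearner

learner = EndToEndPlanningRLLearner(env=None, device='cpu')
observation = {'depth_cam': np.zeros((64, 64, 1), dtype=np.uint8),
               'moving_target': np.array([5.0, 0.0, 0.0])}
action = learner.infer(observation, deterministic=True)  # PPO action for this observation
print(action)
# --------------------------------------------------------------------------------------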
""" super(EndToEndPlanningRLLearner, self).__init__(lr=lr, iters=iters, batch_size=batch_size, optimizer='adam', network_head='', temp_path=temp_path, checkpoint_after_iter=checkpoint_after_iter, device=device, threshold=0.0, scale=1.0) self.env = env - if isinstance(self.env, DummyVecEnv): - self.env = self.env.envs[0] - self.env = DummyVecEnv([lambda: self.env]) - self.agent = PPO("MultiInputPolicy", self.env, learning_rate=self.lr, n_steps=n_steps, - batch_size=self.batch_size, verbose=1) + self.n_steps = n_steps + if self.env is None: + self.agent = PPO.load(os.environ.get( + "OPENDR_HOME") + '/src/opendr/planning/end_to_end_planning/pretrained_model/saved_model.zip') + print("Learner is initiated with pretrained model without a gym model.") + else: + if isinstance(self.env, DummyVecEnv): + self.env = self.env.envs[0] + self.env = DummyVecEnv([lambda: self.env]) + self.agent = PPO("MultiInputPolicy", self.env, learning_rate=self.lr, n_steps=self.n_steps, + batch_size=self.batch_size, verbose=1) def download(self, path=None, url=OPENDR_SERVER_URL + "planning/end_to_end_planning"): @@ -59,24 +62,26 @@ def download(self, path=None, urlretrieve(url=url, filename=file_destination) return file_destination - def fit(self, env=None, logging_path='', silent=False, verbose=True): + def fit(self, env=None, logging_path='', verbose=True): """ Train the agent on the environment. :param env: gym.Env, optional, if specified use this env to train :param logging_path: str, path for logging and checkpointing - :param silent: bool, disable verbosity :param verbose: bool, enable verbosity - :return: """ if env is not None: if isinstance(env, gym.Env): - self.env = env + if isinstance(self.env, gym.Env): + self.env = env + else: + self.env = env + self.agent = PPO("MultiInputPolicy", self.env, learning_rate=self.lr, n_steps=self.n_steps, + batch_size=self.batch_size, verbose=verbose) else: print('env should be gym.Env') return self.last_checkpoint_time_step = 0 - self.mean_reward = -10 self.logdir = logging_path if isinstance(self.env, DummyVecEnv): self.env = self.env.envs[0] @@ -85,8 +90,8 @@ def fit(self, env=None, logging_path='', silent=False, verbose=True): self.env = Monitor(self.env, filename=self.logdir) self.env = DummyVecEnv([lambda: self.env]) self.agent.set_env(self.env) - self.agent.learn(total_timesteps=self.iters, callback=self.callback) - return {"last_20_episodes_mean_reward": self.mean_reward} + checkpoint_callback = CheckpointCallback(save_freq=1000, save_path=self.logdir, name_prefix='rl_model') + self.agent.learn(total_timesteps=self.iters, callback=checkpoint_callback) def eval(self, env): """ @@ -99,7 +104,6 @@ def eval(self, env): env = env.envs[0] if isinstance(env, Monitor): env = env.env - # env = Monitor(env, filename=self.logdir) env = DummyVecEnv([lambda: env]) self.agent.set_env(env) obs = env.reset() @@ -133,7 +137,8 @@ def load(self, path): :rtype: bool """ self.agent = PPO.load(path) - self.agent.set_env(self.env) + if isinstance(self.env, gym.Env): + self.agent.set_env(self.env) def infer(self, batch, deterministic: bool = True): """ @@ -158,19 +163,3 @@ def reset(self): def optimize(self, target_device): raise NotImplementedError() - - def callback(self, _locals, _globals): - x, y = ts2xy(load_results(self.logdir), 'timesteps') - - if len(y) > 20: - self.mean_reward = np.mean(y[-20:]) - else: - return True - - if x[-1] - self.last_checkpoint_time_step > self.checkpoint_after_iter: - self.last_checkpoint_time_step = x[-1] - check_point_path = Path(self.logdir, - 
'checkpoint_save' + str(x[-1]) + 'with_mean_rew' + str(self.mean_reward)) - self.save(str(check_point_path)) - - return True diff --git a/src/opendr/planning/end_to_end_planning/envs/UAV_depth_planning_env.py b/src/opendr/planning/end_to_end_planning/envs/UAV_depth_planning_env.py new file mode 100644 index 0000000000..a86ef68882 --- /dev/null +++ b/src/opendr/planning/end_to_end_planning/envs/UAV_depth_planning_env.py @@ -0,0 +1,426 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import gym +from cv_bridge import CvBridge +import webots_ros.srv +from gym import spaces +import numpy as np +import rospy +from geometry_msgs.msg import PoseStamped, PointStamped +from std_msgs.msg import String +from nav_msgs.msg import Path +from webots_ros.msg import BoolStamped +from sensor_msgs.msg import Imu, Image +from opendr.planning.end_to_end_planning.utils.obstacle_randomizer import ObstacleRandomizer +from opendr.planning.end_to_end_planning.utils.euler_quaternion_transformations import euler_from_quaternion +from opendr.planning.end_to_end_planning.utils.euler_quaternion_transformations import euler_to_quaternion + + +class UAVDepthPlanningEnv(gym.Env): + metadata = {'render.modes': ['human']} + + def __init__(self, no_dynamics=True, discrete_actions=False): + super(UAVDepthPlanningEnv, self).__init__() + + # Gym elements + self.observation_space = spaces.Dict( + {'depth_cam': spaces.Box(low=0, high=255, shape=(64, 64, 1), dtype=np.uint8), + 'moving_target': spaces.Box(low=-np.inf, high=np.inf, shape=(3,), dtype=np.float64)}) + self.is_discrete_actions = discrete_actions + if self.is_discrete_actions: + self.action_space = gym.spaces.Discrete(7) + else: + self.action_space = gym.spaces.Box(low=-1, high=+1, shape=(2,), dtype=np.float64) + self.action_dictionary = {0: (1, 1), # used for discrete actions + 1: (1, 0), + 2: (0, 1), + 3: (0, 0), + 4: (0, -1), + 5: (-1, 0), + 6: (-1, -1)} + self.step_length = 1 # meter + + self.current_position = PoseStamped().pose.position + self.current_yaw = 0 + self.current_orientation = PoseStamped().pose.orientation + self.range_image = np.ones((64, 64, 1), dtype=np.float32) + self.collision_flag = False + self.safety1_flag = False + self.safety2_flag = False + self.enable_safety_reward = True + self.model_name = "" + self.target_y = 0 + self.target_z = 2.5 + self.start_x = -10 + self.forward_direction = True + self.parkour_length = 30 + self.episode_counter = 0 + self.closer_object_length = 5 + self.no_dynamics = no_dynamics + + # ROS connection + self.bridge = CvBridge() + rospy.init_node('gym_depth_planning_environment') + self.r = rospy.Rate(25) + self.ros_pub_pose = rospy.Publisher('mavros/setpoint_position/local', PoseStamped, queue_size=10) + self.ros_pub_target = rospy.Publisher('target_position', PoseStamped, queue_size=10) + self.ros_pub_trajectory = rospy.Publisher('uav_trajectory', Path, queue_size=10) + self.ros_pub_global_trajectory = rospy.Publisher('uav_global_trajectory', Path, queue_size=10) + 
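# --------------------------------------------------------------------------------------
# Clarifying sketch (editorial addition): how a discrete action index is interpreted.
# step() below looks the index up in action_dictionary and converts the first element into
# forward/side components and the second into a yaw increment, both scaled by 22.5 degrees.
# This standalone helper is hypothetical and only restates that mapping for readability.
import numpy as np

def decode_discrete_action(index, action_dictionary):
    direction, yaw = action_dictionary[index]
    forward_step = np.cos(direction * 22.5 / 180 * np.pi)
    side_step = np.sin(direction * 22.5 / 180 * np.pi)
    yaw_step = yaw * 22.5 / 180 * np.pi
    return forward_step, side_step, yaw_step
# --------------------------------------------------------------------------------------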
self.global_traj = Path() + self.uav_trajectory = Path() + rospy.Subscriber("/mavros/local_position/pose", PoseStamped, self.pose_callback) + rospy.Subscriber("/model_name", String, self.model_name_callback) + counter = 0 + rospy.loginfo("Waiting for webots model to start!") + while self.model_name == "": + self.r.sleep() + counter += 1 + if counter > 25: + break + if self.model_name == "": + rospy.loginfo("Webots model is not started!") + return + self.randomizer = ObstacleRandomizer(self.model_name) + rospy.Subscriber("/touch_sensor_collision/value", BoolStamped, self.collision_callback) + rospy.Subscriber("/touch_sensor_safety1/value", BoolStamped, self.safety1_callback) + rospy.Subscriber("/touch_sensor_safety2/value", BoolStamped, self.safety2_callback) + rospy.Subscriber("/range_finder/range_image", Image, self.range_callback, queue_size=1) + self.ros_srv_touch_sensor_collision_enable = rospy.ServiceProxy( + "/touch_sensor_collision/enable", webots_ros.srv.set_int) + self.ros_srv_touch_sensor_safety1_enable = rospy.ServiceProxy( + "/touch_sensor_safety1/enable", webots_ros.srv.set_int) + self.ros_srv_touch_sensor_safety2_enable = rospy.ServiceProxy( + "/touch_sensor_safety2/enable", webots_ros.srv.set_int) + self.ros_srv_range_sensor_enable = rospy.ServiceProxy( + "/range_finder/enable", webots_ros.srv.set_int) + try: + self.ros_srv_touch_sensor_collision_enable(1) + self.ros_srv_range_sensor_enable(1) + except rospy.ServiceException as exc: + print("Service did not process request: " + str(exc)) + try: + self.ros_srv_touch_sensor_safety1_enable(1) + self.ros_srv_touch_sensor_safety2_enable(1) + except rospy.ServiceException as exc: + print("Service did not process request: " + str(exc)) + + self.set_target() + self.r.sleep() + vo = self.difference_between_points(self.target_position, self.current_position) + self.vector_observation = np.array([vo[0] * np.cos(self.current_yaw * 22.5 / 180 * np.pi) - vo[1] * np.sin( + self.current_yaw * 22.5 / 180 * np.pi), + vo[0] * np.sin(self.current_yaw * 22.5 / 180 * np.pi) + vo[1] * np.cos( + self.current_yaw * 22.5 / 180 * np.pi), + vo[2]]) + self.observation = {'depth_cam': np.copy(self.range_image), 'moving_target': np.copy(self.vector_observation)} + self.r.sleep() + + if no_dynamics: + self.ros_srv_gps_sensor_enable = rospy.ServiceProxy( + "/gps/enable", webots_ros.srv.set_int) + self.ros_srv_inertial_unit_enable = rospy.ServiceProxy( + "/inertial_unit/enable", webots_ros.srv.set_int) + self.ros_srv_get_self = rospy.ServiceProxy( + "/supervisor/get_self", webots_ros.srv.get_uint64) + self.ros_srv_get_field = rospy.ServiceProxy( + "/supervisor/node/get_field", webots_ros.srv.node_get_field) + self.ros_srv_field_set_v3 = rospy.ServiceProxy( + "/supervisor/field/set_vec3f", webots_ros.srv.field_set_vec3f) + self.ros_srv_field_set_rotation = rospy.ServiceProxy( + "/supervisor/field/set_rotation", webots_ros.srv.field_set_rotation) + rospy.Subscriber("/inertial_unit/quaternion", Imu, self.imu_callback) + rospy.Subscriber("/gps/values", PointStamped, self.gps_callback) + try: + self.ros_srv_gps_sensor_enable(1) + self.ros_srv_inertial_unit_enable(1) + except rospy.ServiceException as exc: + print("Service did not process request: " + str(exc)) + try: + resp1 = self.ros_srv_get_self(True) + self.robot_node_id = resp1.value + except rospy.ServiceException as exc: + print("Service did not process request: " + str(exc)) + try: + resp1 = self.ros_srv_get_field(self.robot_node_id, 'translation', False) + resp2 = 
self.ros_srv_get_field(self.robot_node_id, 'rotation', False) + self.robot_translation_field = resp1.field + self.robot_rotation_field = resp2.field + except rospy.ServiceException as exc: + print("Service did not process request: " + str(exc)) + + def step(self, action): + if self.model_name == "": + rospy.loginfo("Gym environment cannot connect to Webots") + return self.observation_space.sample(), np.random.random(1), False, {} + if self.is_discrete_actions: + action = self.action_dictionary[action] + forward_step = np.cos(action[0] * 22.5 / 180 * np.pi) + side_step = np.sin(action[0] * 22.5 / 180 * np.pi) + yaw_step = action[1] * 22.5 / 180 * np.pi + + # take the step + prev_x = self.current_position.x + if self.forward_direction: + yaw = self.current_yaw + yaw_step + if yaw > np.pi / 2: + yaw = np.pi / 2 + if yaw < -np.pi / 2: + yaw = -np.pi / 2 + self.go_position( + self.current_position.x + forward_step * np.cos(self.current_yaw) - side_step * np.sin( + self.current_yaw), + self.current_position.y + forward_step * np.sin(self.current_yaw) + side_step * np.cos( + self.current_yaw), + self.target_z, yaw=yaw, + check_collision=True) + else: + yaw = self.current_yaw + yaw_step + if np.abs(yaw) < np.pi / 2: + yaw = np.sign(yaw) * np.pi / 2 + self.go_position( + self.current_position.x + forward_step * np.cos(self.current_yaw) - side_step * np.sin( + self.current_yaw), + self.current_position.y + forward_step * np.sin(self.current_yaw) + side_step * np.cos( + self.current_yaw), + self.target_z, yaw=yaw, + check_collision=True) + self.update_trajectory() + + self.r.sleep() + + dx = np.abs(self.current_position.x - prev_x) + dy = np.abs(self.current_position.y - self.target_position.y) + dyaw = np.abs(self.current_yaw) + + # calculate reward + reward = 2 * dx - 0.4 * dy - 0.3 * dyaw + if self.enable_safety_reward: + if self.safety2_flag: + reward -= 2 + if self.safety1_flag: + reward -= 10 + + # set new observation + if self.forward_direction: + self.set_target() + vo = self.difference_between_points(self.target_position, self.current_position) + self.vector_observation = np.array([vo[0] * np.cos(self.current_yaw) + vo[1] * np.sin(self.current_yaw), + -vo[0] * np.sin(self.current_yaw) + vo[1] * np.cos(self.current_yaw), + vo[2]]) + self.observation = {'depth_cam': np.copy(self.range_image), + 'moving_target': np.copy(self.vector_observation)} + finish_passed = (self.current_position.x > self.parkour_length + self.start_x) + else: + self.set_target() + vo = self.difference_between_points(self.current_position, self.target_position) + self.vector_observation = np.array( + [vo[0] * np.cos(self.current_yaw + np.pi) + vo[1] * np.sin(self.current_yaw + np.pi), + -vo[0] * np.sin(self.current_yaw + np.pi) + vo[1] * np.cos(self.current_yaw + np.pi), + vo[2]]) + self.observation = {'depth_cam': np.copy(self.range_image), + 'moving_target': np.copy(self.vector_observation)} + finish_passed = (self.current_position.x < self.start_x - self.parkour_length) + + # check done and update reward + if finish_passed: + reward = 20 + done = True + elif abs(self.current_position.y - self.target_y) > 5: + reward = -10 + done = True + elif self.collision_flag: + reward = -20 + done = True + else: + done = False + + info = {"current_position": self.current_position, "finish_passed": finish_passed, + "safety_flags": [self.safety1_flag, self.safety2_flag], "closer_object": self.closer_object_length} + self.safety1_flag = False + self.safety2_flag = False + return self.observation, reward, done, info + + def 
reset(self): + if self.model_name == "": + rospy.loginfo("Gym environment cannot connect to Webots") + return self.observation_space.sample() + if self.no_dynamics: + self.go_position(self.start_x, self.target_y + np.random.uniform(-0.5, 0.5), self.target_z, + yaw=(1 - self.forward_direction) * np.pi) + else: + self.go_position(self.current_position.x, self.current_position.y, 8) + self.go_position(self.start_x, self.current_position.y, 8) + self.go_position(self.start_x, self.target_y, 8) + self.go_position(self.start_x, self.target_y + np.random.uniform(-0.5, 0.5), self.target_z) + self.r.sleep() + self.uav_trajectory.header.frame_id = "map" + self.update_trajectory() + self.publish_global_trajectory() + + self.collision_flag = False + self.safety1_flag = False + self.safety2_flag = False + self.set_target() + if self.forward_direction: + self.vector_observation = self.difference_between_points(self.target_position, self.current_position) + else: + self.vector_observation = self.difference_between_points(self.current_position, self.target_position) + self.observation = {'depth_cam': np.copy(self.range_image), 'moving_target': np.copy(self.vector_observation)} + self.randomizer.randomize_environment() + return self.observation + + def set_target(self): + self.target_position = PoseStamped().pose.position + if self.forward_direction: + self.target_position.x = self.current_position.x + 5 + else: + self.target_position.x = self.current_position.x - 5 + self.target_position.y = self.target_y + self.target_position.z = self.target_z + self.publish_target() + + def render(self, mode='human', close=False): + pass + + def pose_callback(self, data): + self.current_position = data.pose.position + self.current_orientation = data.pose.orientation + self.current_yaw = euler_from_quaternion(data.pose.orientation)["yaw"] + + def range_callback(self, data): + image_arr = self.bridge.imgmsg_to_cv2(data) + self.range_image = ((np.clip(image_arr.reshape((64, 64, 1)), 0, 15) / 15.) 
* 255).astype(np.uint8) + + def model_name_callback(self, data): + if data.data[:5] == "robot": + self.model_name = data.data + if data.data[:4] == "quad": + self.model_name = data.data + + def collision_callback(self, data): + if data.data: + self.collision_flag = True + + def safety1_callback(self, data): + if data.data: + self.safety1_flag = True + + def safety2_callback(self, data): + if data.data: + self.safety2_flag = True + + def gps_callback(self, data): # for no dynamics + self.current_position.x = -data.point.x + self.current_position.y = -data.point.y + self.current_position.z = data.point.z + + def imu_callback(self, data): # for no dynamics + self.current_orientation = data.orientation + self.current_yaw = euler_from_quaternion(data.orientation)["yaw"] + + def go_position(self, x, y, z, yaw=0, check_collision=False): + if self.no_dynamics: + goal = PoseStamped() + + goal.header.seq = 1 + goal.header.stamp = rospy.Time.now() + + goal.pose.position.x = -x + goal.pose.position.y = -y + goal.pose.position.z = z + + goal.pose.orientation = euler_to_quaternion(np.pi/2, 0, -np.pi/2+yaw) + try: + self.ros_srv_field_set_v3(self.robot_translation_field, 0, goal.pose.position) + self.ros_srv_field_set_rotation(self.robot_rotation_field, 0, goal.pose.orientation) + self.r.sleep() + except rospy.ServiceException as exc: + print("Service did not process request: " + str(exc)) + else: + goal = PoseStamped() + + goal.header.seq = 1 + goal.header.stamp = rospy.Time.now() + + goal.pose.position.x = x + goal.pose.position.y = y + goal.pose.position.z = z + + goal.pose.orientation = euler_to_quaternion(0, 0, yaw) + self.current_yaw = yaw + self.ros_pub_pose.publish(goal) + self.r.sleep() + while self.distance_between_points(goal.pose.position, self.current_position) > 0.1: + if check_collision and self.collision_flag: + return + self.ros_pub_pose.publish(goal) + self.r.sleep() + + def publish_target(self): + goal = PoseStamped() + + goal.header.seq = 1 + goal.header.stamp = rospy.Time.now() + goal.header.frame_id = "map" + + goal.pose.position = self.target_position + + goal.pose.orientation.x = 0.0 + goal.pose.orientation.y = 0.0 + goal.pose.orientation.z = 0.0 + goal.pose.orientation.w = 1.0 + self.ros_pub_target.publish(goal) + + def update_trajectory(self): + new_point = PoseStamped() + new_point.header.seq = 1 + new_point.header.stamp = rospy.Time.now() + new_point.header.frame_id = "map" + new_point.pose.position.x = self.current_position.x + new_point.pose.position.y = self.current_position.y + new_point.pose.position.z = self.current_position.z + self.uav_trajectory.poses.append(new_point) + self.ros_pub_trajectory.publish(self.uav_trajectory) + + def publish_global_trajectory(self): + self.global_traj.header.frame_id = "map" + new_point = PoseStamped() + new_point.header.seq = 1 + new_point.header.stamp = rospy.Time.now() + new_point.header.frame_id = "map" + new_point.pose.position.x = self.start_x + new_point.pose.position.y = self.target_y + new_point.pose.position.z = self.target_z + self.global_traj.poses.append(new_point) + new_point = PoseStamped() + new_point.header.seq = 1 + new_point.header.stamp = rospy.Time.now() + new_point.header.frame_id = "map" + if self.forward_direction: + new_point.pose.position.x = self.start_x + self.parkour_length + else: + new_point.pose.position.x = self.start_x - self.parkour_length + new_point.pose.position.y = self.target_y + new_point.pose.position.z = self.target_z + self.global_traj.poses.append(new_point) + 
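# --------------------------------------------------------------------------------------
# Interaction sketch (editorial addition): a minimal, bounded episode loop over the
# environment defined above. It assumes the corresponding Webots world is already running
# and the ROS topics/services listed in __init__ are reachable; otherwise the environment
# only returns sampled observations. The printed reward is the shaped term
# 2*dx - 0.4*dy - 0.3*dyaw (plus safety penalties and terminal bonuses) computed in step().
from opendr.planning.end_to_end_planning.envs.UAV_depth_planning_env import UAVDepthPlanningEnv

env = UAVDepthPlanningEnv(no_dynamics=True, discrete_actions=False)
obs = env.reset()
for _ in range(50):
    action = env.action_space.sample()  # [heading offset, yaw change], each in [-1, 1]
    obs, reward, done, info = env.step(action)
    print(reward, info.get("finish_passed"))
    if done:
        break
# --------------------------------------------------------------------------------------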
self.ros_pub_global_trajectory.publish(self.global_traj) + + def distance_between_points(self, p1, p2): + x = p1.x - p2.x + y = p1.y - p2.y + z = p1.z - p2.z + return np.sqrt(x * x + y * y + z * z) + + def difference_between_points(self, p1, p2): + return np.array([p1.x - p2.x, p1.y - p2.y, p1.z - p2.z]) diff --git a/src/opendr/planning/end_to_end_planning/envs/agi_env.py b/src/opendr/planning/end_to_end_planning/envs/agi_env.py deleted file mode 100644 index 69e476e904..0000000000 --- a/src/opendr/planning/end_to_end_planning/envs/agi_env.py +++ /dev/null @@ -1,313 +0,0 @@ -# Copyright 2020-2022 OpenDR European Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import gym -from gym import spaces -import numpy as np -import rospy -from geometry_msgs.msg import PoseStamped -from std_msgs.msg import Float32MultiArray -from std_msgs.msg import String -from nav_msgs.msg import Path -from webots_ros.msg import BoolStamped - - -def euler_from_quaternion(x, y, z, w): - """ - Convert a quaternion into euler angles (roll, pitch, yaw) - roll is rotation around x in radians (counterclockwise) - pitch is rotation around y in radians (counterclockwise) - yaw is rotation around z in radians (counterclockwise) - """ - t3 = +2.0 * (w * z + x * y) - t4 = +1.0 - 2.0 * (y * y + z * z) - yaw_z = np.atan2(t3, t4) - - return yaw_z / np.pi * 180 # in radians - - -class AgiEnv(gym.Env): - metadata = {'render.modes': ['human']} - - def __init__(self): - super(AgiEnv, self).__init__() - - # Gym elements - self.action_space = gym.spaces.Discrete(7) - self.observation_space = spaces.Dict( - {'depth_cam': spaces.Box(low=0, high=255, shape=(64, 64, 1), dtype=np.uint8), - 'moving_target': spaces.Box(low=-np.inf, high=np.inf, shape=(3,), dtype=np.float64)}) - - self.action_dictionary = {0: (np.cos(22.5 / 180 * np.pi), np.sin(22.5 / 180 * np.pi), 1), - 1: (np.cos(22.5 / 180 * np.pi), np.sin(22.5 / 180 * np.pi), 0), - 2: (1, 0, 1), - 3: (1, 0, 0), - 4: (1, 0, -1), - 5: (np.cos(22.5 / 180 * np.pi), -np.sin(22.5 / 180 * np.pi), 0), - 6: (np.cos(22.5 / 180 * np.pi), -np.sin(22.5 / 180 * np.pi), -1), - 7: (0, 0, 2), - 8: (0, 0, -2)} - self.step_length = 1 # meter - - self.current_position = PoseStamped().pose.position - self.current_yaw = 0 - self.range_image = np.ones((64, 64), dtype=np.float32) - self.collision_flag = False - self.model_name = "" - - # ROS connection - rospy.init_node('agi_gym_environment') - self.r = rospy.Rate(10) - self.ros_pub_pose = rospy.Publisher('mavros/setpoint_position/local', PoseStamped, queue_size=10) - self.ros_pub_target = rospy.Publisher('target_position', PoseStamped, queue_size=10) - self.ros_pub_trajectory = rospy.Publisher('uav_trajectory', Path, queue_size=10) - self.ros_pub_global_trajectory = rospy.Publisher('uav_global_trajectory', Path, queue_size=10) - self.global_traj = Path() - self.uav_trajectory = Path() - rospy.Subscriber("/mavros/local_position/pose", PoseStamped, self.pose_callback) - rospy.Subscriber("/range_image_raw", Float32MultiArray, self.range_image_callback) - 
rospy.Subscriber("/model_name", String, self.model_name_callback) - self.r.sleep() - rospy.Subscriber("/" + self.model_name + "/touch_sensor/value", BoolStamped, self.collision_callback) - - self.target_y = -22 - self.target_y_list = [-22, -16, -10, -4, 2, 7, 12] # evaluation map:[-22, -16, -10, -4, 2, 8, 14, 20, 26, 32] - self.target_z = 2.5 - self.start_x = -10 - self.forward_direction = True - self.parkour_length = 30 - self.episode_counter = 0 - - self.set_target() - self.r.sleep() - vo = self.difference_between_points(self.target_position, self.current_position) - self.vector_observation = np.array([vo[0] * np.cos(self.current_yaw * 22.5 / 180 * np.pi) - vo[1] * np.sin( - self.current_yaw * 22.5 / 180 * np.pi), - vo[0] * np.sin(self.current_yaw * 22.5 / 180 * np.pi) + vo[1] * np.cos( - self.current_yaw * 22.5 / 180 * np.pi), - vo[2]]) - self.observation = {'depth_cam': np.copy(self.range_image), 'moving_target': np.copy(self.vector_observation)} - self.r.sleep() - - self.image_count = 0 - - def step(self, discrete_action): - if self.current_position == PoseStamped().pose.position: - rospy.loginfo("Gym environment is not reading mavros position") - return self.observation_space.sample(), np.random.random(1), False, {} - action = self.action_dictionary[discrete_action] - action = (action[0] * self.step_length, action[1] * self.step_length, action[2]) - prev_x = self.current_position.x - if self.forward_direction: - self.go_position( - self.current_position.x + action[0] * np.cos(self.current_yaw * 22.5 / 180 * np.pi) - action[ - 1] * np.sin(self.current_yaw * 22.5 / 180 * np.pi), - self.current_position.y + action[0] * np.sin(self.current_yaw * 22.5 / 180 * np.pi) + action[ - 1] * np.cos(self.current_yaw * 22.5 / 180 * np.pi), self.target_z, yaw=self.current_yaw + action[2], - check_collision=True) - else: - self.go_position( - self.current_position.x - action[0] * np.cos(self.current_yaw * 22.5 / 180 * np.pi) + action[ - 1] * np.sin(self.current_yaw * 22.5 / 180 * np.pi), - self.current_position.y - action[0] * np.sin(self.current_yaw * 22.5 / 180 * np.pi) - action[ - 1] * np.cos(self.current_yaw * 22.5 / 180 * np.pi), self.target_z, yaw=self.current_yaw + action[2], - check_collision=True) - self.update_trajectory() - - dx = np.abs(self.current_position.x - prev_x) - dy = np.abs(self.current_position.y - self.target_position.y) - dyaw = np.abs(self.current_yaw) - reward = 2 * dx - 0.4 * dy - 0.3 * dyaw - - # set new observation - if self.forward_direction: - self.set_target() - vo = self.difference_between_points(self.target_position, self.current_position) - self.vector_observation = np.array([vo[0] * np.cos(self.current_yaw * 22.5 / 180 * np.pi) + vo[1] * np.sin( - self.current_yaw * 22.5 / 180 * np.pi), - -vo[0] * np.sin(self.current_yaw * 22.5 / 180 * np.pi) + vo[1] * np.cos( - self.current_yaw * 22.5 / 180 * np.pi), - vo[2]]) - self.observation = {'depth_cam': np.copy(self.range_image), - 'moving_target': np.copy(self.vector_observation)} - finish_passed = (self.current_position.x > self.parkour_length + self.start_x) - else: - self.set_target() - vo = self.difference_between_points(self.current_position, self.target_position) - self.vector_observation = np.array([vo[0] * np.cos(self.current_yaw * 22.5 / 180 * np.pi) + vo[1] * np.sin( - self.current_yaw * 22.5 / 180 * np.pi), - -vo[0] * np.sin(self.current_yaw * 22.5 / 180 * np.pi) + vo[1] * np.cos( - self.current_yaw * 22.5 / 180 * np.pi), - vo[2]]) - self.observation = {'depth_cam': np.copy(self.range_image), - 'moving_target': 
np.copy(self.vector_observation)} - finish_passed = (self.current_position.x < self.start_x - self.parkour_length) - - # check done - if finish_passed: - reward = 20 - done = True - elif abs(self.current_position.y - self.target_y) > 5: - reward = -10 - done = True - elif self.collision_flag: - reward = -20 - done = True - else: - done = False - - info = {"current_position": self.current_position, "finish_passed": finish_passed} - return self.observation, reward, done, info - - def reset(self): - if self.current_position == PoseStamped().pose.position: - rospy.loginfo("Gym environment is not reading mavros position") - return self.observation_space.sample() - self.target_y = np.random.choice(self.target_y_list) - self.go_position(self.current_position.x, self.current_position.y, 8) - self.go_position(self.start_x, self.current_position.y, 8) - self.go_position(self.start_x, self.target_y, self.target_z) - self.uav_trajectory.header.frame_id = "map" - self.update_trajectory() - self.publish_global_trajectory() - - self.collision_flag = False - self.set_target() - if self.forward_direction: - self.vector_observation = self.difference_between_points(self.target_position, self.current_position) - else: - self.vector_observation = self.difference_between_points(self.current_position, self.target_position) - self.observation = {'depth_cam': np.copy(self.range_image), 'moving_target': np.copy(self.vector_observation)} - return self.observation - - def set_target(self): - self.target_position = PoseStamped().pose.position - if self.forward_direction: - self.target_position.x = self.current_position.x + 5 - else: - self.target_position.x = self.current_position.x - 5 - self.target_position.y = self.target_y - self.target_position.z = self.target_z - self.publish_target() - - def render(self, mode='human', close=False): - pass - - def pose_callback(self, data): - self.current_position = data.pose.position - - def range_image_callback(self, data): - self.range_image = ((np.clip(np.array(data.data).reshape((64, 64, 1)), 0, 15) / 15.)*255).astype(np.uint8) - - def model_name_callback(self, data): - if data.data[:5] == "robot": - self.model_name = data.data - - def collision_callback(self, data): - if data.data: - self.collision_flag = True - # print("colliiiddeeee") - - def go_position(self, x, y, z, yaw=0, check_collision=False): - if yaw > 4: - yaw = 4 - if yaw < -4: - yaw = -4 - goal = PoseStamped() - - goal.header.seq = 1 - goal.header.stamp = rospy.Time.now() - # goal.header.frame_id = "map" - - goal.pose.position.x = x - goal.pose.position.y = y - goal.pose.position.z = z - - goal.pose.orientation.x = 0.0 - goal.pose.orientation.y = 0.0 - quat_z_yaw_dict = {-4: -0.7071068, -3: -0.5555702, -2: -0.3826834, -1: -0.1950903, 0: 0.0, 1: 0.1950903, - 2: 0.3826834, 3: 0.5555702, 4: 0.7071068} - quat_w_yaw_dict = {-4: 0.7071068, -3: 0.8314696, -2: 0.9238795, -1: 0.9807853, 0: 1.0, 1: 0.9807853, - 2: 0.9238795, 3: 0.8314696, 4: 0.7071068} - if self.forward_direction: - goal.pose.orientation.z = quat_z_yaw_dict[yaw] - goal.pose.orientation.w = quat_w_yaw_dict[yaw] - else: - goal.pose.orientation.z = -quat_w_yaw_dict[yaw] - goal.pose.orientation.w = quat_z_yaw_dict[yaw] - self.current_yaw = yaw - self.ros_pub_pose.publish(goal) - self.r.sleep() - while self.distance_between_points(goal.pose.position, self.current_position) > 0.1: - if check_collision and self.collision_flag: - return - self.ros_pub_pose.publish(goal) - self.r.sleep() - - def publish_target(self): - goal = PoseStamped() - - goal.header.seq = 
1 - goal.header.stamp = rospy.Time.now() - goal.header.frame_id = "map" - - goal.pose.position = self.target_position - - goal.pose.orientation.x = 0.0 - goal.pose.orientation.y = 0.0 - goal.pose.orientation.z = 0.0 - goal.pose.orientation.w = 1.0 - self.ros_pub_target.publish(goal) - - def update_trajectory(self): - new_point = PoseStamped() - new_point.header.seq = 1 - new_point.header.stamp = rospy.Time.now() - new_point.header.frame_id = "map" - new_point.pose.position.x = self.current_position.x - new_point.pose.position.y = self.current_position.y - new_point.pose.position.z = self.current_position.z - self.uav_trajectory.poses.append(new_point) - self.ros_pub_trajectory.publish(self.uav_trajectory) - - def publish_global_trajectory(self): - self.global_traj.header.frame_id = "map" - new_point = PoseStamped() - new_point.header.seq = 1 - new_point.header.stamp = rospy.Time.now() - new_point.header.frame_id = "map" - new_point.pose.position.x = self.start_x - new_point.pose.position.y = self.target_y - new_point.pose.position.z = self.target_z - self.global_traj.poses.append(new_point) - new_point = PoseStamped() - new_point.header.seq = 1 - new_point.header.stamp = rospy.Time.now() - new_point.header.frame_id = "map" - if self.forward_direction: - new_point.pose.position.x = self.start_x + self.parkour_length - else: - new_point.pose.position.x = self.start_x - self.parkour_length - new_point.pose.position.y = self.target_y - new_point.pose.position.z = self.target_z - self.global_traj.poses.append(new_point) - self.ros_pub_global_trajectory.publish(self.global_traj) - - def distance_between_points(self, p1, p2): - x = p1.x - p2.x - y = p1.y - p2.y - z = p1.z - p2.z - return np.sqrt(x * x + y * y + z * z) - - def difference_between_points(self, p1, p2): - return np.array([p1.x - p2.x, p1.y - p2.y, p1.z - p2.z]) diff --git a/src/opendr/planning/end_to_end_planning/envs/webots/protos/box.proto b/src/opendr/planning/end_to_end_planning/envs/webots/protos/box.proto new file mode 100644 index 0000000000..9c34af8955 --- /dev/null +++ b/src/opendr/planning/end_to_end_planning/envs/webots/protos/box.proto @@ -0,0 +1,88 @@ +#VRML_SIM R2022b utf8 +# license: Copyright Cyberbotics Ltd. Licensed for use only with Webots. +# license url: https://cyberbotics.com/webots_assets_license +# This bounding object with a pipe shape is formed by a group of boxes. +PROTO box [ + field SFFloat height 0.2 # Defines the height of the pipe. + field SFFloat radius 0.5 # Defines the radius of the pipe. + field SFFloat thickness 0.05 # Defines the thickness of the pipe. + field SFInt32 subdivision 8 # Defines the number of polygons used to represent the pipe and so its resolution. + field SFFloat accuracy 0.0001 # Defines how much boxes position can differ on y axis: a 0 value represents an error-free model but it will slow down the simulation. +] +{ + %{ + local wbrandom = require('wbrandom') + + -- parameter checking + local subdivision = fields.subdivision.value + if subdivision > 200 then + io.stderr:write("High value for 'subdivision'. 
This can slow down the simulation\n") + elseif subdivision < 8 then + io.stderr:write("'subdivision' must be greater than or equal to 8\n") + subdivision = 8 + end + + local height = fields.height.value + if height <= 0 then + io.stderr:write("'height' must be greater than 0\n") + height = fields.height.defaultValue + end + + local radius = fields.radius.value + if radius <= 0 then + io.stderr:write("'radius' must be greater than 0\n") + radius = fields.radius.defaultValue + end + + local thickness = fields.thickness.value + if thickness <= 0 then + io.stderr:write("'thickness' must be greater than 0\n") + thickness = radius / 2 + elseif thickness >= fields.radius.value then + io.stderr:write("'thickness' must be smaller than 'radius'\n") + thickness = radius / 2 + end + + -- global stuff before entering in the main loop + local beta = 2.0 * math.pi / subdivision + local alpha = beta / 2.0 + local innerRadius = radius - thickness + local su = radius * math.cos(alpha) - innerRadius + if su < 0 then + -- fixed edge case: + -- There are 2 inner radius, depending if we measure it along the center or along the edge of the boxes. + -- If the thickness is below the difference of these two radius, then the algorithm can not achieve. + io.stderr:write("Either 'thickness' or 'subdivision' are too small for the box subdivision algorithm.\n") + su = math.abs(su) + end + local sv = height + local sw = radius * math.sin(alpha) * 2.0 + local boxRadius = innerRadius + su / 2.0 + }% + Group { # set of boxes + children [ + %{ for i = 0, (subdivision - 1) do }% + %{ + -- position of an internal box + local gamma = beta * i + beta / 2 + local ax = boxRadius * math.sin(gamma) + local ay = 0 + local az = boxRadius * math.cos(gamma) + local angle = gamma + 0.5 * math.pi + -- add small offset to boxes y translation to reduce constraints + -- on the top and bottom face due to co-planarity + local offset = wbrandom.real(-1.0, 1.0) * fields.accuracy.value; + }% + Transform { + translation %{= ax}% %{= ay + offset }% %{= az}% + rotation 0 1 0 %{= angle }% + children [ + Box { + size %{= su}% %{= sv}% %{= sw}% + } + ] + } + %{ end }% + ] + } +} diff --git a/src/opendr/planning/end_to_end_planning/envs/webots/worlds/train-no-dynamic-random-obstacles.wbt b/src/opendr/planning/end_to_end_planning/envs/webots/worlds/train-no-dynamic-random-obstacles.wbt new file mode 100644 index 0000000000..61d53ceb3f --- /dev/null +++ b/src/opendr/planning/end_to_end_planning/envs/webots/worlds/train-no-dynamic-random-obstacles.wbt @@ -0,0 +1,503 @@ +#VRML_SIM R2022b utf8 + +EXTERNPROTO "https://raw.githubusercontent.com/cyberbotics/webots/R2022b/projects/appearances/protos/Grass.proto" +EXTERNPROTO "https://raw.githubusercontent.com/cyberbotics/webots/R2022b/projects/appearances/protos/Parquetry.proto" +EXTERNPROTO "https://raw.githubusercontent.com/cyberbotics/webots/R2022b/projects/objects/floors/protos/Floor.proto" +EXTERNPROTO "https://raw.githubusercontent.com/cyberbotics/webots/R2022b/projects/objects/apartment_structure/protos/Wall.proto" +EXTERNPROTO "../protos/box.proto" + +WorldInfo { + gravity 9.80665 + basicTimeStep 1 + FPS 15 + optimalThreadCount 4 + randomSeed 52 +} +Viewpoint { + orientation 0.23912921076912644 -0.010500223692226803 -0.9709309789368902 3.1000770696059305 + position 16.599575477443874 0.11479760710878642 11.449782726362042 + followType "Mounted Shot" +} +DEF DEF_VEHICLE Robot { + translation 2.01714 -0.173907 2.57 + rotation 0.5773502691896257 -0.5773502691896257 -0.5773502691896257 2.0943951023931957 + 
children [ + Lidar { + translation 0 0.07 0 + rotation 3.4621799999783786e-06 -0.999999999993755 -7.095049999955691e-07 3.14159 + horizontalResolution 32 + fieldOfView 1.57 + verticalFieldOfView 0.1 + numberOfLayers 1 + minRange 0.3 + maxRange 5 + } + RangeFinder { + translation 0 0.1 0 + rotation -0.5773502691896258 -0.5773502691896258 -0.5773502691896258 2.0943951023931957 + maxRange 15 + } + TouchSensor { + translation 0 0.03 0 + rotation 0 1 0 1.5708 + name "touch sensor-collision" + boundingObject box { + } + } + TouchSensor { + translation 0 0.03 0.5 + rotation 0 1 0 1.5708 + name "touch sensor-safety1" + boundingObject box { + radius 1 + subdivision 12 + } + } + TouchSensor { + translation 0 0.03 1 + rotation 0 1 0 1.5708 + name "touch sensor-safety2" + boundingObject box { + radius 1.5 + subdivision 16 + } + } + Receiver { + name "receiver_main" + type "serial" + channel 1 + bufferSize 32 + } + Emitter { + name "emitter_plugin" + description "commuicates with physics plugin" + } + Shape { + appearance Appearance { + material Material { + } + } + geometry Box { + size 0.1 0.1 0.1 + } + } + Camera { + translation 0 0.12 0 + rotation 0.1294279597735375 0.9831056944488314 0.1294279597735375 -1.58783 + name "camera1" + width 128 + height 128 + } + Compass { + name "compass1" + } + GPS { + name "gps" + } + Accelerometer { + name "accelerometer1" + } + Gyro { + name "gyro1" + } + InertialUnit { + rotation 0 1 0 1.5707947122222805 + name "inertial_unit" + } + Transform { + translation 0 0 0.1 + children [ + Shape { + appearance Appearance { + material Material { + } + } + geometry DEF DEF_ARM Cylinder { + height 0.1 + radius 0.01 + } + } + ] + } + Transform { + translation -0.09999999999999999 0 0 + rotation -0.7071067811865476 0 0.7071067811865476 -3.1415923071795864 + children [ + Shape { + appearance Appearance { + material Material { + } + } + geometry USE DEF_ARM + } + ] + } + Transform { + translation 0.09999999999999999 0 0 + rotation 0 -1 0 -1.5707963071795863 + children [ + Shape { + appearance Appearance { + material Material { + diffuseColor 1 0.09999999999999999 0 + } + } + geometry USE DEF_ARM + } + ] + } + Transform { + translation 0 0 -0.1 + children [ + Shape { + appearance Appearance { + material Material { + diffuseColor 0.7999999999999999 0.7999999999999999 0.7999999999999999 + } + } + geometry USE DEF_ARM + } + ] + } + ] + name "quad_plus_sitl" + boundingObject Box { + size 0.1 0.1 0.1 + } + rotationStep 0.261799 + controller "ros" + customData "1" + supervisor TRUE +} +Background { + skyColor [ + 0.15 0.5 1 + ] +} +DirectionalLight { +} +Floor { + translation 0 0 -1 + rotation 0 0 1 1.5707963267948966 + size 500 750 + appearance Grass { + } +} +Floor { + translation -4 0 -0.96 + rotation 0 0 1 1.5707963267948966 + name "floor(13)" + size 0.5 30 + appearance Parquetry { + type "dark strip" + } +} +Floor { + translation -8 -14 -0.98 + rotation 0 0 1 1.5707963267948966 + name "floor(5)" + size 100 50 + appearance PBRAppearance { + baseColor 0.6 0.8 0.6 + roughness 1 + } +} +DEF cyl1 Solid { + translation -33.571763188537474 19.604630321122315 2.324472693591628 + rotation 0.8184168164849583 -0.008454983970179998 0.5745628144430251 2.855354311428324 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF cyl_geo1 Cylinder { + height 2.1392156472456616 + radius 1.6973091143376589 + } + castShadows FALSE + } + ] + name "solid(6)" + boundingObject USE cyl_geo1 +} +DEF cyl2 Solid { + translation 
-20.440751891815367 13.747862151581423 2.2037661733607323 + rotation -0.9576802011973716 0.046082095001687674 0.2841215809381919 2.291645054624233 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF cyl_geo2 Cylinder { + height 1.9024561085079923 + radius 1.1790909016521525 + } + castShadows FALSE + } + ] + name "solid(16)" + boundingObject USE cyl_geo2 +} +DEF cyl3 Solid { + translation -26.698438622531555 14.389081419732586 2.865288247046378 + rotation 0.06344984160283776 -0.037805321362627146 -0.9972687076596976 -2.2281994020336473 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF cyl_geo3 Cylinder { + height 2.4713739170413263 + radius 1.7748883243712648 + } + castShadows FALSE + } + ] + name "solid(17)" + boundingObject USE cyl_geo3 +} +DEF cyl4 Solid { + translation -30.029891046849826 18.478910598526205 2.975906443581888 + rotation 0.8826129905240483 -0.436261871860521 0.17512820480707927 -3.0124718491193443 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF cyl_geo4 Cylinder { + height 2.040387292247227 + radius 1.7321406926258653 + } + castShadows FALSE + } + ] + name "solid(18)" + boundingObject USE cyl_geo4 +} +DEF cyl5 Solid { + translation -24.09970968828449 16.57710468047925 2.4982480911172904 + rotation -0.3917242543263733 0.07876246896092191 -0.9167052863683216 0.9303512269603899 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF cyl_geo5 Cylinder { + height 2.4768414116000366 + radius 0.5824817005442169 + } + castShadows FALSE + } + ] + name "solid(19)" + boundingObject USE cyl_geo5 +} +DEF box1 Solid { + translation -21.633230654472253 0.26435729418541554 2.44667080338155 + rotation -0.9504828289471485 -0.2846212101353512 0.1247924621830743 -2.5557071516379524 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF box_geo1 Box { + size 0.6703828008453012 1.144210412169449 0.606098167324667 + } + castShadows FALSE + } + ] + name "solid(20)" + boundingObject USE box_geo1 +} +DEF box2 Solid { + translation -49.944469797724835 1.6898234337915463 2.2675614976470575 + rotation -0.7163183367896099 0.6204835974021974 0.31919922577254956 2.929261604379051 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF box_geo2 Box { + size 1.6555731912544518 0.8528384366701209 1.5923867066800264 + } + castShadows FALSE + } + ] + name "solid(21)" + boundingObject USE box_geo2 +} +DEF box3 Solid { + translation -20.292422865902708 8.238761971490536 2.6721509445938736 + rotation 0.492702975086357 0.008495842259129496 0.8701560773823055 -3.124774550627343 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF box_geo3 Box { + size 1.114861834585034 1.9899789593315744 1.665194050916234 + } + castShadows FALSE + } + ] + name "solid(22)" + boundingObject USE box_geo3 +} +DEF box4 Solid { + translation -44.90744150542008 8.527463770969087 2.641006035191632 + rotation -0.47381905460959706 -0.5794103506313973 0.6631584645241805 -2.2430503148315895 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF 
box_geo4 Box { + size 1.6228519285122363 1.1501776483206156 2.2316284316140305 + } + castShadows FALSE + } + ] + name "solid(23)" + boundingObject USE box_geo4 +} +DEF box5 Solid { + translation -38.86442228484968 6.8392747579709265 2.2856439867583433 + rotation 0.1849655628048051 0.930668272300889 0.3156648658130647 3.098971634530017 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF box_geo5 Box { + size 2.198602344698272 0.9299983006419481 1.8591651370902504 + } + castShadows FALSE + } + ] + name "solid(24)" + boundingObject USE box_geo5 +} +DEF sph1 Solid { + translation -34.151569808427524 -8.39941640511953 2.832797125554921 + rotation 0.46953082387497425 0.2604920627631049 0.8436140650017107 -2.2344190120762484 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF sph_geo1 Sphere { + radius 2.441984523476554 + } + castShadows FALSE + } + ] + name "solid(25)" + boundingObject USE sph_geo1 +} +DEF sph2 Solid { + translation -35.244313566036006 -5.669701484639425 2.98237944209821 + rotation 0.46953082387497425 0.2604920627631049 0.8436140650017107 -2.2344190120762484 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF sph_geo2 Sphere { + radius 2.9612360062179715 + } + castShadows FALSE + } + ] + name "solid(26)" + boundingObject USE sph_geo2 +} +DEF sph3 Solid { + translation -45.118047101108615 -7.009574816911507 2.679998597601765 + rotation 0.46953082387497425 0.2604920627631049 0.8436140650017107 -2.2344190120762484 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF sph_geo3 Sphere { + radius 1.5576301083903183 + } + castShadows FALSE + } + ] + name "solid(27)" + boundingObject USE sph_geo3 +} +DEF sph4 Solid { + translation -35.57358399548293 -3.505564259041 2.013593906239073 + rotation 0.46953082387497425 0.2604920627631049 0.8436140650017107 -2.2344190120762484 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF sph_geo4 Sphere { + radius 1.8204413448018755 + } + castShadows FALSE + } + ] + name "solid(28)" + boundingObject USE sph_geo4 +} +DEF sph5 Solid { + translation -31.708422025337523 -7.917116623970895 2.104425536420231 + rotation 0.46953082387497425 0.2604920627631049 0.8436140650017107 -2.2344190120762484 + children [ + Shape { + appearance PBRAppearance { + baseColor 0.6 0.3 0.0235294 + roughness 1 + metalness 0 + } + geometry DEF sph_geo5 Sphere { + radius 2.2713871330568587 + } + castShadows FALSE + } + ] + name "solid(29)" + boundingObject USE sph_geo5 +} +DEF wall1 Wall { + translation -4 4.504321090318505 -1 + size 30 0.1 7 +} +DEF wall2 Wall { + translation -4 -4.504321090318505 -1 + name "wall(2)" + size 30 0.1 7 +} diff --git a/src/opendr/planning/end_to_end_planning/pretrained_model/saved_model.zip b/src/opendr/planning/end_to_end_planning/pretrained_model/saved_model.zip index 53c2522feb..3b22eb2895 100644 Binary files a/src/opendr/planning/end_to_end_planning/pretrained_model/saved_model.zip and b/src/opendr/planning/end_to_end_planning/pretrained_model/saved_model.zip differ diff --git a/src/opendr/planning/end_to_end_planning/utils/__init__.py b/src/opendr/planning/end_to_end_planning/utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/src/opendr/planning/end_to_end_planning/utils/euler_quaternion_transformations.py b/src/opendr/planning/end_to_end_planning/utils/euler_quaternion_transformations.py new file mode 100644 index 0000000000..6a865eaff9 --- /dev/null +++ b/src/opendr/planning/end_to_end_planning/utils/euler_quaternion_transformations.py @@ -0,0 +1,57 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import numpy as np +from geometry_msgs.msg import Quaternion + + +def euler_from_quaternion(q): + """ + Convert a quaternion into euler angles (roll, pitch, yaw) + roll is rotation around x in radians (counterclockwise) + pitch is rotation around y in radians (counterclockwise) + yaw is rotation around z in radians (counterclockwise) + """ + t0 = +2.0 * (q.w * q.x + q.y * q.z) + t1 = +1.0 - 2.0 * (q.x * q.x + q.y * q.y) + roll_x = np.arctan2(t0, t1) + + t2 = +2.0 * (q.w * q.y - q.z * q.x) + t2 = +1.0 if t2 > +1.0 else t2 + t2 = -1.0 if t2 < -1.0 else t2 + pitch_y = np.arcsin(t2) + + t3 = +2.0 * (q.w * q.z + q.x * q.y) + t4 = +1.0 - 2.0 * (q.y * q.y + q.z * q.z) + yaw_z = np.arctan2(t3, t4) + + return {"roll": roll_x, "pitch": pitch_y, "yaw": yaw_z} # in radians / np.pi * 180 + + +def euler_to_quaternion(roll, pitch, yaw): + """ + Convert euler angles (roll, pitch, yaw) into a quaternion + """ + # to fix the issue that when q is (0, 0, 0, 1) exactly, webots considers it as NaN value. + if roll == 0 and pitch == 0 and yaw == 0: + pitch = 0.000001 + q = Quaternion() + q.x = np.sin(roll / 2) * np.cos(pitch / 2) * np.cos(yaw / 2) - np.cos(roll / 2) * np.sin(pitch / 2) * np.sin( + yaw / 2) + q.y = np.cos(roll / 2) * np.sin(pitch / 2) * np.cos(yaw / 2) + np.sin(roll / 2) * np.cos(pitch / 2) * np.sin( + yaw / 2) + q.z = np.cos(roll / 2) * np.cos(pitch / 2) * np.sin(yaw / 2) - np.sin(roll / 2) * np.sin(pitch / 2) * np.cos( + yaw / 2) + q.w = np.cos(roll / 2) * np.cos(pitch / 2) * np.cos(yaw / 2) + np.sin(roll / 2) * np.sin(pitch / 2) * np.sin( + yaw / 2) + return q diff --git a/src/opendr/planning/end_to_end_planning/utils/obstacle_randomizer.py b/src/opendr/planning/end_to_end_planning/utils/obstacle_randomizer.py new file mode 100644 index 0000000000..b03e2d9d03 --- /dev/null +++ b/src/opendr/planning/end_to_end_planning/utils/obstacle_randomizer.py @@ -0,0 +1,237 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
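# Illustrative round-trip sketch (assumes the OpenDR planning package and its
# geometry_msgs/ROS dependency are installed; not part of the patch itself):
# the euler_quaternion_transformations helpers above implement the standard
# roll/pitch/yaw <-> quaternion formulas, so converting back and forth should
# recover the original angles.
import numpy as np
from opendr.planning.end_to_end_planning.utils.euler_quaternion_transformations import (
    euler_from_quaternion, euler_to_quaternion)

q = euler_to_quaternion(roll=0.1, pitch=-0.2, yaw=0.5)  # quaternion for a 0.5 rad heading
angles = euler_from_quaternion(q)  # dict with "roll", "pitch" and "yaw" in radians
assert np.allclose([angles["roll"], angles["pitch"], angles["yaw"]], [0.1, -0.2, 0.5], atol=1e-6)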
+import numpy as np +import rospy +from scipy.spatial.transform import Rotation as R +import webots_ros.srv +from geometry_msgs.msg import Point, Quaternion + + +class ObstacleRandomizer(): + def __init__(self, model_name): + self.number_of_obstacles = 10 + self.model_name = model_name + self.ros_srv_get_from_def = rospy.ServiceProxy("/supervisor/get_from_def", + webots_ros.srv.supervisor_get_from_def) + self.ros_srv_get_field = rospy.ServiceProxy("/supervisor/node/get_field", + webots_ros.srv.node_get_field) + self.ros_srv_field_set_v3 = rospy.ServiceProxy("/supervisor/field/set_vec3f", + webots_ros.srv.field_set_vec3f) + self.ros_srv_field_set_rotation = rospy.ServiceProxy("/supervisor/field/set_rotation", + webots_ros.srv.field_set_rotation) + self.ros_srv_field_set_float = rospy.ServiceProxy("/supervisor/field/set_float", + webots_ros.srv.field_set_float) + try: + self.cyl_solid_nodes = [self.ros_srv_get_from_def(name="cyl" + str(i)).node for i in range(1, 6)] + self.cyl_geometry_nodes = [self.ros_srv_get_from_def(name="cyl_geo" + str(i)).node for i in range(1, 6)] + self.box_solid_nodes = [self.ros_srv_get_from_def(name="box" + str(i)).node for i in range(1, 6)] + self.box_geometry_nodes = [self.ros_srv_get_from_def(name="box_geo" + str(i)).node for i in range(1, 6)] + self.sph_solid_nodes = [self.ros_srv_get_from_def(name="sph" + str(i)).node for i in range(1, 6)] + self.sph_geometry_nodes = [self.ros_srv_get_from_def(name="sph_geo" + str(i)).node for i in range(1, 6)] + self.wall_nodes = [self.ros_srv_get_from_def(name='wall' + str(i)).node for i in range(1, 3)] + except rospy.ServiceException as exc: + print("Service did not process request: " + str(exc)) + try: + self.cyl_solid_translation_fields = [self.ros_srv_get_field(node, 'translation', False).field for node in + self.cyl_solid_nodes] + self.cyl_solid_rotation_fields = [self.ros_srv_get_field(node, 'rotation', False).field for node in + self.cyl_solid_nodes] + self.cyl_geometry_radius_fields = [self.ros_srv_get_field(node, 'radius', False).field for node in + self.cyl_geometry_nodes] + self.cyl_geometry_height_fields = [self.ros_srv_get_field(node, 'height', False).field for node in + self.cyl_geometry_nodes] + self.box_solid_translation_fields = [self.ros_srv_get_field(node, 'translation', False).field for node in + self.box_solid_nodes] + self.box_solid_rotation_fields = [self.ros_srv_get_field(node, 'rotation', False).field for node in + self.box_solid_nodes] + self.box_geometry_size_fields = [self.ros_srv_get_field(node, 'size', False).field for node in + self.box_geometry_nodes] + self.sph_solid_translation_fields = [self.ros_srv_get_field(node, 'translation', False).field for node in + self.sph_solid_nodes] + self.sph_geometry_radius_fields = [self.ros_srv_get_field(node, 'radius', False).field for node in + self.sph_geometry_nodes] + self.wall_translation_fields = [self.ros_srv_get_field(node, 'translation', False).field for node in + self.wall_nodes] + except rospy.ServiceException as exc: + print("Service did not process request: " + str(exc)) + self.keep_configuration = {} + + def randomize_environment(self, with_walls=None, number_of_obstacles=None, save_config_dir=None): + if with_walls is None: + with_walls = np.random.choice([True, False]) + self.randomize_walls(with_walls) + if number_of_obstacles is None: + number_of_obstacles = np.random.randint(2, 7) + self.keep_configuration['with_walls'] = with_walls + self.keep_configuration['number_of_obstacles'] = number_of_obstacles + c = np.random.choice(3, 
number_of_obstacles) + number_of_cylinders = np.sum(c == 0) + number_of_boxs = np.sum(c == 1) + number_of_spheres = np.sum(c == 2) + while number_of_spheres > 5 or number_of_boxs > 5 or number_of_cylinders > 5: + c = np.random.choice(3, number_of_obstacles) + number_of_cylinders = np.sum(c == 0) + number_of_boxs = np.sum(c == 1) + number_of_spheres = np.sum(c == 2) + self.keep_configuration['number_of_cylinders'] = number_of_cylinders + self.keep_configuration['number_of_boxs'] = number_of_boxs + self.keep_configuration['number_of_spheres'] = number_of_spheres + self.randomize_cylinders(number_of_cylinders) + self.randomize_boxs(number_of_boxs) + self.randomize_spheres(number_of_spheres) + if save_config_dir is not None: + np.save(save_config_dir, self.keep_configuration) + + def randomize_cylinders(self, num=5, lower_size=1, higher_size=3): + for i in range(num): + t_field = self.cyl_solid_translation_fields[i] + p = Point() + p.y = np.random.normal(0, 2.5) + p.x = -np.random.uniform(-7, 20) + p.z = np.random.uniform(2, 3) + self.ros_srv_field_set_v3(t_field, 0, p) + rot_field = self.cyl_solid_rotation_fields[i] + q = Quaternion() + [q.x, q.y, q.z, q.w] = R.random().as_quat() + self.ros_srv_field_set_rotation(rot_field, 0, q) + rad_field = self.cyl_geometry_radius_fields[i] + rad = np.random.uniform(lower_size / 2, higher_size / 2) + self.ros_srv_field_set_float(rad_field, 0, rad) + h_field = self.cyl_geometry_height_fields[i] + h = np.random.uniform(lower_size, higher_size) + self.ros_srv_field_set_float(h_field, 0, h) + self.keep_configuration["cyl" + str(i) + "p"] = p + self.keep_configuration["cyl" + str(i) + "rot"] = q + self.keep_configuration["cyl" + str(i) + "rad"] = rad + self.keep_configuration["cyl" + str(i) + "h"] = h + for i in range(num, 5): + t_field = self.cyl_solid_translation_fields[i] + p = Point() + p.z = -10 + self.ros_srv_field_set_v3(t_field, 0, p) + + def randomize_boxs(self, num=5, lower_size=0.5, higher_size=2.5): + for i in range(num): + t_field = self.box_solid_translation_fields[i] + p = Point() + p.y = np.random.normal(0, 2.5) + p.x = -np.random.uniform(-7, 20) + p.z = np.random.uniform(2, 3) + self.ros_srv_field_set_v3(t_field, 0, p) + rot_field = self.box_solid_rotation_fields[i] + q = Quaternion() + [q.x, q.y, q.z, q.w] = R.random().as_quat() + self.ros_srv_field_set_rotation(rot_field, 0, q) + size_field = self.box_geometry_size_fields[i] + size = Point() + size.x = np.random.uniform(lower_size, higher_size) + size.z = np.random.uniform(lower_size, higher_size) + size.y = np.random.uniform(lower_size, higher_size) + self.ros_srv_field_set_v3(size_field, 0, size) + self.keep_configuration["box" + str(i) + "p"] = p + self.keep_configuration["box" + str(i) + "rot"] = q + self.keep_configuration["box" + str(i) + "size"] = size + for i in range(num, 5): + t_field = self.box_solid_translation_fields[i] + p = Point() + p.z = -10 + self.ros_srv_field_set_v3(t_field, 0, p) + + def randomize_spheres(self, num=5, lower_radius=0.5, higher_radius=1.5): + for i in range(num): + t_field = self.sph_solid_translation_fields[i] + p = Point() + p.y = np.random.normal(0, 2.5) + p.x = -np.random.uniform(-7, 20) + p.z = np.random.uniform(2, 3) + self.ros_srv_field_set_v3(t_field, 0, p) + rad_field = self.sph_geometry_radius_fields[i] + rad = np.random.uniform(lower_radius, higher_radius) + self.ros_srv_field_set_float(rad_field, 0, rad) + self.keep_configuration["sphere" + str(i) + "p"] = p + self.keep_configuration["sphere" + str(i) + "rad"] = rad + for i in range(num, 5): + 
t_field = self.sph_solid_translation_fields[i] + p = Point() + p.z = -10 + self.ros_srv_field_set_v3(t_field, 0, p) + + def randomize_walls(self, with_walls=True, lower_width=4, higher_width=10): + field = self.wall_translation_fields[0] + p = Point() + width = np.random.uniform(lower_width, higher_width) + self.keep_configuration["wall_width"] = width + if with_walls: + p.z = -1 + else: + p.z = -9 + p.y = -width / 2 + p.x = -4 + self.ros_srv_field_set_v3(field, 0, p) + p.y = width / 2 + field = self.wall_translation_fields[1] + self.ros_srv_field_set_v3(field, 0, p) + + def reload_environment(self, load_dir): + conf = np.load(load_dir, allow_pickle=True).item() + self.randomize_walls(with_walls=conf["with_walls"], lower_width=conf["wall_width"], + higher_width=conf["wall_width"]) + # set cylinders + for i in range(conf["number_of_cylinders"]): + t_field = self.cyl_solid_translation_fields[i] + p = conf["cyl" + str(i) + "p"] + self.ros_srv_field_set_v3(t_field, 0, p) + rot_field = self.cyl_solid_rotation_fields[i] + q = conf["cyl" + str(i) + "rot"] + self.ros_srv_field_set_rotation(rot_field, 0, q) + rad_field = self.cyl_geometry_radius_fields[i] + rad = conf["cyl" + str(i) + "rad"] + self.ros_srv_field_set_float(rad_field, 0, rad) + h_field = self.cyl_geometry_height_fields[i] + h = conf["cyl" + str(i) + "h"] + self.ros_srv_field_set_float(h_field, 0, h) + for i in range(conf["number_of_cylinders"], 5): + t_field = self.cyl_solid_translation_fields[i] + p = Point() + p.y = -10 + self.ros_srv_field_set_v3(t_field, 0, p) + # set boxes + for i in range(conf["number_of_boxs"]): + t_field = self.box_solid_translation_fields[i] + p = conf["box" + str(i) + "p"] + self.ros_srv_field_set_v3(t_field, 0, p) + rot_field = self.box_solid_rotation_fields[i] + q = conf["box" + str(i) + "rot"] + self.ros_srv_field_set_rotation(rot_field, 0, q) + size_field = self.box_geometry_size_fields[i] + size = conf["box" + str(i) + "size"] + self.ros_srv_field_set_v3(size_field, 0, size) + for i in range(conf["number_of_boxs"], 5): + t_field = self.box_solid_translation_fields[i] + p = Point() + p.y = -10 + self.ros_srv_field_set_v3(t_field, 0, p) + # set spheres + for i in range(conf["number_of_spheres"]): + t_field = self.sph_solid_translation_fields[i] + p = conf["sphere" + str(i) + "p"] + self.ros_srv_field_set_v3(t_field, 0, p) + rad_field = self.sph_geometry_radius_fields[i] + rad = conf["sphere" + str(i) + "rad"] + self.ros_srv_field_set_float(rad_field, 0, rad) + for i in range(conf["number_of_spheres"], 5): + t_field = self.sph_solid_translation_fields[i] + p = Point() + p.y = -10 + self.ros_srv_field_set_v3(t_field, 0, p) diff --git a/src/opendr/planning/end_to_end_planning/utils/sys_utils.py b/src/opendr/planning/end_to_end_planning/utils/sys_utils.py new file mode 100644 index 0000000000..e0f5e818cc --- /dev/null +++ b/src/opendr/planning/end_to_end_planning/utils/sys_utils.py @@ -0,0 +1,21 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
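# Illustrative usage sketch for the ObstacleRandomizer above (assumptions: a Webots world
# exposing the /supervisor/... ROS services is already running; the node name and the
# config path below are hypothetical, and this snippet is not part of the patch itself).
import rospy
from opendr.planning.end_to_end_planning.utils.obstacle_randomizer import ObstacleRandomizer

rospy.init_node("obstacle_randomizer_demo")
randomizer = ObstacleRandomizer(model_name="robot")
# Sample a new random scene (walls, cylinders, boxes, spheres) and store its configuration.
randomizer.randomize_environment(save_config_dir="/tmp/obstacle_config.npy")
# ... fly an episode ...
# Later, rebuild exactly the same scene, e.g. for a reproducible evaluation run.
randomizer.reload_environment("/tmp/obstacle_config.npy")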
+from pathlib import Path + + +def get_or_create_dir(dir_str: str, folder) -> str: + dir_path = Path(dir_str, folder) + if not dir_path.exists(): + dir_path.mkdir(parents=True) + return str(dir_path) diff --git a/src/opendr/simulation/human_model_generation/dependencies.ini b/src/opendr/simulation/human_model_generation/dependencies.ini index 580c774824..527636a8f2 100644 --- a/src/opendr/simulation/human_model_generation/dependencies.ini +++ b/src/opendr/simulation/human_model_generation/dependencies.ini @@ -1,6 +1,6 @@ [runtime] # 'python' key expects a value using the Python requirements file format -# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format python=torch==1.9.0 torchvision==0.10.0 pyglet>=1.5.16 @@ -9,5 +9,6 @@ python=torch==1.9.0 trimesh==3.5.23 scikit-image>0.16.2 matplotlib>=2.2.2 + tqdm opendr=opendr-toolkit-engine diff --git a/src/opendr/simulation/human_model_generation/utilities/PIFu/lib/mesh_util.py b/src/opendr/simulation/human_model_generation/utilities/PIFu/lib/mesh_util.py index 0ad38a66f1..0bcb968fbc 100644 --- a/src/opendr/simulation/human_model_generation/utilities/PIFu/lib/mesh_util.py +++ b/src/opendr/simulation/human_model_generation/utilities/PIFu/lib/mesh_util.py @@ -42,7 +42,7 @@ def eval_func(points): # Finally we do marching cubes try: - verts, faces, normals, values = measure.marching_cubes(sdf, 0.5) + verts, faces, normals, values = measure.marching_cubes(sdf, 0.5, method='lewiner') # transform verts into world coordinate system verts = np.matmul(mat[:3, :3], verts.T) + mat[:3, 3:4] verts = verts.T diff --git a/src/opendr/utils/README.md b/src/opendr/utils/README.md index 27001f08c7..c01fb368aa 100644 --- a/src/opendr/utils/README.md +++ b/src/opendr/utils/README.md @@ -1,4 +1,4 @@ ## Utils Module This module contains utility tools of the OpenDR toolkit, such as the -[hyperparameter tuning tool](hyperparameter_tuner/hyperparameter_tuner.py). +[hyperparameter tuning tool](hyperparameter_tuner/hyperparameter_tuner.py) and the [AmbiguityMeasure tool](ambiguity_measure/ambiguity_measure.py). diff --git a/src/opendr/utils/ambiguity_measure/README.md b/src/opendr/utils/ambiguity_measure/README.md new file mode 100644 index 0000000000..fff6dbd101 --- /dev/null +++ b/src/opendr/utils/ambiguity_measure/README.md @@ -0,0 +1,15 @@ +# OpenDR Ambiguity Measure + +This folder contains a tool for obtaining ambiguity measures for pixel-wise value estimates. +This tool can be used in combination with vision-based manipulation models such as Transporter Nets [[1]](#transporter-paper). +The contents of the file `persistence.py` were adapted from [persistence.py](https://git.sthu.org/?p=persistence.git;a=blob;f=imagepers.py) and +[union_find.py](https://git.sthu.org/?p=persistence.git;a=blob;f=union_find.py) created by Stefan Huber. + + +#### References +[1] +Zeng, A., Florence, P., Tompson, J., Welker, S., Chien, J., Attarian, M., ... & Lee, J. (2021, October). +Transporter networks: Rearranging the visual world for robotic manipulation. +In Conference on Robot Learning (pp. 726-747). +PMLR.
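A minimal usage sketch (illustrative only; it assumes the toolkit is installed and uses a synthetic heatmap):

```python
import numpy as np
from opendr.utils.ambiguity_measure import AmbiguityMeasure

# Synthetic pixel-wise value estimate with two peaks of similar height
heatmap = np.zeros((64, 64))
heatmap[16, 16] = 1.0
heatmap[48, 48] = 0.9

ambiguity_measure = AmbiguityMeasure(threshold=0.1, temperature=1.0)
ambiguous, locs, maxima, probs = ambiguity_measure.get_ambiguity_measure(heatmap)
print(ambiguous, locs, maxima, probs)
# Optional 3D visualisation of the value surface and the probability of each peak:
# ambiguity_measure.plot_ambiguity_measure(heatmap, locs, probs)
```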
+ diff --git a/src/opendr/utils/ambiguity_measure/__init__.py b/src/opendr/utils/ambiguity_measure/__init__.py new file mode 100644 index 0000000000..fa9eeb77d3 --- /dev/null +++ b/src/opendr/utils/ambiguity_measure/__init__.py @@ -0,0 +1,3 @@ +from opendr.utils.ambiguity_measure.ambiguity_measure import AmbiguityMeasure + +__all__ = ["AmbiguityMeasure"] diff --git a/src/opendr/utils/ambiguity_measure/ambiguity_measure.py b/src/opendr/utils/ambiguity_measure/ambiguity_measure.py new file mode 100644 index 0000000000..eb00c0d667 --- /dev/null +++ b/src/opendr/utils/ambiguity_measure/ambiguity_measure.py @@ -0,0 +1,189 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numpy as np +from opendr.utils.ambiguity_measure.persistence import get_persistence +from opendr.engine.data import Image +from matplotlib import pyplot as plt, transforms, cm +from copy import deepcopy +from typing import Optional, Union, List + + +class AmbiguityMeasure(object): + """ + AmbiguityMeasure tool. + + This tool can be used to obtain an ambiguity measure of the output of vision-based manipulation models, such as + Transporter Nets and CLIPort. + """ + + def __init__(self, threshold: float = 0.5, temperature: float = 1.0): + """ + Constructor of AmbiguityMeasure + + :param threshold: Ambiguity threshold, should be in [0, 1). + :type threshold: float + :param temperature: Temperature of the sigmoid function. + :type temperature: float + """ + assert 0 <= threshold < 1, "Threshold should be in [0, 1)." + assert temperature > 0, "Temperature should be greater than 0." + self._threshold = threshold + self._temperature = temperature + + def get_ambiguity_measure(self, heatmap: np.ndarray): + """ + Get Ambiguity Measure. + + :param heatmap: Pixel-wise value estimates. + :type heatmap: np.ndarray + :return: Tuple[ambiguous, locs, maxima, probs] + - ambiguous: Whether or not output was ambiguous. + - locs: Pixel locations of significant local maxima. + - maxima: Values corresponding to local maxima. + - probs: Probability mass function based on local maxima. + :rtype: Tuple[ambiguous, locs, maxima, probs] + - ambiguous: bool + - locs: list + - maxima: list + - probs: list + """ + # Calculate persistence to find local maxima + persistence = get_persistence(heatmap) + + maxima = [] + locs = [] + for i, homclass in enumerate(persistence): + p_birth, _, _, _ = homclass + locs.append(p_birth) + maxima.append(heatmap[p_birth[0], p_birth[1]]) + probs = self.__softmax(np.asarray(maxima)) + ambiguous = 1.0 - max(probs) < self._threshold + return ambiguous, locs, maxima, probs + + def plot_ambiguity_measure( + self, + heatmap: np.ndarray, + locs: List[List[int]], + probs: Union[List[float], np.ndarray], + img: Image = None, + img_offset: float = -250.0, + view_init: List[int] = [30, 30], + plot_threshold: float = 0.05, + title: str = "Ambiguity Measure", + save_path: Optional[str] = None, + ): + """ + Plot the obtained ambiguity measure.
+ + :param heatmap: Pixel-wise value estimates. + :type heatmap: np.ndarray + :param locs: Pixel locations of significant local maxima. + :type locs: List[List[int]] + :param probs: Probability mass function based on local maxima. + :type probs: List[float] + :param img: Top view input image. + :type img: Union[np.ndarray, Image] + :param img_offset: Specifies the distance between value estimates and image. + :type img_offset: float + :param view_init: Set the elevation and azimuth of the axes in degrees (not radians). + :type view_init: List[float] + :param plot_threshold: Threshold for plotting probabilities. + Probabilities lower than this value will not be plotted. + :param title: Title of the plot. + :type title: str + :param save_path: Path for saving figure, if None, + :type plot_threshold: float + """ + fig = plt.figure() + ax = plt.axes(projection="3d") + ax.computed_zorder = False + trans_offset = transforms.offset_copy(ax.transData, fig=fig, y=2, units="dots") + X, Y = np.mgrid[0:heatmap.shape[0], 0:heatmap.shape[1]] + Z = heatmap + ax.set_title(title) + ax.plot_surface(X, Y, Z, cmap=cm.viridis, linewidth=0, antialiased=False, shade=False, zorder=-1) + + if img is not None: + if type(img) is Image: + img = np.moveaxis(img.numpy(), 0, -1) + + img = deepcopy(img) + if np.max(img) > 1: + img = img / 255 + x_image, y_image = np.mgrid[0:img.shape[0], 0:img.shape[1]] + ax.plot_surface( + x_image, + y_image, + np.ones(img.shape[:2]) * -img_offset, + rstride=1, + cstride=1, + facecolors=img, + shade=False, + ) + + ax.set_zlim(-img_offset - 1, 50) + ax.view_init(view_init[0], view_init[1]) + for loc, value in zip(locs, probs): + if value > plot_threshold: + ax.plot3D([loc[0]], [loc[1]], [value], "r.", zorder=9) + ax.plot3D([loc[0]], [loc[1]], [-img_offset], "r.", zorder=-2) + ax.text( + loc[0], + loc[1], + value, + f"{value:.2f}", + zorder=10, + transform=trans_offset, + horizontalalignment="center", + verticalalignment="bottom", + c="r", + fontsize="large", + ) + ax.grid(False) + ax.set_axis_off() + ax.set_xticklabels([]) + ax.set_yticklabels([]) + ax.set_zticklabels([]) + if save_path: + plt.savefig(save_path) + plt.show() + + @property + def threshold(self): + """ + Getter of threshold. + + :return: Threshold value. + :rtype: float + """ + return self._threshold + + @threshold.setter + def threshold(self, value: float): + """ + Setter of threshold. + + :param threshold: Threshold value. 
+ :type threshold: float + """ + if type(value) != float: + raise TypeError("threshold should be a float") + else: + self._threshold = value + + def __softmax(self, x): + x /= self._temperature + e_x = np.exp(x - np.max(x)) + return e_x / e_x.sum() diff --git a/src/opendr/utils/ambiguity_measure/dependencies.ini b/src/opendr/utils/ambiguity_measure/dependencies.ini new file mode 100644 index 0000000000..fd40514704 --- /dev/null +++ b/src/opendr/utils/ambiguity_measure/dependencies.ini @@ -0,0 +1,8 @@ +[runtime] +# 'python' key expects a value using the Python requirements file format +# https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format +python=numpy<=1.23.5 + matplotlib + wheel + +opendr=opendr-toolkit-engine diff --git a/src/opendr/utils/ambiguity_measure/persistence.py b/src/opendr/utils/ambiguity_measure/persistence.py new file mode 100644 index 0000000000..fc3406aa9f --- /dev/null +++ b/src/opendr/utils/ambiguity_measure/persistence.py @@ -0,0 +1,151 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Adapted from https://git.sthu.org/?p=persistence.git;a=blob;f=imagepers.py and +https://git.sthu.org/?p=persistence.git;a=blob;f=union_find.py; + +original author: "Stefan Huber " + +Union-find data structure. Based on Josiah Carlson's code, +http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/215912 +with significant additional changes by D. Eppstein. +""" + + +class UnionFind: + + """Union-find data structure. + + Each unionFind instance X maintains a family of disjoint sets of + hashable objects, supporting the following two methods: + + - X[item] returns a name for the set containing the given item. + Each set is named by an arbitrarily-chosen one of its members; as + long as the set remains unchanged it will keep the same name. If + the item is not yet part of a set in X, a new singleton set is + created for it. + + - X.union(item1, item2, ...) merges the sets containing each item + into a single larger set. If any item is not yet part of a set + in X, it is added to X as one of the members of the merged set. 
+ """ + + def __init__(self): + """Create a new empty union-find structure.""" + self.weights = {} + self.parents = {} + + def add(self, obj, weight): + if obj not in self.parents: + self.parents[obj] = obj + self.weights[obj] = weight + + def __contains__(self, obj): + return obj in self.parents + + def __getitem__(self, obj): + """Find and return the name of the set containing the object.""" + + # check for previously unknown obj + assert obj in self.parents, f"Object {obj} not in parents" + + # find path of objects leading to the root + path = [obj] + root = self.parents[obj] + while root != path[-1]: + path.append(root) + root = self.parents[root] + + # compress the path and return + for ancestor in path: + self.parents[ancestor] = root + return root + + def __iter__(self): + """Iterate through all items ever found or unioned by this structure.""" + return iter(self.parents) + + def union(self, *objects): + """Find the sets containing the objects and merge them all.""" + roots = [self[x] for x in objects] + heaviest = max([(self.weights[r], r) for r in roots])[1] + for r in roots: + if r != heaviest: + self.parents[r] = heaviest + + +def get(im, p): + return im[p[0]][p[1]] + + +def iter_neighbors(p, w, h): + y, x = p + + # 8-neighborship + neigh = [(y + j, x + i) for i in [-1, 0, 1] for j in [-1, 0, 1]] + # 4-neighborship + # neigh = [(y-1, x), (y+1, x), (y, x-1), (y, x+1)] + + for j, i in neigh: + if j < 0 or j >= h: + continue + if i < 0 or i >= w: + continue + if j == y and i == x: + continue + yield j, i + + +def get_persistence(im): + h, w = im.shape + + # Get indices orderd by value from high to low + indices = [(i, j) for i in range(h) for j in range(w)] + indices.sort(key=lambda p: get(im, p), reverse=True) + + # Maintains the growing sets + uf = UnionFind() + + groups0 = {} + + def get_comp_birth(p): + return get(im, uf[p]) + + # Process pixels from high to low + for i, p in enumerate(indices): + v = get(im, p) + ni = [uf[q] for q in iter_neighbors(p, w, h) if q in uf] + nc = sorted([(get_comp_birth(q), q) for q in set(ni)], reverse=True) + + if i == 0: + groups0[p] = (v, v, None) + + uf.add(p, -i) + + if len(nc) > 0: + oldp = nc[0][1] + uf.union(oldp, p) + + # Merge all others with oldp + for bl, q in nc[1:]: + if uf[q] not in groups0: + # print(i, ": Merge", uf[q], "with", oldp, "via", p) + groups0[uf[q]] = (bl, bl - v, p) + uf.union(oldp, q) + + groups0 = [(k, groups0[k][0], groups0[k][1], groups0[k][2]) for k in groups0] + groups0.sort(key=lambda g: g[2], reverse=True) + + return groups0 diff --git a/src/opendr/utils/hyperparameter_tuner/dependencies.ini b/src/opendr/utils/hyperparameter_tuner/dependencies.ini index 90177a32c9..7bffaadd8c 100644 --- a/src/opendr/utils/hyperparameter_tuner/dependencies.ini +++ b/src/opendr/utils/hyperparameter_tuner/dependencies.ini @@ -5,7 +5,7 @@ python=optuna tabulate torch plotly - sklearn + scikit-learn wheel opendr=opendr-toolkit-engine diff --git a/tests/Makefile b/tests/Makefile index 3c2797ee0e..b5e23d8a6c 100644 --- a/tests/Makefile +++ b/tests/Makefile @@ -57,7 +57,7 @@ $(BUILD_DIR)/test_face_recognition: @+echo "Building face recognition test..." 
$(CC) $(CFLAGS) -o $(BUILD_DIR)/test_face_recognition sources/c_api/test_face_recognition.c $(INC) $(OPENDR_INC) $(OPENDR_LD) $(LD) -FMP_INC = -I$(OPENDR_HOME)/projects/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include +FMP_INC = -I$(OPENDR_HOME)/projects/python/perception/slam/full_map_posterior_gmapping/src/openslam_gmapping/include $(BUILD_DIR)/test_fmp_gmapping: @+echo "Building Full-Map-Posterior GMapping test..." $(CPP) $(CFLAGS) -o $(BUILD_DIR)/test_fmp_gmapping sources/c_api/test_fmp_gmapping.cpp -lboost_unit_test_framework $(INC) $(OPENDR_INC) $(OPENDR_LD) $(LD) $(FMP_INC) diff --git a/tests/sources/tools/control/mobile_manipulation/run_ros.sh b/tests/sources/tools/control/mobile_manipulation/run_ros.sh old mode 100644 new mode 100755 index ee27242c66..61f7bece5e --- a/tests/sources/tools/control/mobile_manipulation/run_ros.sh +++ b/tests/sources/tools/control/mobile_manipulation/run_ros.sh @@ -1,4 +1,4 @@ -source ${OPENDR_HOME}/projects/control/mobile_manipulation/mobile_manipulation_ws/devel/setup.bash +source ${OPENDR_HOME}/projects/python/control/mobile_manipulation/mobile_manipulation_ws/devel/setup.bash roscore & sleep 5 -roslaunch mobile_manipulation_rl pr2_analytical.launch & \ No newline at end of file +roslaunch mobile_manipulation_rl pr2_analytical.launch & diff --git a/tests/sources/tools/perception/activity_recognition/continual_transformer_encoder/__init__.py b/tests/sources/tools/perception/activity_recognition/continual_transformer_encoder/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/sources/tools/perception/activity_recognition/continual_transformer_encoder/test_continual_transformer_encoder_learner.py b/tests/sources/tools/perception/activity_recognition/continual_transformer_encoder/test_continual_transformer_encoder_learner.py new file mode 100644 index 0000000000..007bbddd93 --- /dev/null +++ b/tests/sources/tools/perception/activity_recognition/continual_transformer_encoder/test_continual_transformer_encoder_learner.py @@ -0,0 +1,143 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import continual +import os +import shutil +import torch +import unittest + +from opendr.perception.activity_recognition import CoTransEncLearner +from opendr.engine.data import Vector, Timeseries +from opendr.engine.target import Category +from opendr.perception.activity_recognition.datasets import DummyTimeseriesDataset +from pathlib import Path +from logging import getLogger +import onnxruntime as ort + +device = os.getenv("TEST_DEVICE") if os.getenv("TEST_DEVICE") else "cpu" + +logger = getLogger(__name__) + +_BATCH_SIZE = 1 + + +class TestCoTransEncLearner(unittest.TestCase): + @classmethod + def setUpClass(cls): + print( + "\n\n**********************************\nTEST Continual Transformer Encoder Learner\n" + "**********************************" + ) + cls.temp_dir = Path("./tests/sources/tools/perception/activity_recognition/temp") + + cls.learner = CoTransEncLearner( + batch_size=_BATCH_SIZE, + device=device, + input_dims=8, + hidden_dims=32, + sequence_len=64, + num_heads=8, + num_classes=4, + temp_path=str(cls.temp_dir), + ) + + cls.train_ds = DummyTimeseriesDataset(sequence_len=64, num_sines=8, num_datapoints=128) + cls.val_ds = DummyTimeseriesDataset( + sequence_len=64, num_sines=8, num_datapoints=128, base_offset=64 + ) + + @classmethod + def tearDownClass(cls): + try: + shutil.rmtree(str(cls.temp_dir)) + except OSError as e: + logger.error(f"Caught error while cleaning up {e.filename}: {e.strerror}") + + def test_save_and_load(self): + assert self.learner.model is not None + self.learner.save(self.temp_dir) + # Make changes to check subsequent load + self.learner.model = None + self.learner.batch_size = 42 + self.learner.load(self.temp_dir) + self.assertIsNotNone(self.learner.model, "model is None after loading pth model.") + assert self.learner.batch_size == _BATCH_SIZE + + def test_fit(self): + # Initialize with random parameters + self.learner.model = None + self.learner.init_model() + + # Store prior parameters + m = list(self.learner.model.parameters())[0].clone() + + # Fit model + self.learner.fit(dataset=self.train_ds, val_dataset=self.val_ds, steps=2) + + # Check that parameters changed + assert not torch.equal(m, list(self.learner.model.parameters())[0]) + + def test_eval(self): + results = self.learner.eval(self.val_ds, steps=2) + + assert isinstance(results["accuracy"], float) + assert isinstance(results["loss"], float) + + def test_infer(self): + dl = torch.utils.data.DataLoader(self.val_ds, batch_size=_BATCH_SIZE, num_workers=0) + tensor = next(iter(dl))[0][0] + + # Input is Tensor + results1 = self.learner.infer(tensor.to(device)) + # print(results1) + # Results has confidence summing to 1.0 + assert torch.isclose(torch.sum(results1.confidence), torch.tensor(1.0)) + + # Input is Timeseries + results2 = self.learner.infer(Timeseries(tensor.permute(1, 0))) + # print(results2) + assert torch.allclose(results1.confidence, results2.confidence, atol=1e-2) + + # Input is Vector + for i in range(64): # = sequence_len + results3 = self.learner.infer(Vector(tensor[:, i])) + assert torch.allclose(results1.confidence, results3.confidence, atol=1e-4) + + def test_optimize(self): + torch_ok = int(torch.__version__.split(".")[1]) >= 10 + co_ok = int(getattr(continual, "__version__", "0.0.0").split(".")[0]) >= 1 + ort_ok = int(getattr(ort, "__version__", "0.0.0").split(".")[1]) >= 11 + if not (torch_ok and co_ok and ort_ok): + return # Skip test + + self.learner._ort_session = None + self.learner.optimize() + step_input = self.learner._example_input[:, :, 0] + step_output = 
self.learner.infer(step_input) + assert isinstance(step_output, Category) + + assert self.learner._ort_session is not None + self.learner._ort_session = None # Clean up + + +def rmfile(path): + try: + os.remove(path) + except OSError as e: + print("Error: %s - %s." % (e.filename, e.strerror)) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/sources/tools/perception/activity_recognition/cox3d/test_cox3d_learner.py b/tests/sources/tools/perception/activity_recognition/cox3d/test_cox3d_learner.py index 6d56da46e7..75a5d9f697 100644 --- a/tests/sources/tools/perception/activity_recognition/cox3d/test_cox3d_learner.py +++ b/tests/sources/tools/perception/activity_recognition/cox3d/test_cox3d_learner.py @@ -16,15 +16,16 @@ import torch import unittest import numpy as np +import os from opendr.perception.activity_recognition import CoX3DLearner from opendr.perception.activity_recognition import KineticsDataset from opendr.engine.data import Image +from opendr.engine.target import Category from pathlib import Path from logging import getLogger -import os -device = os.getenv('TEST_DEVICE') if os.getenv('TEST_DEVICE') else 'cpu' +device = os.getenv("TEST_DEVICE") if os.getenv("TEST_DEVICE") else "cpu" logger = getLogger(__name__) @@ -47,6 +48,9 @@ def setUpClass(cls): # Download mini dataset cls.dataset_path = cls.temp_dir / "datasets" / "kinetics3" KineticsDataset.download_micro(cls.temp_dir / "datasets") + cls.train_ds = KineticsDataset(path=cls.dataset_path, frames_per_clip=4, split="train", spatial_pixels=160) + cls.val_ds = KineticsDataset(path=cls.dataset_path, frames_per_clip=4, split="val", spatial_pixels=160) + cls.test_ds = KineticsDataset(path=cls.dataset_path, frames_per_clip=4, split="test", spatial_pixels=160) @classmethod def tearDownClass(cls): @@ -71,9 +75,6 @@ def test_save_and_load(self): assert self.learner.batch_size == 2 def test_fit(self): - train_ds = KineticsDataset(path=self.dataset_path, frames_per_clip=4, split="train") - val_ds = KineticsDataset(path=self.dataset_path, frames_per_clip=4, split="val") - # Initialize with random parameters self.learner.model = None self.learner.init_model() @@ -82,48 +83,58 @@ def test_fit(self): m = list(self.learner.model.parameters())[0].clone() # Fit model - self.learner.fit(dataset=train_ds, val_dataset=val_ds, steps=1) + self.learner.fit(dataset=self.train_ds, val_dataset=self.val_ds, steps=1) # Check that parameters changed assert not torch.equal(m, list(self.learner.model.parameters())[0]) def test_eval(self): - test_ds = KineticsDataset(path=self.dataset_path, frames_per_clip=40, split="test") - + self.learner.model.clean_state() self.learner.load(self.temp_dir / "weights" / f"x3d_{_BACKBONE}.pyth") - results = self.learner.eval(test_ds, steps=2) + results = self.learner.eval(self.test_ds, steps=2) - assert results["accuracy"] > 0.2 + assert results["accuracy"] > 0.5 assert results["loss"] < 20 def test_infer(self): - ds = KineticsDataset(path=self.dataset_path, frames_per_clip=4, split="test") - dl = torch.utils.data.DataLoader(ds, batch_size=2, num_workers=0) + dl = torch.utils.data.DataLoader(self.test_ds, batch_size=2, num_workers=0) batch = next(iter(dl))[0] batch = batch[:, :, 0] # Select a single frame self.learner.load(self.temp_dir / "weights" / f"x3d_{_BACKBONE}.pyth") - self.learner.model.clean_model_state() + + # Warm up + self.learner.model.forward_steps( + batch.unsqueeze(2).repeat(1, 1, self.learner.model.receptive_field - 1, 1, 1) + ) # Input is Tensor - results1 = 
self.learner.infer(batch.to(device)) + results1 = self.learner.infer(batch) # Results is a batch with each item summing to 1.0 assert all([torch.isclose(torch.sum(r.confidence), torch.tensor(1.0)) for r in results1]) # Input is Image - results2 = self.learner.infer([Image(batch[0], dtype=np.float32), Image(batch[1], dtype=np.float32)]) - assert torch.allclose(results1[0].confidence, results2[0].confidence, atol=1e-4) + results2 = self.learner.infer([Image(batch[0], dtype=np.float64), Image(batch[1], dtype=np.float32)]) + assert results1[0].data == results2[0].data + assert results1[1].data == results2[1].data # Input is List[Image] - results3 = self.learner.infer([Image(v, dtype=np.float) for v in batch]) - assert all([torch.allclose(r1.confidence, r3.confidence, atol=1e-4) for (r1, r3) in zip(results1, results3)]) + results3 = self.learner.infer([Image(v, dtype=np.float64) for v in batch]) + assert results1[0].data == results3[0].data + assert results1[1].data == results3[1].data def test_optimize(self): self.learner.ort_session = None self.learner.load(self.temp_dir / "weights" / f"x3d_{_BACKBONE}.pyth") self.learner.optimize() - assert self.learner.ort_session is not None + assert self.learner._ort_session is not None + + step_input = self.learner._example_input.repeat( + self.learner.batch_size, 1, 1, 1 + ) + step_output = self.learner.infer(step_input) + assert isinstance(step_output[0], Category) # Clean up self.learner.ort_session = None diff --git a/tests/sources/tools/perception/activity_recognition/x3d/test_x3d_learner.py b/tests/sources/tools/perception/activity_recognition/x3d/test_x3d_learner.py index 728eae3c13..9cb9b7b2ec 100644 --- a/tests/sources/tools/perception/activity_recognition/x3d/test_x3d_learner.py +++ b/tests/sources/tools/perception/activity_recognition/x3d/test_x3d_learner.py @@ -120,32 +120,15 @@ def test_infer(self): for (r1, r3) in zip(results1, results3) ]) - # Redundant test: Same code is executed internally in `test_optimize` - # def test_save_load_onnx(self): - # self.learner.load(self.temp_dir / "weights" / f"x3d_{_BACKBONE}.pyth") - # path = self.temp_dir / f"x3d_{_BACKBONE}.pyth" - # # Save - # if path.exists(): - # path.unlink() - # assert not path.exists() - # self.learner._save_onnx(path) - # assert path.exists() - # # Load - # assert getattr(self.learner, "ort_session", None) == None - # self.learner._load_onnx(path) - # assert getattr(self.learner, "ort_session", None) != None - # # Clean up - # self.learner.ort_session = None - def test_optimize(self): - self.learner.ort_session = None + self.learner._ort_session = None self.learner.load(self.temp_dir / "weights" / f"x3d_{_BACKBONE}.pyth") self.learner.optimize() - assert self.learner.ort_session is not None + assert self.learner._ort_session is not None # Clean up - self.learner.ort_session = None + self.learner._ort_session = None if __name__ == "__main__": diff --git a/tests/sources/tools/perception/facial_expression_recognition/image_based_facial_emotion_estimation/__init__.py b/tests/sources/tools/perception/facial_expression_recognition/image_based_facial_emotion_estimation/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/sources/tools/perception/facial_expression_recognition/image_based_facial_emotion_estimation/test_esr.py b/tests/sources/tools/perception/facial_expression_recognition/image_based_facial_emotion_estimation/test_esr.py new file mode 100644 index 0000000000..394168d846 --- /dev/null +++ 
b/tests/sources/tools/perception/facial_expression_recognition/image_based_facial_emotion_estimation/test_esr.py @@ -0,0 +1,158 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import shutil +import os +import torch +from opendr.perception.facial_expression_recognition import FacialEmotionLearner +from opendr.perception.facial_expression_recognition import datasets +from os import path, makedirs +from torch.utils.data import DataLoader + + +def rmfile(path): + try: + os.remove(path) + except OSError as e: + print("Error: %s - %s." % (e.filename, e.strerror)) + + +def rmdir(_dir): + try: + shutil.rmtree(_dir) + except OSError as e: + print("Error: %s - %s." % (e.filename, e.strerror)) + + +PATH_ = './temp' + + +class TestFacialEmotionLearner(unittest.TestCase): + @classmethod + def setUpClass(cls): + print( + "\n\n**********************************\nTEST Facial Emotion Learner for Facial Expression and " + "Emotion Analysis\n*" + "*********************************") + if not path.isdir(PATH_): + makedirs(PATH_) + cls.temp_dir = PATH_ + + cls.learner = FacialEmotionLearner(device="cpu", temp_path=cls.temp_dir, + batch_size=2, max_training_epoch=1, ensemble_size=1, + name_experiment='esr_9', base_path_experiment=PATH_, + lr=1e-1, categorical_train=True, dimensional_finetune=True, + max_tuning_epoch=1) + + cls.dataset_path = cls.learner.download(mode='data') + cls.pretrained_path = cls.learner.download(mode='pretrained') + cls.learner.base_path_to_dataset = cls.dataset_path + + @classmethod + def tearDownClass(cls): + # Clean up downloaded files + rmdir(os.path.join(cls.temp_dir)) + + def test_fit(self): + print("\n\n**********************************\nTest ESR fit function \n*" + "*********************************") + + self.learner.model = None + self.learner.init_model(num_branches=self.learner.ensemble_size) + + m = list(self.learner.model.parameters())[0].clone() + self.learner.fit() + self.assertFalse(torch.equal(m, list(self.learner.model.parameters())[0]), + msg="Model parameters did not change after running fit.") + + def test_eval(self): + print("\n\n**********************************\nTest ESR eval function \n*" + "*********************************") + self.learner.init_model(num_branches=9) + self.learner.load(ensemble_size=9, path_to_saved_network=self.pretrained_path) + if self.learner.categorical_train: + eval_categorical_results = self.learner.eval(eval_type='categorical') + if self.learner.dimensional_finetune: + eval_dimensional_results = self.learner.eval(eval_type='dimensional') + + self.assertNotEqual(sum([len(eval_dimensional_results["valence_arousal_losses"][i]) for i in range(2)]), 0, + msg="Eval results contains empty lists for valence and arousal estimation loss") + self.assertNotEqual(sum(eval_categorical_results['running_emotion_loss']), 0.0, + msg="Eval results have zero loss for categorical expression recognition") + + def test_infer(self): + print("\n\n**********************************\nTest ESR 
infer function \n*" + "*********************************") + self.learner.init_model(num_branches=9) + self.learner.load(ensemble_size=9, path_to_saved_network=self.pretrained_path) + val_data = datasets.AffectNetCategorical(idx_set=2, + max_loaded_images_per_label=2, + transforms=None, + is_norm_by_mean_std=False, + base_path_to_affectnet=self.dataset_path) + val_loader = DataLoader(val_data, batch_size=32, shuffle=False, num_workers=8) + batch = next(iter(val_loader))[0] + # input is Tensor + ensemble_emotion_results, ensemble_dimension_results = self.learner.infer(batch) + self.assertIsNotNone(ensemble_emotion_results[0].confidence, msg="The predicted confidence score is None") + self.assertNotEqual((sum(sum(ensemble_dimension_results))).numpy(), 0.0, + msg="overall ensembled dimension results are zero") + + def test_save_load(self): + print("\n\n**********************************\nTest ESR save_load function \n*" + "*********************************") + path_to_saved_network = path.join(self.temp_dir, self.learner.name_experiment) + if not path.isdir(path_to_saved_network): + makedirs(path_to_saved_network) + self.learner.model = None + self.learner.ort_session = None + self.learner.init_model(num_branches=1) + self.learner.save(state_dicts=self.learner.model.to_state_dict(), + base_path_to_save_model=path_to_saved_network) + self.learner.load(ensemble_size=1, path_to_saved_network=path_to_saved_network, fix_backbone=True) + self.assertIsNotNone(self.learner.model, "model is None after loading pt model.") + # Cleanup + + def test_save_load_onnx(self): + print("\n\n**********************************\nTest ESR save_load ONNX function \n*" + "*********************************") + path_to_saved_network = path.join(self.temp_dir, self.learner.name_experiment) + if not path.isdir(path_to_saved_network): + makedirs(path_to_saved_network) + self.learner.model = None + self.learner.ort_session = None + self.learner.init_model(num_branches=1) + self.learner.optimize() + self.learner.save(state_dicts=self.learner.model.to_state_dict(), base_path_to_save_model=path_to_saved_network) + self.learner.model = None + self.learner.load(ensemble_size=1, path_to_saved_network=path_to_saved_network, fix_backbone=True) + self.assertIsNotNone(self.learner.ort_session, "ort_session is None after loading onnx model.") + # Cleanup + self.learner.ort_session = None + + def test_optimize(self): + print("\n\n**********************************\nTest ESR optimize function \n*" + "*********************************") + self.learner.model = None + self.learner.ort_session = None + self.learner.init_model(num_branches=1) + self.learner.optimize() + self.assertIsNotNone(self.learner.ort_session, "ort_session is None after optimizing the pretrained model.") + # Cleanup + self.learner.ort_session = None + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/sources/tools/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/test_pstbln.py b/tests/sources/tools/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/test_pstbln.py index 558232a8f6..1201a64819 100644 --- a/tests/sources/tools/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/test_pstbln.py +++ b/tests/sources/tools/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/test_pstbln.py @@ -75,6 +75,9 @@ def tearDownClass(cls): rmdir(os.path.join(cls.temp_dir)) def test_network_builder(self): + print( + 
"\n\n**********************************\nTEST network_builder function \n*****" + "*****************************") training_dataset = ExternalDataset(path=self.Train_DATASET_PATH, dataset_type="CASIA") validation_dataset = ExternalDataset(path=self.Val_DATASET_PATH, dataset_type="CASIA") self.pstbln_facial_expression_classifier.topology = [] @@ -92,6 +95,9 @@ def test_network_builder(self): msg="Model topology did not change after running network_builder.") def test_fit(self): + print( + "\n\n**********************************\nTEST fit function \n*****" + "*****************************") training_dataset = ExternalDataset(path=self.Train_DATASET_PATH, dataset_type="CASIA") validation_dataset = ExternalDataset(path=self.Val_DATASET_PATH, dataset_type="CASIA") self.pstbln_facial_expression_classifier.topology = [1] @@ -108,6 +114,9 @@ def test_fit(self): msg="Model parameters did not change after running fit.") def test_eval(self): + print( + "\n\n**********************************\nTEST eval function \n*****" + "*****************************") self.pstbln_facial_expression_classifier.topology = [1] self.pstbln_facial_expression_classifier.init_model() validation_dataset = ExternalDataset(path=self.Val_DATASET_PATH, dataset_type="CASIA") @@ -120,6 +129,9 @@ def test_eval(self): self.assertNotEqual(len(eval_results["score"]), 0, msg="Eval results contains empty list.") def test_infer(self): + print( + "\n\n**********************************\nTEST infer function \n*****" + "*****************************") test_data = np.load(self.Test_DATASET_PATH)[0:1] self.pstbln_facial_expression_classifier.topology = [1] self.pstbln_facial_expression_classifier.init_model() @@ -127,20 +139,26 @@ def test_infer(self): self.assertIsNotNone(category.confidence, msg="The predicted confidence score is None") def test_save_load(self): + print( + "\n\n**********************************\nTEST save_load function \n*****" + "*****************************") self.pstbln_facial_expression_classifier.topology = [1] self.pstbln_facial_expression_classifier.ort_session = None self.pstbln_facial_expression_classifier.init_model() self.pstbln_facial_expression_classifier.save(path=os.path.join(self.temp_dir, self.experiment_name), - model_name='test_pstgcn') + model_name='test_pstbln') self.pstbln_facial_expression_classifier.model = None self.pstbln_facial_expression_classifier.topology = [1] self.pstbln_facial_expression_classifier.load(path=os.path.join(self.temp_dir, self.experiment_name), - model_name='test_pstgcn') + model_name='test_pstbln') self.assertIsNotNone(self.pstbln_facial_expression_classifier.model, "model is None after loading pt model.") # Cleanup rmdir(os.path.join(self.temp_dir, self.experiment_name)) def test_optimize(self): + print( + "\n\n**********************************\nTEST optimize function \n*****" + "*****************************") self.pstbln_facial_expression_classifier.topology = [1] self.pstbln_facial_expression_classifier.ort_session = None self.pstbln_facial_expression_classifier.init_model() @@ -149,9 +167,11 @@ def test_optimize(self): "ort_session is None after optimizing the pretrained model.") # Cleanup self.pstbln_facial_expression_classifier.ort_session = None - rmfile(os.path.join(self.temp_dir, self.experiment_name)) def test_save_load_onnx(self): + print( + "\n\n**********************************\nTEST save_load_onnx function \n*****" + "*****************************") self.pstbln_facial_expression_classifier.topology = [1] self.pstbln_facial_expression_classifier.ort_session 
= None self.pstbln_facial_expression_classifier.init_model() diff --git a/tests/sources/tools/perception/object_detection_2d/nanodet/__init__.py b/tests/sources/tools/perception/object_detection_2d/nanodet/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/sources/tools/perception/object_detection_2d/nanodet/test_nanodet.py b/tests/sources/tools/perception/object_detection_2d/nanodet/test_nanodet.py new file mode 100644 index 0000000000..583404d933 --- /dev/null +++ b/tests/sources/tools/perception/object_detection_2d/nanodet/test_nanodet.py @@ -0,0 +1,131 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import cv2 +import unittest +import gc +import shutil +import os +import numpy as np +from opendr.perception.object_detection_2d import NanodetLearner +from opendr.engine.datasets import ExternalDataset + +device = os.getenv('TEST_DEVICE') if os.getenv('TEST_DEVICE') else 'cpu' + +_DEFAULT_MODEL = "plus_m_416" + + +def rmfile(path): + try: + os.remove(path) + except OSError as e: + print("Error: %s - %s." % (e.filename, e.strerror)) + + +def rmdir(_dir): + try: + shutil.rmtree(_dir) + except OSError as e: + print("Error: %s - %s." % (e.filename, e.strerror)) + + +class TestNanodetLearner(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print("\n\n**********************************\nTEST Nanodet Learner\n" + "**********************************") + + cls.temp_dir = os.path.join(".", "tests", "sources", "tools", "perception", "object_detection_2d", + "nanodet", "nanodet_temp") + cls.detector = NanodetLearner(model_to_use=_DEFAULT_MODEL, device=device, temp_path=cls.temp_dir, batch_size=1, + iters=1, checkpoint_after_iter=2, lr=1e-4) + # Download all required files for testing + cls.detector.download(path=cls.temp_dir, mode="pretrained") + cls.detector.download(path=cls.temp_dir, mode="images") + cls.detector.download(path=cls.temp_dir, mode="test_data") + + @classmethod + def tearDownClass(cls): + print('Removing temporary directories for Nanodet...') + # Clean up downloaded files + rmfile(os.path.join(cls.temp_dir, "000000000036.jpg")) + rmdir(os.path.join(cls.temp_dir, "test_data")) + rmdir(os.path.join(cls.temp_dir, "nanodet_{}".format(_DEFAULT_MODEL))) + rmdir(os.path.join(cls.temp_dir)) + + del cls.detector + gc.collect() + print('Finished cleaning for Nanodet...') + + def test_fit(self): + print('Starting training test for Nanodet...') + training_dataset = ExternalDataset(path=os.path.join(self.temp_dir, "test_data"), dataset_type="voc") + m = list(self.detector._model.parameters())[0].clone().detach().clone().to(device) + self.detector.fit(dataset=training_dataset, verbose=False) + n = list(self.detector._model.parameters())[0].clone().detach().clone().to(device) + self.assertFalse(np.array_equal(m, n), + msg="Model parameters did not change after running fit.") + del training_dataset, m, n + gc.collect() + + rmfile(os.path.join(self.temp_dir, "checkpoints", "model_iter_0.ckpt")) + 
rmfile(os.path.join(self.temp_dir, "checkpoints", "epoch=0-step=0.ckpt")) + rmdir(os.path.join(self.temp_dir, "checkpoints")) + + print('Finished training test for Nanodet...') + + def test_eval(self): + print('Starting evaluation test for Nanodet...') + eval_dataset = ExternalDataset(path=os.path.join(self.temp_dir, "test_data"), dataset_type="voc") + self.detector.load(path=os.path.join(self.temp_dir, "nanodet_{}".format(_DEFAULT_MODEL)), verbose=False) + results_dict = self.detector.eval(dataset=eval_dataset, verbose=False) + self.assertNotEqual(len(results_dict), 0, + msg="Eval results dictionary list is empty.") + del eval_dataset, results_dict + gc.collect() + + rmfile(os.path.join(self.temp_dir, "results.json")) + rmfile(os.path.join(self.temp_dir, "eval_results.txt")) + print('Finished evaluation test for Nanodet...') + + def test_infer(self): + print('Starting inference test for Nanodet...') + self.detector.load(os.path.join(self.temp_dir, "nanodet_{}".format(_DEFAULT_MODEL)), verbose=False) + img = cv2.imread(os.path.join(self.temp_dir, "000000000036.jpg")) + self.assertIsNotNone(self.detector.infer(input=img, verbose=False), + msg="Returned empty BoundingBoxList.") + gc.collect() + print('Finished inference test for Nanodet...') + + def test_save_load(self): + print('Starting save/load test for Nanodet...') + self.detector.save(path=os.path.join(self.temp_dir, "test_model"), verbose=False) + starting_param_1 = list(self.detector._model.parameters())[0].detach().clone().to(device) + self.detector.model = None + detector2 = NanodetLearner(model_to_use=_DEFAULT_MODEL, device=device, temp_path=self.temp_dir, batch_size=1, + iters=1, checkpoint_after_iter=1, lr=1e-4) + detector2.load(path=os.path.join(self.temp_dir, "test_model"), verbose=False) + new_param = list(detector2._model.parameters())[0].detach().clone().to(device) + self.assertTrue(starting_param_1.allclose(new_param)) + + # Cleanup + rmfile(os.path.join(self.temp_dir, "test_model", "nanodet_{}.json".format(_DEFAULT_MODEL))) + rmfile(os.path.join(self.temp_dir, "test_model", "nanodet_{}.pth".format(_DEFAULT_MODEL))) + rmdir(os.path.join(self.temp_dir, "test_model")) + print('Finished save/load test for Nanodet...') + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/sources/tools/perception/object_detection_2d/yolov5/__init__.py b/tests/sources/tools/perception/object_detection_2d/yolov5/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/sources/tools/perception/object_detection_2d/yolov5/test_yolov5.py b/tests/sources/tools/perception/object_detection_2d/yolov5/test_yolov5.py new file mode 100644 index 0000000000..cb5fbc05e6 --- /dev/null +++ b/tests/sources/tools/perception/object_detection_2d/yolov5/test_yolov5.py @@ -0,0 +1,78 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest +import gc +import cv2 +import shutil +import os +import torch + +from opendr.perception.object_detection_2d import YOLOv5DetectorLearner + +torch.hub._validate_not_a_forked_repo = lambda a, b, c: True # workaround for rate limit bug +device = os.getenv('TEST_DEVICE') if os.getenv('TEST_DEVICE') else 'cpu' + + +def rmfile(path): + try: + os.remove(path) + except OSError as e: + print("Error: %s - %s." % (e.filename, e.strerror)) + + +def rmdir(_dir): + try: + shutil.rmtree(_dir) + except OSError as e: + print("Error: %s - %s." % (e.filename, e.strerror)) + + +class TestYOLOv5DetectorLearner(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print("\n\n**********************************\nTEST YOLOv5Detector Learner\n" + "**********************************") + + cls.temp_dir = os.path.join(".", "tests", "sources", "tools", "perception", "object_detection_2d", + "yolov5", "yolov5_temp") + cls.detector = YOLOv5DetectorLearner(model_name='yolov5s', device=device, temp_path=cls.temp_dir, + force_reload=True) + + @classmethod + def tearDownClass(cls): + print('Removing temporary directories for YOLOv5...') + # Clean up downloaded files + rmfile(os.path.join(cls.temp_dir, "zidane.jpg")) + rmfile(os.path.join(cls.temp_dir, "yolov5s.pt")) + rmdir(os.path.join(cls.temp_dir)) + + del cls.detector + gc.collect() + print('Finished cleaning for YOLOv5...') + + def test_infer(self): + print('Starting inference test for YOLOv5...') + torch.hub.download_url_to_file('https://ultralytics.com/images/zidane.jpg', os.path.join(self.temp_dir, 'zidane.jpg')) + img = cv2.imread(os.path.join(self.temp_dir, "zidane.jpg")) + self.assertIsNotNone(self.detector.infer(img), + msg="Returned empty BoundingBoxList.") + del img + gc.collect() + print('Finished inference test for YOLOv5...') + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/sources/tools/perception/object_detection_3d/voxel_object_detection_3d/test_object_detection_3d.py b/tests/sources/tools/perception/object_detection_3d/voxel_object_detection_3d/test_object_detection_3d.py index 5e544c2de2..40fc9ac112 100644 --- a/tests/sources/tools/perception/object_detection_3d/voxel_object_detection_3d/test_object_detection_3d.py +++ b/tests/sources/tools/perception/object_detection_3d/voxel_object_detection_3d/test_object_detection_3d.py @@ -24,14 +24,6 @@ DEVICE = os.getenv('TEST_DEVICE') if os.getenv('TEST_DEVICE') else 'cpu' print("Using device:", DEVICE) -print("Using device:", DEVICE, file=sys.stderr) - - -def rmfile(path): - try: - os.remove(path) - except OSError as e: - print("Error: %s - %s." 
% (e.filename, e.strerror)) def rmdir(_dir): @@ -80,12 +72,9 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): # Clean up downloaded files - rmdir(os.path.join(cls.temp_dir)) - pass def test_fit(self): - def test_model(name, config): print("Fit", name, "start", file=sys.stderr) model_path = os.path.join(self.temp_dir, "test_fit_" + name) diff --git a/tests/sources/tools/perception/object_tracking_2d/fair_mot/test_object_tracking_2d_fair_mot.py b/tests/sources/tools/perception/object_tracking_2d/fair_mot/test_object_tracking_2d_fair_mot.py index 7fbea8a277..72b25a0315 100644 --- a/tests/sources/tools/perception/object_tracking_2d/fair_mot/test_object_tracking_2d_fair_mot.py +++ b/tests/sources/tools/perception/object_tracking_2d/fair_mot/test_object_tracking_2d_fair_mot.py @@ -95,9 +95,10 @@ def test_model(name): checkpoint_after_iter=3, temp_path=self.temp_dir, device=DEVICE, + use_pretrained_backbone=False, ) - starting_param = list(learner.model.parameters())[0].clone() + starting_param = list(learner.model.parameters())[-1].clone() learner.fit( dataset, @@ -106,7 +107,7 @@ def test_model(name): val_split_paths=self.train_split_paths, verbose=True, ) - new_param = list(learner.model.parameters())[0].clone() + new_param = list(learner.model.parameters())[-1].clone() self.assertFalse(torch.equal(starting_param, new_param)) print("Fit", name, "ok", file=sys.stderr) @@ -125,6 +126,7 @@ def test_model(name): checkpoint_after_iter=3, temp_path=self.temp_dir, device=DEVICE, + use_pretrained_backbone=False, ) starting_param = list(learner.model.parameters())[0].clone() @@ -156,6 +158,7 @@ def test_model(name): checkpoint_after_iter=3, temp_path=self.temp_dir, device=DEVICE, + use_pretrained_backbone=False, ) learner.load(model_path, verbose=True) result = learner.eval(eval_dataset) @@ -176,6 +179,7 @@ def test_model(name): checkpoint_after_iter=3, temp_path=self.temp_dir, device=DEVICE, + use_pretrained_backbone=False, ) learner.load(model_path, verbose=True) result = learner.infer(eval_dataset[0][0], 10) @@ -204,6 +208,7 @@ def test_model(name): checkpoint_after_iter=3, temp_path=self.temp_dir, device=DEVICE, + use_pretrained_backbone=False, ) learner.save(save_path, True) @@ -215,6 +220,7 @@ def test_model(name): checkpoint_after_iter=3, temp_path=self.temp_dir, device=DEVICE, + use_pretrained_backbone=False, ) learner2.load(save_path) @@ -233,6 +239,7 @@ def test_model(name): checkpoint_after_iter=3, temp_path=self.temp_dir, device=DEVICE, + use_pretrained_backbone=False, ) with self.assertRaises(Exception): diff --git a/tests/sources/tools/perception/object_tracking_2d/siamrpn/__init__.py b/tests/sources/tools/perception/object_tracking_2d/siamrpn/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/sources/tools/perception/object_tracking_2d/siamrpn/test_siamrpn.py b/tests/sources/tools/perception/object_tracking_2d/siamrpn/test_siamrpn.py new file mode 100644 index 0000000000..bb1d806335 --- /dev/null +++ b/tests/sources/tools/perception/object_tracking_2d/siamrpn/test_siamrpn.py @@ -0,0 +1,121 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import gc +import cv2 +import shutil +import os +import numpy as np +from opendr.engine.datasets import ExternalDataset +from opendr.engine.target import TrackingAnnotation +from opendr.perception.object_tracking_2d import SiamRPNLearner +from opendr.perception.object_tracking_2d.datasets import OTBTrainDataset + + +device = os.getenv('TEST_DEVICE') if os.getenv('TEST_DEVICE') else 'cpu' + + +def rmfile(path): + try: + os.remove(path) + except OSError as e: + print("Error: %s - %s." % (e.filename, e.strerror)) + + +def rmdir(_dir): + try: + shutil.rmtree(_dir) + except OSError as e: + print("Error: %s - %s." % (e.filename, e.strerror)) + + +class TestSiamRPNLearner(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print("\n\n**********************************\nTEST SiamRPN Learner\n" + "**********************************") + + cls.temp_dir = os.path.join(".", "tests", "sources", "tools", "perception", "object_tracking_2d", + "siamrpn", "siamrpn_temp") + cls.learner = SiamRPNLearner(device=device, temp_path=cls.temp_dir, batch_size=1, n_epochs=1, + lr=1e-4, num_workers=1) + # Download all required files for testing + cls.learner.download(cls.temp_dir, mode="pretrained") + cls.learner.download(os.path.join(cls.temp_dir, "test_data"), mode="test_data") + + @classmethod + def tearDownClass(cls): + print('Removing temporary directories for SiamRPN...') + # Clean up downloaded files + rmdir(os.path.join(cls.temp_dir, "siamrpn_opendr")) + rmdir(os.path.join(cls.temp_dir, "test_data")) + rmdir(os.path.join(cls.temp_dir)) + + del cls.learner + gc.collect() + print('Finished cleaning for SiamRPN...') + + def test_fit(self): + print('Starting training test for SiamRPN...') + print(os.listdir(os.path.join(self.temp_dir, "test_data"))) + training_dataset = OTBTrainDataset(root=os.path.join(self.temp_dir, "test_data"), + json_path=os.path.join(self.temp_dir, "test_data", "OTBtest.json")) + m = list(self.learner._model.collect_params().values())[1].data().asnumpy().copy() + self.learner.fit(dataset=training_dataset, verbose=True) + n = list(self.learner._model.collect_params().values())[1].data().asnumpy() + self.assertFalse(np.array_equal(m, n), + msg="Model parameters did not change after running fit.") + del training_dataset, m, n + gc.collect() + print('Finished training test for SiamRPN...') + + def test_eval(self): + print('Starting evaluation test for SiamRPN...') + eval_dataset = ExternalDataset(os.path.join(self.temp_dir, "test_data"), + dataset_type="OTBtest") + self.learner.load(os.path.join(self.temp_dir, "siamrpn_opendr")) + results_dict = self.learner.eval(eval_dataset) + self.assertIsNotNone(results_dict['success'], + msg="Eval results dictionary not returned.") + del eval_dataset, results_dict + gc.collect() + print('Finished evaluation test for SiamRPN...') + + def test_infer(self): + print('Starting inference test for SiamRPN...') + self.learner._model = None + self.learner.load(os.path.join(self.temp_dir, "siamrpn_opendr")) + img = cv2.imread(os.path.join(self.temp_dir, "test_data", "Basketball", "img", "0001.jpg")) + init_box = 
TrackingAnnotation(left=198, top=214, width=34, height=81, id=0, name=0) + self.assertIsNotNone(self.learner.infer(img, init_box=init_box), + msg="Returned empty TrackingAnnotation.") + del img + gc.collect() + print('Finished inference test for SiamRPN...') + + def test_save_load(self): + print('Starting save/load test for SiamRPN...') + self.learner.save(os.path.join(self.temp_dir, "test_model")) + self.learner._model = None + self.learner.load(os.path.join(self.temp_dir, "test_model")) + self.assertIsNotNone(self.learner._model, "model is None after loading model.") + # Cleanup + rmdir(os.path.join(self.temp_dir, "test_model")) + print('Finished save/load test for SiamRPN...') + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/sources/tools/perception/object_tracking_3d/ab3dmot/test_object_tracking_3d_ab3dmot.py b/tests/sources/tools/perception/object_tracking_3d/ab3dmot/test_object_tracking_3d_ab3dmot.py index 723c0b2e15..88b474617e 100644 --- a/tests/sources/tools/perception/object_tracking_3d/ab3dmot/test_object_tracking_3d_ab3dmot.py +++ b/tests/sources/tools/perception/object_tracking_3d/ab3dmot/test_object_tracking_3d_ab3dmot.py @@ -48,6 +48,9 @@ def setUpClass(cls): cls.temp_dir, True ) + cls.use_long_tests = os.environ.get("OPENDR_USE_LONG_TESTS", "False") == "True" + cls.long_tracking_dataset_path = os.environ.get("OPENDR_KITTI_TRACKING_PATH", "") + print("Dataset downloaded", file=sys.stderr) @classmethod @@ -70,11 +73,25 @@ def test_unsupported(self): def test_eval(self): learner = ObjectTracking3DAb3dmotLearner() - results = learner.eval(self.dataset, count=1) - self.assertTrue("car" in results) - self.assertTrue("pedestrian" in results) - self.assertTrue("cyclist" in results) + if self.use_long_tests: + + self.assertTrue(len(self.long_tracking_dataset_path) > 0) + + dataset = KittiTrackingDatasetIterator(self.long_tracking_dataset_path, self.long_tracking_dataset_path, "tracking") + + results = learner.eval(dataset) + self.assertTrue("car" in results) + self.assertTrue("pedestrian" in results) + self.assertTrue("cyclist" in results) + for k, v in results.items(): + print(k, v) + else: + results = learner.eval(self.dataset, count=1) + + self.assertTrue("car" in results) + self.assertTrue("pedestrian" in results) + self.assertTrue("cyclist" in results) def test_infer(self): diff --git a/tests/sources/tools/perception/pose_estimation/high_resolution_pose_estimation/__init__.py b/tests/sources/tools/perception/pose_estimation/high_resolution_pose_estimation/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/sources/tools/perception/pose_estimation/high_resolution_pose_estimation/test_high_resolution_pose_estimation.py b/tests/sources/tools/perception/pose_estimation/high_resolution_pose_estimation/test_high_resolution_pose_estimation.py new file mode 100644 index 0000000000..aa0a27005d --- /dev/null +++ b/tests/sources/tools/perception/pose_estimation/high_resolution_pose_estimation/test_high_resolution_pose_estimation.py @@ -0,0 +1,95 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import shutil +from opendr.perception.pose_estimation import HighResolutionPoseEstimationLearner + +from opendr.engine.datasets import ExternalDataset +from opendr.engine.data import Image +import warnings +import os + +device = os.getenv('TEST_DEVICE') if os.getenv('TEST_DEVICE') else 'cpu' + + +def rmfile(path): + try: + os.remove(path) + except OSError as e: + print("Error: %s - %s." % (e.filename, e.strerror)) + + +def rmdir(_dir): + try: + shutil.rmtree(_dir) + except OSError as e: + print("Error: %s - %s." % (e.filename, e.strerror)) + + +class TestHighResolutionPoseEstimationLearner(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print("\n\n**********************************\nTEST High Resolution Pose Estimation Learner\n" + "**********************************") + + cls.temp_dir = os.path.join(".", "tests", "sources", "tools", "perception", + "pose_estimation", "high_resolution_pose_estimation", "hr_pose_estim_temp") + cls.pose_estimator = HighResolutionPoseEstimationLearner(device=device, temp_path=cls.temp_dir, num_workers=1) + + # Download all required files for testing + cls.pose_estimator.download(mode="pretrained") + cls.pose_estimator.download(mode="test_data") + + @classmethod + def tearDownClass(cls): + # Clean up downloaded files + rmdir(os.path.join(cls.temp_dir, "openpose_default")) + rmdir(os.path.join(cls.temp_dir, "dataset")) + + rmdir(os.path.join(cls.temp_dir)) + + def test_eval(self): + # Test eval will issue resource warnings due to some files left open in pycoco tools, + # as well as a deprecation warning due to a cast of a float to integer (hopefully they will be fixed in a future + # version) + warnings.simplefilter("ignore", ResourceWarning) + warnings.simplefilter("ignore", DeprecationWarning) + + eval_dataset = ExternalDataset(path=os.path.join(self.temp_dir, "dataset"), dataset_type="COCO") + self.pose_estimator.load(os.path.join(self.temp_dir, "openpose_default")) + results_dict = self.pose_estimator.eval(eval_dataset, use_subset=False, verbose=True, silent=True, + images_folder_name="image", annotations_filename="annotation.json") + self.assertNotEqual(len(results_dict['average_precision']), 0, + msg="Eval results dictionary contains empty list.") + self.assertNotEqual(len(results_dict['average_recall']), 0, + msg="Eval results dictionary contains empty list.") + # Cleanup + rmfile(os.path.join(self.temp_dir, "detections.json")) + warnings.simplefilter("default", ResourceWarning) + warnings.simplefilter("default", DeprecationWarning) + + def test_infer(self): + self.pose_estimator.model = None + self.pose_estimator.load(os.path.join(self.temp_dir, "openpose_default")) + + img = Image.open(os.path.join(self.temp_dir, "dataset", "image", "000000000785_1080.jpg")) + # Default pretrained mobilenet model detects 18 keypoints on img with id 785 + self.assertGreater(len(self.pose_estimator.infer(img)[0].data), 0, + msg="Returned pose must have non-zero number of keypoints.") + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/sources/tools/perception/skeleton_based_action_recognition/costgcn/__init__.py b/tests/sources/tools/perception/skeleton_based_action_recognition/costgcn/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/sources/tools/perception/skeleton_based_action_recognition/costgcn/test_costgcn.py 
b/tests/sources/tools/perception/skeleton_based_action_recognition/costgcn/test_costgcn.py new file mode 100644 index 0000000000..6c922d3d75 --- /dev/null +++ b/tests/sources/tools/perception/skeleton_based_action_recognition/costgcn/test_costgcn.py @@ -0,0 +1,173 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import torch +import unittest +import shutil + +from opendr.perception.skeleton_based_action_recognition import CoSTGCNLearner +from opendr.engine.datasets import ExternalDataset +# from opendr.engine.target import Category +from pathlib import Path +from logging import getLogger + +device = os.getenv("TEST_DEVICE") if os.getenv("TEST_DEVICE") else "cpu" + +logger = getLogger(__name__) + +_BACKBONE = "costgcn" + + +class TestCoSTGCNLearner(unittest.TestCase): + @classmethod + def setUpClass(cls): + print( + "\n\n**********************************\nTEST Continual STGCN Learner\n" + "**********************************" + ) + cls.temp_dir = Path("./tests/sources/tools/perception/skeleton_based_action_recognition/temp") + + cls.learner = CoSTGCNLearner( + device=device, + temp_path=str(cls.temp_dir), + iters=1, + batch_size=2, + backbone=_BACKBONE, + num_workers=0, + ) + + # Download all required files for testing + cls.pretrained_weights_path = cls.learner.download( + path=os.path.join(cls.temp_dir, "pretrained_models"), + method_name="costgcn", + mode="pretrained", + file_name="costgcn_ntu60_xview_joint.ckpt", + ) + cls.Train_DATASET_PATH = cls.learner.download( + mode="train_data", path=os.path.join(cls.temp_dir, "data") + ) + cls.Val_DATASET_PATH = cls.learner.download( + mode="val_data", path=os.path.join(cls.temp_dir, "data") + ) + + @classmethod + def tearDownClass(cls): + try: + shutil.rmtree(str(cls.temp_dir)) + except OSError as e: + logger.error(f"Caught error while cleaning up {e.filename}: {e.strerror}") + + def test_fit(self): + print( + "\n\n**********************************\nTest CoSTGCNLearner fit \n*" + "*********************************" + ) + + train_ds = self.learner._prepare_dataset( + ExternalDataset(path=self.Train_DATASET_PATH, dataset_type="NTURGBD"), + data_filename="train_joints.npy", + labels_filename="train_labels.pkl", + skeleton_data_type="joint", + phase="train", + verbose=False, + ) + + val_ds = self.learner._prepare_dataset( + ExternalDataset(path=self.Val_DATASET_PATH, dataset_type="NTURGBD"), + data_filename="val_joints.npy", + labels_filename="val_labels.pkl", + skeleton_data_type="joint", + phase="val", + verbose=False, + ) + + # Initialize with random parameters + self.learner.model = None + self.learner.init_model() + + # Store prior parameters + m = list(self.learner.model.parameters())[0].clone() + + # Fit model + self.learner.fit(dataset=train_ds, val_dataset=val_ds, steps=1) + + # Check that parameters changed + assert not torch.equal(m, list(self.learner.model.parameters())[0]) + + def test_eval(self): + test_ds = self.learner._prepare_dataset( + 
ExternalDataset(path=self.Val_DATASET_PATH, dataset_type="NTURGBD"), + data_filename="val_joints.npy", + labels_filename="val_labels.pkl", + skeleton_data_type="joint", + phase="val", + verbose=False, + ) + + self.learner.load(self.pretrained_weights_path) + results = self.learner.eval(test_ds, steps=2) + + assert results["accuracy"] > 0.5 + assert results["loss"] < 1 + + def test_infer(self): + ds = self.learner._prepare_dataset( + ExternalDataset(path=self.Val_DATASET_PATH, dataset_type="NTURGBD"), + data_filename="val_joints.npy", + labels_filename="val_labels.pkl", + skeleton_data_type="joint", + phase="val", + verbose=False, + ) + dl = torch.utils.data.DataLoader(ds, batch_size=self.learner.batch_size, num_workers=0) + batch = next(iter(dl))[0] + frame = batch[:, :, -1] # Select a single frame + + self.learner.model.clean_state() + self.learner.model.forward_steps(batch[:, :, :-1]) # Init model state + + # Input is Tensor + results1 = self.learner.infer(frame) + # Results is a batch with each item summing to 1.0 + assert all([torch.isclose(torch.sum(r.confidence), torch.tensor(1.0)) for r in results1]) + + # DISABLED: test passes however hangs unittest, preventing it from completing + # def test_optimize(self): + # self.learner.batch_size = 2 + # self.learner._ort_session = None + # self.learner.optimize() + # step_input = self.learner._example_input[:, :, 0] + # step_output = self.learner.infer(step_input) + # assert isinstance(step_output[0], Category) + # + # assert self.learner._ort_session is not None + # + # # Clean up + # self.learner._ort_session = None + + def test_save_and_load(self): + assert self.learner.model is not None + self.learner.batch_size = 2 + self.learner.save(self.temp_dir) + # Make changes to check subsequent load + self.learner.model = None + self.learner.batch_size = 42 + self.learner.load(self.temp_dir) + self.assertIsNotNone(self.learner.model, "model is None after loading pth model.") + assert self.learner.batch_size == 2 + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/sources/tools/perception/skeleton_based_action_recognition/pstgcn/__init__.py b/tests/sources/tools/perception/skeleton_based_action_recognition/pstgcn/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/sources/tools/perception/skeleton_based_action_recognition/test_pstgcn.py b/tests/sources/tools/perception/skeleton_based_action_recognition/pstgcn/test_pstgcn.py similarity index 100% rename from tests/sources/tools/perception/skeleton_based_action_recognition/test_pstgcn.py rename to tests/sources/tools/perception/skeleton_based_action_recognition/pstgcn/test_pstgcn.py diff --git a/tests/sources/tools/perception/skeleton_based_action_recognition/stbln/__init__.py b/tests/sources/tools/perception/skeleton_based_action_recognition/stbln/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/sources/tools/perception/skeleton_based_action_recognition/test_stbln.py b/tests/sources/tools/perception/skeleton_based_action_recognition/stbln/test_stbln.py similarity index 100% rename from tests/sources/tools/perception/skeleton_based_action_recognition/test_stbln.py rename to tests/sources/tools/perception/skeleton_based_action_recognition/stbln/test_stbln.py diff --git a/tests/sources/tools/perception/skeleton_based_action_recognition/stgcn/__init__.py b/tests/sources/tools/perception/skeleton_based_action_recognition/stgcn/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/tests/sources/tools/perception/skeleton_based_action_recognition/test_stgcn.py b/tests/sources/tools/perception/skeleton_based_action_recognition/stgcn/test_stgcn.py similarity index 100% rename from tests/sources/tools/perception/skeleton_based_action_recognition/test_stgcn.py rename to tests/sources/tools/perception/skeleton_based_action_recognition/stgcn/test_stgcn.py diff --git a/tests/sources/tools/perception/skeleton_based_action_recognition/tagcn/__init__.py b/tests/sources/tools/perception/skeleton_based_action_recognition/tagcn/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/sources/tools/perception/skeleton_based_action_recognition/test_tagcn.py b/tests/sources/tools/perception/skeleton_based_action_recognition/tagcn/test_tagcn.py similarity index 100% rename from tests/sources/tools/perception/skeleton_based_action_recognition/test_tagcn.py rename to tests/sources/tools/perception/skeleton_based_action_recognition/tagcn/test_tagcn.py diff --git a/tests/sources/tools/planning/end_to_end_planning/test_end_to_end_planning.py b/tests/sources/tools/planning/end_to_end_planning/test_end_to_end_planning.py index 67533e5c61..8331dbf6ee 100644 --- a/tests/sources/tools/planning/end_to_end_planning/test_end_to_end_planning.py +++ b/tests/sources/tools/planning/end_to_end_planning/test_end_to_end_planning.py @@ -15,8 +15,9 @@ import numpy as np import unittest from pathlib import Path +from gym.spaces import Box -from opendr.planning.end_to_end_planning import EndToEndPlanningRLLearner, AgiEnv +from opendr.planning.end_to_end_planning import EndToEndPlanningRLLearner, UAVDepthPlanningEnv import opendr import torch import os @@ -44,7 +45,7 @@ class EndToEndPlanningTest(unittest.TestCase): @classmethod def setUpClass(cls): - cls.env = AgiEnv() + cls.env = UAVDepthPlanningEnv() cls.learner = EndToEndPlanningRLLearner(cls.env, device=device) @classmethod @@ -54,8 +55,12 @@ def tearDownClass(cls): def test_infer(self): obs = self.env.observation_space.sample() action = self.learner.infer(obs)[0] - self.assertTrue((action >= 0), "Actions below 0") - self.assertTrue((action < self.env.action_space.n), "Actions above discrete action space dimensions") + if isinstance(self.env.action_space, Box): + self.assertTrue((np.abs(action[0]) <= 1), "Action not between -1 and 1") + self.assertTrue((np.abs(action[1]) <= 1), "Action not between -1 and 1") + else: + self.assertTrue((action >= 0), "Actions below 0") + self.assertTrue((action < self.env.action_space.n), "Actions above discrete action space dimensions") def test_eval(self): episode_reward = self.learner.eval(self.env)["rewards_collected"] diff --git a/tests/sources/tools/simulation/human_model_generation/test_human_model_generation.py b/tests/sources/tools/simulation/human_model_generation/test_human_model_generation.py index 307e3ee220..aa2f3d3a2b 100644 --- a/tests/sources/tools/simulation/human_model_generation/test_human_model_generation.py +++ b/tests/sources/tools/simulation/human_model_generation/test_human_model_generation.py @@ -44,10 +44,10 @@ def tearDownClass(cls): def test_infer(self): - img_rgb = Image.open(os.path.join(os.environ['OPENDR_HOME'], "projects", "simulation", "human_model_generation", - "demos", "imgs_input", "rgb", "result_0004.jpg")) - img_msk = Image.open(os.path.join(os.environ['OPENDR_HOME'], "projects", "simulation", "human_model_generation", - "demos", "imgs_input", "msk", "result_0004.jpg")) + img_rgb = Image.open(os.path.join(os.environ['OPENDR_HOME'], "projects", "python", 
"simulation", + "human_model_generation", "demos", "imgs_input", "rgb", "result_0004.jpg")) + img_msk = Image.open(os.path.join(os.environ['OPENDR_HOME'], "projects", "python", "simulation", + "human_model_generation", "demos", "imgs_input", "msk", "result_0004.jpg")) model_3D = self.learner.infer(imgs_rgb=[img_rgb], imgs_msk=[img_msk], extract_pose=False) # Default pretrained mobilenet model detects 18 keypoints on img with id 785 diff --git a/tests/sources/tools/utils/test_ambiguity_measure.py b/tests/sources/tools/utils/test_ambiguity_measure.py new file mode 100644 index 0000000000..5e01d6e871 --- /dev/null +++ b/tests/sources/tools/utils/test_ambiguity_measure.py @@ -0,0 +1,53 @@ +# Copyright 2020-2022 OpenDR European Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import numpy as np +from opendr.engine.data import Image +from opendr.utils.ambiguity_measure.ambiguity_measure import AmbiguityMeasure + + +class TestAmbiguityMeasure(unittest.TestCase): + @classmethod + def setUpClass(cls): + print("\n\n**********************************\nTEST Ambiguity Measure\n" "**********************************") + cls.am = AmbiguityMeasure() + + def test_get_ambiguity_measure(self): + heatmap = 10 * np.random.random((128, 128)) + ambiguous, locs, maxima, probs = self.am.get_ambiguity_measure(heatmap) + self.assertTrue(type(ambiguous) in [bool, np.bool_]) + self.assertTrue(type(locs) in [list, np.ndarray]) + self.assertTrue(type(maxima) in [list, np.ndarray]) + self.assertTrue(type(probs) in [list, np.ndarray]) + + def test_plot_ambiguity_measure(self): + img = 255 * np.random.random((128, 128, 3)) + img = np.asarray(img, dtype="uint8") + heatmap = 10 * np.random.random((128, 128)) + ambiguous, locs, maxima, probs = self.am.get_ambiguity_measure(heatmap) + self.am.plot_ambiguity_measure(heatmap, locs, probs, img) + + img = Image(img) + self.am.plot_ambiguity_measure(heatmap, locs, probs, img) + + def test_threshold(self): + threshold = self.am.threshold + new_threshold = threshold * 0.2 + self.am.threshold = new_threshold + self.assertTrue(self.am.threshold == new_threshold) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_license.py b/tests/test_license.py old mode 100755 new mode 100644 index 90f9726d7d..a806b9fcac --- a/tests/test_license.py +++ b/tests/test_license.py @@ -93,30 +93,50 @@ def setUp(self): 'src/opendr/perception/activity_recognition/cox3d/algorithm', 'src/opendr/perception/object_tracking_2d/fair_mot/algorithm', 'src/opendr/perception/object_tracking_2d/deep_sort/algorithm', + 'src/opendr/perception/object_tracking_2d/siamrpn/data_utils', 'src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/backbones', 'src/opendr/perception/heart_anomaly_detection/attention_neural_bag_of_feature/algorithm', 'src/opendr/simulation/human_model_generation/utilities/PIFu', 'src/opendr/perception/multimodal_human_centric/rgbd_hand_gesture_learner/algorithm/architectures', 
'src/opendr/perception/skeleton_based_action_recognition/algorithm', - 'projects/data_generation/synthetic_multi_view_facial_image_generation/algorithm', + 'projects/python/simulation/synthetic_multi_view_facial_image_generation/algorithm', + 'projects/opendr_ws/devel', 'src/opendr/perception/semantic_segmentation/bisenet/algorithm', 'src/opendr/perception/object_detection_2d/retinaface/algorithm', 'src/opendr/perception/object_detection_2d/gem/algorithm', 'src/opendr/perception/object_detection_2d/detr/algorithm', + 'src/opendr/perception/object_detection_2d/nanodet/algorithm', 'src/opendr/perception/panoptic_segmentation/efficient_ps/algorithm/EfficientPS', - 'src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition', + 'src/opendr/perception/facial_expression_recognition/landmark_based_facial_expression_recognition/algorithm', + 'src/opendr/perception/facial_expression_recognition/image_based_facial_emotion_estimation/algorithm', + 'projects/python/perception/facial_expression_recognition/image_based_facial_emotion_estimation', + 'projects/opendr_ws_2/src/opendr_perception/test', + 'projects/opendr_ws_2/src/opendr_ros2_bridge/test', + 'projects/opendr_ws_2/src/vision_opencv', + 'projects/opendr_ws_2/install', + 'projects/opendr_ws_2/src/data_generation/test', + 'projects/opendr_ws_2/src/opendr_planning/test', + 'projects/opendr_ws_2/src/opendr_bridge/test', + 'projects/opendr_ws_2/src/opendr_interface/test', + 'projects/opendr_ws_2/src/opendr_data_generation/test', + 'projects/opendr_ws_2/src/opendr_simulation/test', ] skippedFilePaths = [ 'src/opendr/perception/activity_recognition/datasets/utils/decoder.py', - 'projects/perception/lightweight_open_pose/jetbot/utils/pid.py', + 'projects/python/perception/pose_estimation/lightweight_open_pose/jetbot/utils/pid.py', 'src/opendr/perception/compressive_learning/multilinear_compressive_learning/algorithm/trainers.py', 'src/opendr/perception/object_detection_2d/retinaface/Makefile', 'src/opendr/perception/multimodal_human_centric/audiovisual_emotion_learner/algorithm/efficientface_modulator.py', 'src/opendr/perception/multimodal_human_centric/audiovisual_emotion_learner/algorithm/efficientface_utils.py', 'src/opendr/perception/multimodal_human_centric/audiovisual_emotion_learner/algorithm/spatial_transforms.py', 'src/opendr/perception/multimodal_human_centric/audiovisual_emotion_learner/algorithm/transformer_timm.py', - 'src/opendr/perception/multimodal_human_centric/audiovisual_emotion_learner/algorithm/utils.py' + 'src/opendr/perception/multimodal_human_centric/audiovisual_emotion_learner/algorithm/utils.py', + 'projects/opendr_ws_2/src/opendr_perception/setup.py', + 'projects/opendr_ws_2/src/opendr_planning/setup.py', + 'projects/opendr_ws_2/src/opendr_bridge/setup.py', + 'projects/opendr_ws_2/src/data_generation/setup.py', + 'projects/opendr_ws_2/src/opendr_simulation/setup.py', ] skippedDirectories = [ diff --git a/tests/test_pep8.py b/tests/test_pep8.py index cecddcc7c7..a12f81b0a5 100755 --- a/tests/test_pep8.py +++ b/tests/test_pep8.py @@ -32,7 +32,9 @@ 'dependencies', 'lib', 'src/opendr/perception/panoptic_segmentation/efficient_ps/algorithm/EfficientPS', - 'projects/control/eagerx', + 'projects/python/control/eagerx', + 'projects/opendr_ws_2/src/vision_opencv', + 'projects/opendr_ws/devel', 'venv', 'build', ]
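For reference, a minimal usage sketch of the AmbiguityMeasure API exercised by the new tests/sources/tools/utils/test_ambiguity_measure.py above. This is illustrative only and not part of the patch: the random heatmap and image are placeholder inputs, and only the constructor, get_ambiguity_measure, plot_ambiguity_measure, and the threshold attribute mirror what the test covers.

# Illustrative sketch (placeholder inputs); mirrors the calls made in test_ambiguity_measure.py.
import numpy as np
from opendr.engine.data import Image
from opendr.utils.ambiguity_measure.ambiguity_measure import AmbiguityMeasure

am = AmbiguityMeasure()
heatmap = 10 * np.random.random((128, 128))  # stand-in for a model's value/heat map
ambiguous, locs, maxima, probs = am.get_ambiguity_measure(heatmap)
print("ambiguous:", bool(ambiguous), "- candidate locations:", len(locs))

# Optional visualisation on an (H, W, 3) uint8 image, wrapped as an OpenDR Image as in the test
img = Image(np.asarray(255 * np.random.random((128, 128, 3)), dtype="uint8"))
am.plot_ambiguity_measure(heatmap, locs, probs, img)

# The decision threshold is a plain read/write attribute
am.threshold = 0.2 * am.threshold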