diff --git a/.github/workflows/pyspark.yml b/.github/workflows/pyspark.yml new file mode 100644 index 000000000..34c211e66 --- /dev/null +++ b/.github/workflows/pyspark.yml @@ -0,0 +1,62 @@ +# Copyright 2022-2023 Alibaba Group Holding Limited. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: GraphAr PySpark CI + +on: + # Trigger the workflow on push or pull request, + # but only for the main branch + push: + branches: + - main + pull_request: + branches: + - main + +concurrency: + group: ${{ github.repository }}-${{ github.event.number || github.head_ref || github.sha }}-${{ github.workflow }} + cancel-in-progress: true + +jobs: + GraphAr-spark: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + with: + submodules: true + + - name: Install Python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + + - name: Install Poetry + uses: abatilo/actions-poetry@v2 + + - name: Install Spark Scala && PySpark + run: | + cd pyspark + make install_test + + - name: Run PyTest + run: | + cd pyspark + make test + + - name: Lint + run: | + cd pyspark + make install_lint + make lint + diff --git a/.gitignore b/.gitignore index 8ae11d615..667cc9f7d 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,66 @@ .ccls-cache compile_commands.json + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ +pyspark/assets + +# Jupyter Notebook +.ipynb_checkpoints +*.ipynb + + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Ruff +.ruff_cache + +### Scala ### +*.bloop +*.metals \ No newline at end of file diff --git a/.licenserc.yaml b/.licenserc.yaml index 211e332ee..c6234a1f0 100644 --- a/.licenserc.yaml +++ b/.licenserc.yaml @@ -40,6 +40,7 @@ header: - '*.md' - '*.rst' - '**/*.json' + - 'pyspark/poetry.lock' # This file is generated automatically by Poetry-tool; there is no way to add license header comment: on-failure @@ -47,4 +48,4 @@ header: dependency: files: - spark/pom.xml # If this is a maven project. - - java/pom.xml # If this is a maven project. \ No newline at end of file + - java/pom.xml # If this is a maven project. diff --git a/docs/Makefile b/docs/Makefile index b26465f3b..3dfb507d0 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -53,3 +53,25 @@ html: cpp-apidoc spark-apidoc --quiet @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 
+
+.PHONY: pyspark-apidoc
+pyspark-apidoc:
+	cd $(ROOTDIR)/pyspark && \
+	poetry run sphinx-apidoc -o $(ROOTDIR)/docs/pyspark/api graphar_pyspark/
+
+.PHONY: html-poetry
+html-poetry:
+	cd $(ROOTDIR)/pyspark && \
+	poetry run bash -c "cd $(ROOTDIR)/docs && $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html"
+	rm -fr $(BUILDDIR)/html/spark/reference
+	cp -fr $(ROOTDIR)/spark/target/site/scaladocs $(BUILDDIR)/html/spark/reference/
+	cd $(ROOTDIR)/java && \
+	mvn -P javadoc javadoc:aggregate \
+	-Dmaven.antrun.skip=true \
+	-DskipTests \
+	-Djavadoc.output.directory=$(ROOTDIR)/docs/$(BUILDDIR)/html/java/ \
+	-Djavadoc.output.destDir=reference \
+	--quiet
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
diff --git a/docs/index.rst b/docs/index.rst
index aebf60f38..9698da540 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -21,6 +21,7 @@
    C++
    Java
    Spark
+   PySpark
 
 .. toctree::
    :maxdepth: 2
diff --git a/docs/pyspark/api/graphar_pyspark.rst b/docs/pyspark/api/graphar_pyspark.rst
new file mode 100644
index 000000000..7035cb9c2
--- /dev/null
+++ b/docs/pyspark/api/graphar_pyspark.rst
@@ -0,0 +1,69 @@
+graphar\_pyspark package
+========================
+
+Submodules
+----------
+
+graphar\_pyspark.enums module
+-----------------------------
+
+.. automodule:: graphar_pyspark.enums
+   :members:
+   :undoc-members:
+   :show-inheritance:
+
+graphar\_pyspark.errors module
+------------------------------
+
+.. automodule:: graphar_pyspark.errors
+   :members:
+   :undoc-members:
+   :show-inheritance:
+
+graphar\_pyspark.graph module
+-----------------------------
+
+.. automodule:: graphar_pyspark.graph
+   :members:
+   :undoc-members:
+   :show-inheritance:
+
+graphar\_pyspark.info module
+----------------------------
+
+.. automodule:: graphar_pyspark.info
+   :members:
+   :undoc-members:
+   :show-inheritance:
+
+graphar\_pyspark.reader module
+------------------------------
+
+.. automodule:: graphar_pyspark.reader
+   :members:
+   :undoc-members:
+   :show-inheritance:
+
+graphar\_pyspark.util module
+----------------------------
+
+.. automodule:: graphar_pyspark.util
+   :members:
+   :undoc-members:
+   :show-inheritance:
+
+graphar\_pyspark.writer module
+------------------------------
+
+.. automodule:: graphar_pyspark.writer
+   :members:
+   :undoc-members:
+   :show-inheritance:
+
+Module contents
+---------------
+
+.. automodule:: graphar_pyspark
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/pyspark/api/modules.rst b/docs/pyspark/api/modules.rst
new file mode 100644
index 000000000..b8915d9e3
--- /dev/null
+++ b/docs/pyspark/api/modules.rst
@@ -0,0 +1,7 @@
+graphar_pyspark
+===============
+
+.. toctree::
+   :maxdepth: 4
+
+   graphar_pyspark
diff --git a/docs/pyspark/how-to.rst b/docs/pyspark/how-to.rst
new file mode 100644
index 000000000..c8ebd3605
--- /dev/null
+++ b/docs/pyspark/how-to.rst
@@ -0,0 +1,318 @@
+How to use GraphAr PySpark package
+==================================
+
+.. container:: cell markdown
+   :name: b23d0681-da6d-4759-9d62-08d9376712ef
+
+   .. rubric:: GraphAr PySpark
+      :name: graphar-pyspark
+
+   ``graphar_pyspark`` is implemented as bindings to the GraphAr Spark
+   Scala library. You should have ``graphar-0.1.0-SNAPSHOT.jar`` in your
+   Apache Spark JVM classpath, otherwise you will get an exception. To
+   add it, specify ``config("spark.jars", "path-to-graphar-jar")`` when
+   you create a SparkSession:
+
+.. container:: cell code
+   :name: 40fa9a16-66b7-44d7-8aff-dd84fed0303a
+
+   .. code:: python
+
+      from pyspark.sql import SparkSession
+
+      spark = (
+          SparkSession
+          .builder
+          .master("local[1]")
+          .appName("graphar-local-tests")
+          .config("spark.jars", "../../spark/target/graphar-0.1.0-SNAPSHOT.jar")
+          .config("spark.log.level", "INFO")
+          .getOrCreate()
+      )
+
+.. container:: cell markdown
+   :name: 1e40491b-9395-469c-bc30-ac4378d11265
+
+   .. rubric:: GraphAr PySpark initialize
+      :name: graphar-pyspark-initialize
+
+   PySpark bindings rely heavily on JVM calls via ``py4j``. To
+   initialize everything that is necessary for them, just call
+   ``graphar_pyspark.initialize()``:
+
+.. container:: cell code
+   :name: a1ff3f35-2a5a-4111-a296-b678b318b4dd
+
+   .. code:: python
+
+      from graphar_pyspark import initialize
+
+      initialize(spark)
+
+.. container:: cell markdown
+   :name: 180b35c8-c0aa-4c6c-abc0-ffbf2ea1d833
+
+   .. rubric:: GraphAr objects
+      :name: graphar-objects
+
+   Now you can import, create and modify all the classes you can
+   call from the `scala API of
+   GraphAr `__.
+   To simplify using GraphAr from Python, constants like GAR types,
+   supported file types, etc. are placed in ``graphar_pyspark.enums``.
+
+.. container:: cell code
+   :name: 85e186a4-0c44-450b-ac9d-d8624bb3d1d1
+
+   .. code:: python
+
+      from graphar_pyspark.info import Property, PropertyGroup, AdjList, AdjListType, VertexInfo, EdgeInfo, GraphInfo
+      from graphar_pyspark.enums import GarType, FileType
+
+.. container:: cell markdown
+   :name: 4b0aad82-df2d-47f9-9799-89b45fe61519
+
+   Main objects of GraphAr are the following:
+
+   - GraphInfo
+   - VertexInfo
+   - EdgeInfo
+
+   You can check the `Scala library
+   documentation `__
+   for more detailed information.
+
+.. container:: cell markdown
+   :name: 71ac9d59-521c-41bf-a951-b5d08768096e
+
+   .. rubric:: Creating objects in graphar_pyspark
+      :name: creating-objects-in-graphar_pyspark
+
+   The GraphAr PySpark package provides two main ways to initiate
+   objects, like ``GraphInfo``:
+
+   #. ``from_python(**args)`` when you create an object based on
+      Python arguments
+   #. ``from_scala(jvm_ref)`` when you create an object from the
+      corresponding JVM object (``py4j.java_gateway.JavaObject``)
+
+.. container:: cell code
+   :name: 560cec49-bb31-4ae5-86aa-f9b24642c283
+
+   .. code:: python
+
+      help(Property.from_python)
+
+   .. container:: output stream stdout
+
+      ::
+
+         Help on method from_python in module graphar_pyspark.info:
+
+         from_python(name: 'str', data_type: 'GarType', is_primary: 'bool') -> 'PropertyType' method of builtins.type instance
+             Create an instance of the Class from Python arguments.
+
+             :param name: property name
+             :param data_type: property data type
+             :param is_primary: flag that property is primary
+             :returns: instance of Python Class.
+
+.. container:: cell code
+   :name: 809301c2-89f3-4ea3-9afd-9154be317972
+
+   .. code:: python
+
+      python_property = Property.from_python(name="my_property", data_type=GarType.INT64, is_primary=False)
+      print(type(python_property))
+
+   .. container:: output stream stdout
+
+      ::
+
+         <class 'graphar_pyspark.info.Property'>
+
+.. container:: cell markdown
+   :name: 45f0f74d-9568-467f-809a-832f80d5afc6
+
+   You can always get a reference to the corresponding JVM object. For
+   example, if you want to use it in your own code and need a direct link
+   to the underlying instance of the Scala class, you can just call the
+   ``to_scala()`` method:
+
+.. container:: cell code
+   :name: 9c29c329-76ad-4908-84b6-06e004963ae5
+
+   .. code:: python
+
+      scala_obj = python_property.to_scala()
+      print(type(scala_obj))
+
+   .. container:: output stream stdout
+
+      ::
+
+         <class 'py4j.java_gateway.JavaObject'>
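+.. container:: cell markdown
+
+   ``from_python`` constructors can also be composed to build nested
+   objects. The following is a minimal sketch (the property names and
+   prefix are purely illustrative) that creates a ``PropertyGroup``
+   from two properties using only the API already imported above:
+
+.. container:: cell code
+
+   .. code:: python
+
+      id_property = Property.from_python(name="id", data_type=GarType.INT64, is_primary=True)
+      name_property = Property.from_python(name="name", data_type=GarType.STRING, is_primary=False)
+
+      property_group = PropertyGroup.from_python(
+          prefix="id_name/",
+          file_type=FileType.CSV,
+          properties=[id_property, name_property],
+      )
+      print(property_group.get_file_type())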
+.. container:: cell markdown
+   :name: 0703a9e0-a48a-4380-8ea6-383cc8164650
+
+   As we already mentioned, you can initialize an instance of the Python
+   class from the JVM object:
+
+.. container:: cell code
+   :name: 25d243a3-645c-4777-b54e-9175b0685c6f
+
+   .. code:: python
+
+      help(Property.from_scala)
+
+   .. container:: output stream stdout
+
+      ::
+
+         Help on method from_scala in module graphar_pyspark.info:
+
+         from_scala(jvm_obj: 'JavaObject') -> 'PropertyType' method of builtins.type instance
+             Create an instance of the Class from the corresponding JVM object.
+
+             :param jvm_obj: scala object in JVM.
+             :returns: instance of Python Class.
+
+.. container:: cell code
+   :name: fbea761b-a843-4225-a589-c66f98d7799c
+
+   .. code:: python
+
+      python_property = Property.from_scala(scala_obj)
+
+.. container:: cell markdown
+   :name: 7c54a9b6-29f1-4a57-aa14-30679613b128
+
+   Each public property and method of the Scala API is provided in
+   Python, but with a pythonic naming convention. For example, in Scala,
+   ``Property`` has the following fields:
+
+   - name
+   - data_type
+   - is_primary
+
+   For each such field in the Scala API there are getter and setter
+   methods. You can call them from Python too:
+
+.. container:: cell code
+   :name: ec90236a-cc39-42bc-a5d1-2f57db3a3d8b
+
+   .. code:: python
+
+      python_property.get_name()
+
+   .. container:: output execute_result
+
+      ::
+
+         'my_property'
+
+.. container:: cell markdown
+   :name: 81f26098-7eb8-4df5-9764-1b2710f8198c
+
+   You can also modify fields, but be careful: when you modify a field of
+   an instance of the Python class, you modify the underlying Scala object
+   at the same moment!
+
+.. container:: cell code
+   :name: ea88b175-ed5f-4fe3-8753-84916b52c7f2
+
+   .. code:: python
+
+      new_name = "my_renamed_property"
+      python_property.set_name(new_name)
+      python_property.get_name()
+
+   .. container:: output execute_result
+
+      ::
+
+         'my_renamed_property'
+
+.. container:: cell markdown
+   :name: 3b013f4b-ffd6-4de0-9587-d7273cb9c90c
+
+   .. rubric:: Loading Info objects from YAML
+      :name: loading-info-objects-from-yaml
+
+   Manually creating objects is not the primary way of using GraphAr
+   PySpark, though. ``GraphInfo``, ``VertexInfo`` and ``EdgeInfo`` can
+   also be initialized by reading from YAML files:
+
+.. container:: cell code
+   :name: 3a6dafea-346f-4905-84c1-6c5eda86bba4
+
+   .. code:: python
+
+      modern_graph_v_person = VertexInfo.load_vertex_info("../../testing/modern_graph/person.vertex.yml")
+      modern_graph_e_person_person = EdgeInfo.load_edge_info("../../testing/modern_graph/person_knows_person.edge.yml")
+      modern_graph = GraphInfo.load_graph_info("../../testing/modern_graph/modern_graph.graph.yml")
+
+.. container:: cell markdown
+   :name: 23c145ba-c4ee-43f0-a8d5-62ffebf1ebf3
+
+   After that you can work with these objects like regular Python
+   objects:
+
+.. container:: cell code
+   :name: 87e99095-0a5f-4e84-9e0a-35f97e1bf9f5
+
+   .. code:: python
+
+      print(modern_graph_v_person.dump())
+
+   .. container:: output stream stdout
+
+      ::
+
+         chunk_size: 2
+         prefix: vertex/person/
+         property_groups:
+         - prefix: id/
+           file_type: csv
+           properties:
+           - is_primary: true
+             name: id
+             data_type: int64
+         - prefix: name_age/
+           file_type: csv
+           properties:
+           - is_primary: false
+             name: name
+             data_type: string
+           - is_primary: false
+             name: age
+             data_type: int64
+         label: person
+         version: gar/v1
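+.. container:: cell markdown
+
+   The loaded info objects expose the same helper methods as manually
+   created ones. For instance (a small illustration based on the dump
+   above), you can query the primary key and the GAR type of a property:
+
+.. container:: cell code
+
+   .. code:: python
+
+      print(modern_graph_v_person.get_primary_key())
+      print(modern_graph_v_person.get_property_type("age"))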
+.. container:: cell code
+   :name: b9d44691-d07d-43d5-833c-0ede2d9b99d9
+
+   .. code:: python
+
+      print(modern_graph_v_person.contain_property("id") is True)
+      print(modern_graph_v_person.contain_property("bad_id?") is False)
+
+   .. container:: output stream stdout
+
+      ::
+
+         True
+         True
+
+.. container:: cell markdown
+   :name: cc992785-eabc-44c2-a63a-b5eebc9af996
+
+   Please refer to the Scala API and the examples of the GraphAr Spark
+   Scala library to see detailed and business-case-oriented examples!
diff --git a/docs/pyspark/index.rst b/docs/pyspark/index.rst
new file mode 100644
index 000000000..b37253e2f
--- /dev/null
+++ b/docs/pyspark/index.rst
@@ -0,0 +1,9 @@
+PySpark Library
+============================
+
+.. toctree::
+   :maxdepth: 2
+
+   Getting Started
+   How to use
+   Reference
diff --git a/docs/pyspark/pyspark-lib.rst b/docs/pyspark/pyspark-lib.rst
new file mode 100644
index 000000000..f02674306
--- /dev/null
+++ b/docs/pyspark/pyspark-lib.rst
@@ -0,0 +1,81 @@
+GraphAr PySpark Library
+============================
+
+.. note::
+   The current policy of the GraphAr project is that for Apache Spark the main API is the Spark Scala API. The PySpark API follows the Spark Scala API. Please refer to the `GraphAr Spark Scala library <../spark/index.html>`_ for more detailed information about how to use GraphAr with Apache Spark.
+
+Overview
+-----------
+
+The GraphAr PySpark library is provided for generating, loading and transforming GAR files with PySpark.
+
+- **Information Classes**: As in the C++ library, the information classes are implemented as a part of the PySpark library for constructing and accessing the meta information about the graphs, vertices and edges in GraphAr.
+- **IndexGenerator**: The IndexGenerator helps to generate the indices for vertex/edge DataFrames. In most cases, IndexGenerator is first utilized to generate the indices for a DataFrame (e.g., from primary keys), and then this DataFrame can be written into GAR files through the writer.
+- **Writer**: The GraphAr PySpark writer provides a set of interfaces that can be used to write Spark DataFrames into GAR files. Every time it takes a DataFrame as the logical table for a type of vertices or edges, assembles the data in the specified format (e.g., reorganizes the edges in the CSR way) and then dumps it to standard GAR files (CSV, ORC or Parquet files) under the specific directory path.
+- **Reader**: The GraphAr PySpark reader provides a set of interfaces that can be used to read GAR files. It reads a collection of vertices or edges at a time and assembles the result into a Spark DataFrame. Similar to the reader in the C++ library, it allows users to specify the data they need, e.g., reading a single property group instead of all properties.
+
+Use Cases
+----------
+
+The GraphAr PySpark library can be used in a range of scenarios (a minimal reading example follows this list):
+
+- Taking GAR as a data source to execute SQL queries or do graph processing (e.g., using GraphX).
+- Transforming data between GAR and other data sources (e.g., Hive, Neo4j, NebulaGraph, ...).
+- Transforming GAR data between different file types (e.g., from ORC to Parquet).
+- Transforming GAR data between different adjList types (e.g., from COO to CSR).
+- Modifying existing GAR data (e.g., adding new vertices/edges).
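+For example, reading a whole graph into vertex and edge DataFrames takes only a few lines. This is a minimal sketch that assumes an already initialized GraphAr session and the testing data shipped with the repository:
+
+.. code-block:: python
+
+   from graphar_pyspark.graph import GraphReader
+
+   result = GraphReader.read("../../testing/modern_graph/modern_graph.graph.yml")
+   person_df = result.vertex_dataframes["person"]
+   person_df.show()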
+
+Get GraphAr PySpark Library
+------------------------------
+
+Building from source
+`````````````````````
+
+GraphAr PySpark uses Poetry as a build system. Please refer to the `Poetry documentation `_ for instructions on how to install this tool. Currently GraphAr PySpark is built with Python 3.9 and PySpark 3.2.
+
+Make the GraphAr PySpark library directory the current working directory:
+
+.. code-block:: shell
+
+   cd GraphAr/pyspark
+
+Build the package:
+
+.. code-block:: shell
+
+   poetry build
+
+After building, a file like *graphar_pyspark-0.0.1.tar.gz* is generated in the directory *pyspark/dist/*.
+
+Get from PyPI
+```````````````
+
+You cannot install graphar-pyspark from PyPI for now.
+
+
+How to Use
+-----------------
+
+Initialization
+```````````````
+GraphAr PySpark is not a standalone library but bindings to GraphAr Scala. You need to have *graphar-spark-x.x.x.jar* in your *spark-jars*. Please refer to the `GraphAr Scala documentation <../spark/index>`_ to get this JAR.
+
+.. code-block:: python
+
+   # create a SparkSession
+   from pyspark.sql import SparkSession
+
+   spark = (
+       SparkSession
+       .builder
+       ...
+       .config("spark.jars", "path-to-graphar-spark-x.x.x.jar-file")
+       .getOrCreate()
+   )
+
+   from graphar_pyspark import initialize
+   initialize(spark)
+
+After initialization you can use the same API as in the GraphAr Scala library.
diff --git a/pyspark/Makefile b/pyspark/Makefile
new file mode 100644
index 000000000..f8f7fde8e
--- /dev/null
+++ b/pyspark/Makefile
@@ -0,0 +1,42 @@
+# Copyright 2022-2023 Alibaba Group Holding Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+.PHONY: install_test
+install_test:
+	export JAVA_HOME=${JAVA_HOME_11_X64} && \
+		cd ../spark && mvn clean package -DskipTests -Dspotless.check.skip=true
+	export PYSPARK_HADOOP_VERSION=3.2 && \
+		poetry install --with=spark,tests
+
+.PHONY: test
+test:
+	export JAVA_HOME=${JAVA_HOME_11_X64} && \
+		poetry run pytest --cov
+
+.PHONY: coverage_report
+coverage_report:
+	export JAVA_HOME=${JAVA_HOME_11_X64} && \
+		poetry run pytest --cov --cov-report=html:coverage_html graphar_pyspark tests/
+
+.PHONY: install_lint
+install_lint:
+	poetry install --with=lint
+
+.PHONY: lint
+lint:
+	poetry run ruff check graphar_pyspark/
+
+.PHONY: lint_fix
+lint_fix:
+	poetry run ruff check --fix graphar_pyspark/
diff --git a/pyspark/README.md b/pyspark/README.md
new file mode 100644
index 000000000..f909262b2
--- /dev/null
+++ b/pyspark/README.md
@@ -0,0 +1,35 @@
+
+
+# GraphAr PySpark
+
+This directory contains the code and build system for the GraphAr PySpark library. The library is implemented as bindings to the GraphAr Spark Scala library and does not contain any real logic.
+
+
+## Introduction
+
+The GraphAr PySpark project provides a PySpark API and utilities for working with the GAR file format from PySpark. The project has only one Python dependency -- `pyspark` itself. Currently only `pyspark~=3.2` is supported, but in the future the scope of supported versions will be extended.
+
+## Installation
+
+Currently, the only way to install `graphar_pyspark` is to build it from the source code. The project is built with Poetry, so it is highly recommended to use this build system.
+
+```shell
+poetry build
+```
+
+It creates a `tar.gz` file in the `dist` directory.
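+The resulting archive can then be installed into any environment where PySpark is available, for example with pip (the version number below is just an illustration):
+
+```shell
+pip install dist/graphar_pyspark-0.0.1.tar.gz
+```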
diff --git a/pyspark/graphar_pyspark/__init__.py b/pyspark/graphar_pyspark/__init__.py
new file mode 100644
index 000000000..40f44a3d5
--- /dev/null
+++ b/pyspark/graphar_pyspark/__init__.py
@@ -0,0 +1,64 @@
+# Copyright 2022-2023 Alibaba Group Holding Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""GraphArSession and initialization."""
+
+from pyspark.sql import SparkSession
+
+from graphar_pyspark.errors import GraphArIsNotInitializedError
+
+
+class _GraphArSession:
+    """Singleton GraphAr helper object that contains the SparkSession and JVM references.
+
+    It is implemented as a module-level instance of the class.
+    """
+
+    def __init__(self) -> None:
+        self.ss = None
+        self.sc = None
+        self.jvm = None
+        self.graphar = None
+        self.jsc = None
+        self.jss = None
+
+    def set_spark_session(self, spark_session: SparkSession) -> None:
+        self.ss = spark_session  # Python SparkSession
+        self.sc = spark_session.sparkContext  # Python SparkContext
+        self.jvm = spark_session._jvm  # JVM
+        self.graphar = spark_session._jvm.com.alibaba.graphar  # Alias to scala graphar
+        self.jsc = spark_session._jsc  # Java SparkContext
+        self.jss = spark_session._jsparkSession  # Java SparkSession
+
+    def is_initialized(self) -> bool:
+        return self.ss is not None
+
+
+GraphArSession = _GraphArSession()
+
+
+def initialize(spark: SparkSession) -> None:
+    """Initialize GraphAr session.
+
+    :param spark: pyspark SparkSession object.
+    """
+    GraphArSession.set_spark_session(
+        spark,
+    )  # modify the global GraphArSession singleton.
+
+
+def _check_session() -> None:
+    if not GraphArSession.is_initialized():
+        msg = "GraphArSession is not initialized. Call `graphar_pyspark.initialize` first!"
+        raise GraphArIsNotInitializedError(msg)
diff --git a/pyspark/graphar_pyspark/enums.py b/pyspark/graphar_pyspark/enums.py
new file mode 100644
index 000000000..33d50ea1c
--- /dev/null
+++ b/pyspark/graphar_pyspark/enums.py
@@ -0,0 +1,106 @@
+# Copyright 2022-2023 Alibaba Group Holding Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +"""Enumerations and constants.""" + +from enum import Enum + +from py4j.java_gateway import JavaObject + +from graphar_pyspark import GraphArSession, _check_session + + +class GarType(Enum): + """Main data type in gar enumeration.""" + + BOOL = "bool" + INT32 = "int32" + INT64 = "int64" + FLOAT = "float" + DOUBLE = "double" + STRING = "string" + LIST = "list" + + @staticmethod + def from_scala(jvm_obj: JavaObject) -> "GarType": + """Create an instance of the Class from the corresponding JVM object. + + :param jvm_obj: scala object in JVM. + :returns: instance of Python Class. + """ + _check_session() + return GarType(GraphArSession.graphar.GarType.GarTypeToString(jvm_obj)) + + def to_scala(self) -> JavaObject: + """Transform object to JVM representation. + + :returns: JavaObject + """ + _check_session() + return GraphArSession.graphar.GarType.StringToGarType(self.value) + + +class FileType(Enum): + """Type of file format.""" + + CSV = "csv" + PARQUET = "parquet" + ORC = "orc" + + @staticmethod + def from_scala(jvm_obj: JavaObject) -> "FileType": + """Create an instance of the Class from the corresponding JVM object. + + :param jvm_obj: scala object in JVM. + :returns: instance of Python Class. + """ + _check_session() + return FileType(GraphArSession.graphar.FileType.FileTypeToString(jvm_obj)) + + def to_scala(self) -> JavaObject: + """Transform object to JVM representation. + + :returns: JavaObject + """ + _check_session() + return GraphArSession.graphar.FileType.StringToFileType(self.value) + + +class AdjListType(Enum): + """Adj list type enumeration for adjacency list of graph.""" + + UNORDERED_BY_SOURCE = "unordered_by_source" + UNORDERED_BY_DEST = "unordered_by_dest" + ORDERED_BY_SOURCE = "ordered_by_source" + ORDERED_BY_DEST = "ordered_by_dest" + + @staticmethod + def from_scala(jvm_obj: JavaObject) -> "AdjListType": + """Create an instance of the Class from the corresponding JVM object. + + :param jvm_obj: scala object in JVM. + :returns: instance of Python Class. + """ + _check_session() + return AdjListType( + GraphArSession.graphar.AdjListType.AdjListTypeToString(jvm_obj), + ) + + def to_scala(self) -> JavaObject: + """Transform object to JVM representation. + + :returns: JavaObject + """ + _check_session() + return GraphArSession.graphar.AdjListType.StringToAdjListType(self.value) diff --git a/pyspark/graphar_pyspark/errors.py b/pyspark/graphar_pyspark/errors.py new file mode 100644 index 000000000..da2273c2a --- /dev/null +++ b/pyspark/graphar_pyspark/errors.py @@ -0,0 +1,23 @@ +# Copyright 2022-2023 Alibaba Group Holding Limited. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Custom Exceptions.""" + + +class InvalidGraphFormatError(ValueError): + """Exception that graph format arguments have wrong format.""" + + +class GraphArIsNotInitializedError(ValueError): + """Exception that GraphAr Session is not initialized.""" diff --git a/pyspark/graphar_pyspark/graph.py b/pyspark/graphar_pyspark/graph.py new file mode 100644 index 000000000..55d222d09 --- /dev/null +++ b/pyspark/graphar_pyspark/graph.py @@ -0,0 +1,261 @@ +# Copyright 2022-2023 Alibaba Group Holding Limited. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Bidnings to com.alibaba.graphar.graph.""" + +from __future__ import annotations + +from collections.abc import Mapping +from dataclasses import dataclass +from typing import Optional, Union + +from py4j.java_gateway import JavaObject +from pyspark.sql import DataFrame + +from graphar_pyspark import GraphArSession, _check_session +from graphar_pyspark.enums import FileType +from graphar_pyspark.errors import InvalidGraphFormatError +from graphar_pyspark.info import GraphInfo + + +@dataclass(frozen=True) +class EdgeLabels: + """A triplet that describe edge. Contains source, edge and dest labels. Immutable.""" + + src_label: str + edge_label: str + dst_label: str + + +@dataclass(frozen=True) +class GraphReaderResult: + """A simple immutable class, that represent results of reading a graph with GraphReader.""" + + vertex_dataframes: Mapping[str, DataFrame] + edge_dataframes: Mapping[EdgeLabels, Mapping[str, DataFrame]] + + @staticmethod + def from_scala( + jvm_result: tuple[ + dict[str, JavaObject], + dict[tuple[str, str, str], dict[str, JavaObject]], + ], + ) -> "GraphReaderResult": + """Create an instance of the Class from JVM method output. + + :param jvm_result: structure, returned from JVM. + :returns: instance of Python Class. + """ + first_dict = {} + first_scala_map = jvm_result._1() + first_scala_map_iter = first_scala_map.keySet().iterator() + + while first_scala_map_iter.hasNext(): + k = first_scala_map_iter.next() + first_dict[k] = DataFrame(first_scala_map.get(k).get(), GraphArSession.ss) + + second_dict = {} + second_scala_map = jvm_result._2() + second_scala_map_iter = second_scala_map.keySet().iterator() + + while second_scala_map_iter.hasNext(): + k = second_scala_map_iter.next() + nested_scala_map = second_scala_map.get(k).get() + nested_scala_map_iter = nested_scala_map.keySet().iterator() + inner_dict = {} + + while nested_scala_map_iter.hasNext(): + kk = nested_scala_map_iter.next() + inner_dict[kk] = DataFrame( + nested_scala_map.get(kk).get(), + GraphArSession.ss, + ) + + second_dict[EdgeLabels(k._1(), k._2(), k._3())] = inner_dict + + return GraphReaderResult( + vertex_dataframes=first_dict, + edge_dataframes=second_dict, + ) + + +class GraphReader: + """The helper object for reading graph through the definitions of graph info.""" + + @staticmethod + def read( + graph_info: Union[GraphInfo, str], + ) -> GraphReaderResult: + """Read the graph as vertex and edge DataFrames with the graph info yaml file or GraphInfo object. 
+
+        :param graph_info: The path of the graph info yaml or a GraphInfo instance.
+        :returns: GraphReaderResult that contains vertex and edge DataFrames.
+        """
+        _check_session()
+        if isinstance(graph_info, str):
+            graph_info = GraphInfo.load_graph_info(graph_info)
+
+        jvm_result = GraphArSession.graphar.graph.GraphReader.readWithGraphInfo(
+            graph_info.to_scala(),
+            GraphArSession.jss,
+        )
+        return GraphReaderResult.from_scala(jvm_result)
+
+
+class GraphWriter:
+    """The helper class for writing graph."""
+
+    def __init__(self, jvm_obj: JavaObject) -> None:
+        """One should not use this constructor directly, please use `from_scala` or `from_python`."""
+        _check_session()
+        self._jvm_graph_writer_obj = jvm_obj
+
+    def to_scala(self) -> JavaObject:
+        """Transform object to JVM representation.
+
+        :returns: JavaObject
+        """
+        return self._jvm_graph_writer_obj
+
+    @staticmethod
+    def from_scala(jvm_obj: JavaObject) -> "GraphWriter":
+        """Create an instance of the Class from the corresponding JVM object.
+
+        :param jvm_obj: scala object in JVM.
+        :returns: instance of Python Class.
+        """
+        return GraphWriter(jvm_obj)
+
+    @staticmethod
+    def from_python() -> "GraphWriter":
+        """Create an instance of the Class from Python arguments."""
+        return GraphWriter(GraphArSession.graphar.graph.GraphWriter())
+
+    def put_vertex_data(self, label: str, df: DataFrame, primary_key: str) -> None:
+        """Put the vertex DataFrame into writer.
+
+        :param label: label of vertex.
+        :param df: DataFrame of the vertex type.
+        :param primary_key: primary key of the vertex type, default is empty, which takes the first property column as the primary key.
+        """
+        self._jvm_graph_writer_obj.PutVertexData(label, df._jdf, primary_key)
+
+    def put_edge_data(self, relation: tuple[str, str, str], df: DataFrame) -> None:
+        """Put the edge DataFrame into writer.
+
+        :param relation: 3-Tuple (source label, edge label, target label) to indicate edge type.
+        :param df: DataFrame of the edge type.
+        """
+        relation_jvm = GraphArSession.jvm.scala.Tuple3(
+            relation[0], relation[1], relation[2],
+        )
+        self._jvm_graph_writer_obj.PutEdgeData(relation_jvm, df._jdf)
+
+    def write_with_graph_info(self, graph_info: Union[GraphInfo, str]) -> None:
+        """Write the graph data in graphar format with graph info.
+
+        Note: the original method is `write`, but there is no direct overloading in Python.
+
+        :param graph_info: the graph info object for the graph or the path to the graph info object.
+        """
+        if isinstance(graph_info, str):
+            self._jvm_graph_writer_obj.write(graph_info, GraphArSession.jss)
+        else:
+            self._jvm_graph_writer_obj.write(graph_info.to_scala(), GraphArSession.jss)
+
+    def write(
+        self,
+        path: str,
+        name: str = "graph",
+        vertex_chunk_size: Optional[int] = None,
+        edge_chunk_size: Optional[int] = None,
+        file_type: Optional[FileType] = None,
+        version: Optional[str] = None,
+    ) -> None:
+        """Write graph data in graphar format.
+
+        Note: for default parameters check com.alibaba.graphar.GeneralParams;
+        for this method, None for any of the arguments means that the default value will be used.
+
+        :param path: the directory to write.
+        :param name: the name of the graph, default is 'graph'
+        :param vertex_chunk_size: the chunk size for vertices, default is 2^18
+        :param edge_chunk_size: the chunk size for edges, default is 2^22
+        :param file_type: the file type for data payload file, support [parquet, orc, csv], default is parquet.
+        :param version: version of graphar format, default is v1.
+ """ + if vertex_chunk_size is None: + vertex_chunk_size = ( + GraphArSession.graphar.GeneralParams.defaultVertexChunkSize + ) + + if edge_chunk_size is None: + edge_chunk_size = GraphArSession.graphar.GeneralParams.defaultEdgeChunkSize + + file_type = ( + GraphArSession.graphar.GeneralParams.defaultFileType + if file_type is None + else file_type.value + ) + + if version is None: + version = GraphArSession.graphar.GeneralParams.defaultVersion + + self._jvm_graph_writer_obj.write( + path, + GraphArSession.jss, + name, + vertex_chunk_size, + edge_chunk_size, + file_type, + version, + ) + + +class GraphTransformer: + """The helper object for transforming graphs through the definitions of their infos.""" + + @staticmethod + def transform( + source_graph_info: Union[str, GraphInfo], + dest_graph_info: Union[str, GraphInfo], + ) -> None: + """Transform the graphs following the meta data provided or defined in info files. + + Note: both arguments should be strings or GrapInfo instances! Mixed arguments type is not supported. + + :param source_graph_info: The path of the graph info yaml file for the source graph OR the info object for the source graph. + :param dest_graph_info: The path of the graph info yaml file for the destination graph OR the info object for the destination graph. + :raise InvalidGraphFormatException: if you pass mixed format of source and dest graph info. + """ + _check_session() + if isinstance(source_graph_info, str) and isinstance(dest_graph_info, str): + GraphArSession.graphar.graph.GraphTransformer.transform( + source_graph_info, + dest_graph_info, + GraphArSession.jss, + ) + elif isinstance(source_graph_info, GraphInfo) and isinstance( + dest_graph_info, + GraphInfo, + ): + GraphArSession.graphar.graph.GraphTransformer.transform( + source_graph_info.to_scala(), + dest_graph_info.to_scala(), + GraphArSession.jss, + ) + else: + msg = "Both src and dst graph info objects should be of the same type. " + msg += f"But {type(source_graph_info)} and {type(dest_graph_info)} were provided!" + raise InvalidGraphFormatError(msg) diff --git a/pyspark/graphar_pyspark/info.py b/pyspark/graphar_pyspark/info.py new file mode 100644 index 000000000..4534c960f --- /dev/null +++ b/pyspark/graphar_pyspark/info.py @@ -0,0 +1,1504 @@ +# copyright 2022-2023 alibaba group holding limited. +# +# licensed under the apache license, version 2.0 (the "license"); +# you may not use this file except in compliance with the license. +# you may obtain a copy of the license at +# +# http://www.apache.org/licenses/license-2.0 +# +# unless required by applicable law or agreed to in writing, software +# distributed under the license is distributed on an "as is" basis, +# without warranties or conditions of any kind, either express or implied. +# see the license for the specific language governing permissions and +# limitations under the license. 
+ +"""Bindings to com.alibaba.graphar info classes.""" + +# because we are using type-hints, we need to define few custom TypeVar +# to describe returns of classmethods; + +from __future__ import annotations + +import os +from collections.abc import Sequence +from typing import Optional, TypeVar, Union + +from py4j.java_collections import JavaList +from py4j.java_gateway import JavaObject + +from graphar_pyspark import GraphArSession, _check_session +from graphar_pyspark.enums import AdjListType, FileType, GarType + +# Return type of Property classmethods +PropertyType = TypeVar("PropertyType", bound="Property") + + +class Property: + """The property information of vertex or edge.""" + + def __init__( + self, + name: Optional[str], + data_type: Optional[GarType], + is_primary: Optional[bool], + jvm_obj: Optional[JavaObject] = None, + ) -> None: + """One should not use this constructor directly, please use `from_scala` or `from_python`.""" + _check_session() + if jvm_obj is not None: + self._jvm_property_obj = jvm_obj + else: + property_pyobj = GraphArSession.graphar.Property() + property_pyobj.setName(name) + property_pyobj.setData_type(data_type.value) + property_pyobj.setIs_primary(is_primary) + + self._jvm_property_obj = property_pyobj + + def get_name(self) -> str: + """Get name from corresponding JVM object. + + :returns: name + """ + return self._jvm_property_obj.getName() + + def set_name(self, name: str) -> None: + """Mutate corresponding JVM object. + + :param name: name + """ + self._jvm_property_obj.setName(name) + + def get_data_type(self) -> GarType: + """Get data type from corresponding JVM object. + + :returns: data type + """ + return GarType(self._jvm_property_obj.getData_type()) + + def set_data_type(self, data_type: GarType) -> None: + """Mutate corresponding JVM object. + + :param data_type: data type + """ + self._jvm_property_obj.setData_type(data_type.value) + + def get_is_primary(self) -> bool: + """Get is priamry flag from corresponding JVM object. + + :returns: is primary + """ + return self._jvm_property_obj.getIs_primary() + + def set_is_primary(self, is_primary: bool) -> None: + """Mutate corresponding JVM object. + + :param is_primary: is primary + """ + self._jvm_property_obj.setIs_primary(is_primary) + + def to_scala(self) -> JavaObject: + """Transform object to JVM representation. + + :returns: JavaObject + """ + return self._jvm_property_obj + + @classmethod + def from_scala(cls: type[PropertyType], jvm_obj: JavaObject) -> PropertyType: + """Create an instance of the Class from the corresponding JVM object. + + :param jvm_obj: scala object in JVM. + :returns: instance of Python Class. + """ + return cls(None, None, None, jvm_obj) + + @classmethod + def from_python( + cls: type[PropertyType], + name: str, + data_type: GarType, + is_primary: bool, + ) -> PropertyType: + """Create an instance of the Class from Python arguments. + + :param name: property name + :param data_type: property data type + :param is_primary: flag that property is primary + :returns: instance of Python Class. 
+ """ + return cls(name, data_type, is_primary, None) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Property): + return False + + return ( + (self.get_name() == other.get_name()) + and (self.get_data_type() == other.get_data_type()) + and (self.get_is_primary() == other.get_is_primary()) + ) + + +# Return type of PropertyGroup classmethods +PropertyGroupType = TypeVar("PropertyGroupType", bound="PropertyGroup") + + +class PropertyGroup: + """PropertyGroup is a class to store the property group information.""" + + def __init__( + self, + prefix: Optional[str], + file_type: Optional[FileType], + properties: Optional[Sequence[Property]], + jvm_obj: Optional[JavaObject], + ) -> None: + """One should not use this constructor directly, please use `from_scala` or `from_python`.""" + _check_session() + if jvm_obj is not None: + self._jvm_property_group_obj = jvm_obj + else: + property_group = GraphArSession.graphar.PropertyGroup() + property_group.setPrefix(prefix) + property_group.setFile_type(file_type.value) + property_group.setProperties( + [py_property.to_scala() for py_property in properties], + ) + self._jvm_property_group_obj = property_group + + def get_prefix(self) -> str: + """Get prefix from the corresponding JVM object. + + :returns: prefix + """ + return self._jvm_property_group_obj.getPrefix() + + def set_prefix(self, prefix: str) -> None: + """Mutate the corresponding JVM object. + + :param prefix: prefix + """ + self._jvm_property_group_obj.setPrefix(prefix) + + def get_file_type(self) -> FileType: + """Get file type from the corresponding JVM object. + + :returns: FileType + """ + return FileType(self._jvm_property_group_obj.getFile_type()) + + def set_file_type(self, file_type: FileType) -> None: + """Mutate the corresponding JVM object. + + :param file_type: FileType + """ + self._jvm_property_group_obj.setFile_type(file_type.value) + + def get_properties(self) -> Sequence[Property]: + """Get properties from the corresponding JVM object. + + :returns: list of Properties + """ + return [ + Property.from_scala(jvm_property) + for jvm_property in self._jvm_property_group_obj.getProperties() + ] + + def set_properties(self, properties: Sequence[Property]) -> None: + """Mutate the corresponding JVM object. + + :param properties: list of Properties + """ + self._jvm_property_group_obj.setProperties( + [py_property.to_scala() for py_property in properties], + ) + + def to_scala(self) -> JavaObject: + """Transform object to JVM representation. + + :returns: JavaObject + """ + return self._jvm_property_group_obj + + @classmethod + def from_scala( + cls: type[PropertyGroupType], + jvm_obj: JavaObject, + ) -> PropertyGroupType: + """Create an instance of the Class from the corresponding JVM object. + + :param jvm_obj: scala object in JVM. + :returns: instance of Python Class. + """ + return cls(None, None, None, jvm_obj) + + @classmethod + def from_python( + cls: type[PropertyGroupType], + prefix: str, + file_type: FileType, + properties: Sequence[Property], + ) -> PropertyGroupType: + """Create an instance of the class from Python args. 
+
+        :param prefix: path prefix
+        :param file_type: type of file
+        :param properties: list of properties
+        """
+        return cls(prefix, file_type, properties, None)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, PropertyGroup):
+            return False
+
+        return (
+            (self.get_prefix() == other.get_prefix())
+            and (self.get_file_type() == other.get_file_type())
+            and (len(self.get_properties()) == len(other.get_properties()))
+            and all(
+                p_left == p_right
+                for p_left, p_right in zip(
+                    self.get_properties(),
+                    other.get_properties(),
+                )
+            )
+        )
+
+
+# Return type of VertexInfo classmethods
+VertexInfoType = TypeVar("VertexInfoType", bound="VertexInfo")
+
+
+class VertexInfo:
+    """VertexInfo is a class to store the vertex meta information."""
+
+    def __init__(
+        self,
+        label: Optional[str],
+        chunk_size: Optional[int],
+        prefix: Optional[str],
+        property_groups: Optional[Sequence[PropertyGroup]],
+        version: Optional[str],
+        jvm_obj: Optional[JavaObject],
+    ) -> None:
+        """One should not use this constructor directly, please use `from_scala` or `from_python`."""
+        _check_session()
+        if jvm_obj is not None:
+            self._jvm_vertex_info_obj = jvm_obj
+        else:
+            vertex_info = GraphArSession.graphar.VertexInfo()
+            vertex_info.setLabel(label)
+            vertex_info.setChunk_size(chunk_size)
+            vertex_info.setPrefix(prefix)
+            vertex_info.setProperty_groups(
+                [py_property_group.to_scala() for py_property_group in property_groups],
+            )
+            vertex_info.setVersion(version)
+            self._jvm_vertex_info_obj = vertex_info
+
+    def get_label(self) -> str:
+        """Get label from the corresponding JVM object.
+
+        :returns: label
+        """
+        return self._jvm_vertex_info_obj.getLabel()
+
+    def set_label(self, label: str) -> None:
+        """Mutate the corresponding JVM object.
+
+        :param label: new label
+        """
+        self._jvm_vertex_info_obj.setLabel(label)
+
+    def get_chunk_size(self) -> int:
+        """Get chunk size from the corresponding JVM object.
+
+        :returns: chunk size
+        """
+        return self._jvm_vertex_info_obj.getChunk_size()
+
+    def set_chunk_size(self, chunk_size: int) -> None:
+        """Mutate the corresponding JVM object.
+
+        :param chunk_size: new chunk size
+        """
+        self._jvm_vertex_info_obj.setChunk_size(chunk_size)
+
+    def get_prefix(self) -> str:
+        """Get prefix from the corresponding JVM object.
+
+        :returns: prefix
+        """
+        return self._jvm_vertex_info_obj.getPrefix()
+
+    def set_prefix(self, prefix: str) -> None:
+        """Mutate the corresponding JVM object.
+
+        :param prefix: the new prefix.
+        """
+        self._jvm_vertex_info_obj.setPrefix(prefix)
+
+    def get_property_groups(self) -> Sequence[PropertyGroup]:
+        """Get property groups from the corresponding JVM object.
+
+        :returns: property groups
+        """
+        return [
+            PropertyGroup.from_scala(jvm_property_group)
+            for jvm_property_group in self._jvm_vertex_info_obj.getProperty_groups()
+        ]
+
+    def set_property_groups(self, property_groups: Sequence[PropertyGroup]) -> None:
+        """Mutate the corresponding JVM object.
+
+        :param property_groups: new property groups
+        """
+        self._jvm_vertex_info_obj.setProperty_groups(
+            [py_property_group.to_scala() for py_property_group in property_groups],
+        )
+
+    def get_version(self) -> str:
+        """Get version from the corresponding JVM object.
+
+        :returns: version
+        """
+        return self._jvm_vertex_info_obj.getVersion()
+
+    def set_version(self, version: str) -> None:
+        """Mutate the corresponding JVM object.
+
+        :param version: the new version.
+ """ + self._jvm_vertex_info_obj.setVersion(version) + + def contain_property_group(self, property_group: PropertyGroup) -> bool: + """Check if the vertex info contains the property group. + + :param property_group: the property group to check. + :returns: true if the vertex info contains the property group, otherwise false. + """ + return self._jvm_vertex_info_obj.containPropertyGroup(property_group.to_scala()) + + def contain_property(self, property_name: str) -> bool: + """Check if the vertex info contains certain property. + + :param property_name: name of the property. + :returns: true if the vertex info contains the property, otherwise false. + """ + return self._jvm_vertex_info_obj.containProperty(property_name) + + def get_property_group(self, property_name: str) -> PropertyGroup: + """Get the property group that contains property. + + WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method! + + :param property_name: name of the property. + :returns: property group that contains the property, otherwise raise IllegalArgumentException error. + """ + return PropertyGroup.from_scala( + self._jvm_vertex_info_obj.getPropertyGroup(property_name), + ) + + def get_property_type(self, property_name: str) -> GarType: + """Get the data type of property. + + WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method! + + :param property_name: name of the property. + :returns: the data type in gar of the property. If the vertex info does not contains the property, raise IllegalArgumentException error. + """ + return GarType.from_scala( + self._jvm_vertex_info_obj.getPropertyType(property_name), + ) + + def is_primary_key(self, property_name: str) -> bool: + """Check if the property is primary key. + + :param property_name: name of the property to check. + :returns: true if the property if the primary key of vertex info, otherwise return false. + """ + return self._jvm_vertex_info_obj.isPrimaryKey(property_name) + + def get_primary_key(self) -> str: + """Get primary key of vertex info. + + :returns: name of the primary key. + """ + return self._jvm_vertex_info_obj.getPrimaryKey() + + def is_validated(self) -> bool: + """Check if the vertex info is validated. + + :returns: true if the vertex info is validated, otherwise return false. + """ + return self._jvm_vertex_info_obj.isValidated() + + def get_vertices_num_file_path(self) -> str: + """Get the vertex num file path of vertex info. + + :returns: vertex num file path of vertex info. + """ + return self._jvm_vertex_info_obj.getVerticesNumFilePath() + + def get_file_path(self, property_group: PropertyGroup, chunk_index: int) -> str: + """Get the chunk file path of property group of vertex chunk. + + :param property_group: the property group. + :param chunk_index: the index of vertex chunk + :returns: chunk file path. + + """ + return self._jvm_vertex_info_obj.getFilePath( + property_group.to_scala(), + chunk_index, + ) + + def get_path_prefix(self, property_group: PropertyGroup) -> str: + """Get the path prefix for the specified property group. + + :param property_group: the property group. + :returns: the path prefix of the property group chunk files. + """ + return self._jvm_vertex_info_obj.getPathPrefix(property_group.to_scala()) + + def dump(self) -> str: + """Dump to Yaml string. 
+
+        :returns: yaml string
+        """
+        return self._jvm_vertex_info_obj.dump()
+
+    @staticmethod
+    def load_vertex_info(vertex_info_path: str) -> "VertexInfo":
+        """Load a yaml file from path and construct a VertexInfo from it.
+
+        :param vertex_info_path: yaml file path
+        :returns: VertexInfo object
+        """
+        return VertexInfo.from_scala(
+            GraphArSession.graphar.VertexInfo.loadVertexInfo(
+                vertex_info_path,
+                GraphArSession.jss,
+            ),
+        )
+
+    def to_scala(self) -> JavaObject:
+        """Transform object to JVM representation.
+
+        :returns: JavaObject
+        """
+        return self._jvm_vertex_info_obj
+
+    @classmethod
+    def from_scala(cls: type[VertexInfoType], jvm_obj: JavaObject) -> VertexInfoType:
+        """Create an instance of the Class from the corresponding JVM object.
+
+        :param jvm_obj: scala object in JVM.
+        :returns: instance of Python Class.
+        """
+        return VertexInfo(
+            None,
+            None,
+            None,
+            None,
+            None,
+            jvm_obj,
+        )
+
+    @classmethod
+    def from_python(
+        cls: type[VertexInfoType],
+        label: str,
+        chunk_size: int,
+        prefix: str,
+        property_groups: Sequence[PropertyGroup],
+        version: str,
+    ) -> VertexInfoType:
+        """Create an instance of the class based on python args.
+
+        :param label: label of the vertex
+        :param chunk_size: chunk size
+        :param prefix: vertex prefix
+        :param property_groups: list of property groups
+        :param version: version of GAR
+        """
+        return VertexInfo(label, chunk_size, prefix, property_groups, version, None)
+
+
+# Return type of AdjList classmethods
+AdjListClassType = TypeVar("AdjListClassType", bound="AdjList")
+
+
+class AdjList:
+    """AdjList is a class to store the adj list information of edge."""
+
+    def __init__(
+        self,
+        ordered: Optional[bool],
+        aligned_by: Optional[str],
+        prefix: Optional[str],
+        file_type: Optional[FileType],
+        property_groups: Optional[Sequence[PropertyGroup]],
+        jvm_obj: Optional[JavaObject],
+    ) -> None:
+        """One should not use this constructor directly, please use `from_scala` or `from_python`."""
+        _check_session()
+        if jvm_obj is not None:
+            self._jvm_adj_list_obj = jvm_obj
+        else:
+            jvm_adj_list = GraphArSession.graphar.AdjList()
+            jvm_adj_list.setOrdered(ordered)
+            jvm_adj_list.setAligned_by(aligned_by)
+            jvm_adj_list.setPrefix(prefix)
+            jvm_adj_list.setFile_type(file_type.value)
+            jvm_adj_list.setProperty_groups(
+                [py_property_group.to_scala() for py_property_group in property_groups],
+            )
+            self._jvm_adj_list_obj = jvm_adj_list
+
+    def get_ordered(self) -> bool:
+        """Get ordered flag from the corresponding JVM object.
+
+        :returns: ordered
+        """
+        return self._jvm_adj_list_obj.getOrdered()
+
+    def set_ordered(self, ordered: bool) -> None:
+        """Mutate the corresponding JVM object.
+
+        :param ordered: new ordered flag
+        """
+        self._jvm_adj_list_obj.setOrdered(ordered)
+
+    def get_aligned_by(self) -> str:
+        """Get aligned_by from the corresponding JVM object.
+
+        :returns: aligned by as a string ("src", "dst")
+        """
+        return self._jvm_adj_list_obj.getAligned_by()
+
+    def set_aligned_by(self, aligned_by: str) -> None:
+        """Mutate the corresponding JVM object.
+
+        :param aligned_by: the new aligned_by (recommended to use "src" or "dst")
+
+        """
+        self._jvm_adj_list_obj.setAligned_by(aligned_by)
+
+    def get_prefix(self) -> str:
+        """Get prefix from the corresponding JVM object.
+
+        :returns: prefix
+        """
+        return self._jvm_adj_list_obj.getPrefix()
+
+    def set_prefix(self, prefix: str) -> None:
+        """Mutate the corresponding JVM object.
+ + :param prefix: the new prefix + + """ + self._jvm_adj_list_obj.setPrefix(prefix) + + def get_file_type(self) -> FileType: + """Get FileType (as Enum) from the corresponding JVM object. + + :returns: file type + """ + return FileType(self._jvm_adj_list_obj.getFile_type()) + + def set_file_type(self, file_type: FileType) -> None: + """Mutate the corresponding JVM object. + + :param file_type: the new file type + """ + self._jvm_adj_list_obj.setFile_type(file_type.value) + + def get_property_groups(self) -> Sequence[PropertyGroup]: + """Get property groups from the corresponding JVM object. + + :returns: property groups + """ + return [ + PropertyGroup.from_scala(jvm_property_group) + for jvm_property_group in self._jvm_adj_list_obj.getProperty_groups() + ] + + def set_property_groups(self, property_groups: Sequence[PropertyGroup]) -> None: + """Mutate the corresponding JVM object. + + :param property_groups: new property groups + """ + self._jvm_adj_list_obj.setProperty_groups( + [p_group.to_scala() for p_group in property_groups], + ) + + def get_adj_list_type(self) -> AdjListType: + """Get adj list type. + + :returns: adj list type. + """ + return AdjListType(self._jvm_adj_list_obj.getAdjList_type()) + + def to_scala(self) -> JavaObject: + """Transform object to JVM representation. + + :returns: JavaObject + """ + return self._jvm_adj_list_obj + + @classmethod + def from_scala( + cls: type[AdjListClassType], + jvm_obj: JavaObject, + ) -> AdjListClassType: + """Create an instance of the Class from the corresponding JVM object. + + :param jvm_obj: scala object in JVM. + :returns: instance of Python Class. + """ + return AdjList(None, None, None, None, None, jvm_obj) + + @classmethod + def from_python( + cls: type[AdjListClassType], + ordered: bool, + aligned_by: str, + prefix: str, + file_type: FileType, + property_groups: Sequence[PropertyGroup], + ) -> AdjListClassType: + """Create an instance of the class from python arguments. 
+ + :param ordered: ordered flag + :param aligned_by: recommended values are "src" or "dst" + :param prefix: path prefix + :param file_type: file type + :param property_groups: sequence of PropertyGroup objects + """ + if not prefix.endswith(os.sep): + prefix += os.sep + return AdjList(ordered, aligned_by, prefix, file_type, property_groups, None) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, AdjList): + return False + + return ( + (self.get_ordered() == other.get_ordered()) + and (self.get_aligned_by() == other.get_aligned_by()) + and (self.get_prefix() == other.get_prefix()) + and (self.get_file_type() == other.get_file_type()) + and (len(self.get_property_groups()) == len(other.get_property_groups())) + and all( + left_pg == right_pg + for left_pg, right_pg in zip( + self.get_property_groups(), + other.get_property_groups(), + ) + ) + ) + + +# Return type of EdgeInfo classmethods +EdgeInfoType = TypeVar("EdgeInfoType", bound="EdgeInfo") + + +class EdgeInfo: + """Edge info is a class to store the edge meta information.""" + + def __init__( + self, + src_label: Optional[str], + edge_label: Optional[str], + dst_label: Optional[str], + chunk_size: Optional[int], + src_chunk_size: Optional[int], + dst_chunk_size: Optional[int], + directed: Optional[bool], + prefix: Optional[str], + adj_lists: Sequence[AdjList], + version: Optional[str], + jvm_edge_info_obj: JavaObject, + ) -> None: + """One should not use this constructor directly, please use `from_scala` or `from_python`.""" + _check_session() + if jvm_edge_info_obj is not None: + self._jvm_edge_info_obj = jvm_edge_info_obj + else: + edge_info = GraphArSession.graphar.EdgeInfo() + edge_info.setSrc_label(src_label) + edge_info.setEdge_label(edge_label) + edge_info.setDst_label(dst_label) + edge_info.setChunk_size(chunk_size) + edge_info.setSrc_chunk_size(src_chunk_size) + edge_info.setDst_chunk_size(dst_chunk_size) + edge_info.setDirected(directed) + edge_info.setPrefix(prefix) + edge_info.setAdj_lists( + [py_adj_list.to_scala() for py_adj_list in adj_lists], + ) + edge_info.setVersion(version) + self._jvm_edge_info_obj = edge_info + + def get_src_label(self) -> str: + """Get src label from the corresponding JVM object. + + :returns: src label + """ + return self._jvm_edge_info_obj.getSrc_label() + + def set_src_label(self, src_label: str) -> None: + """Mutate the corresponding JVM object. + + :param src_label: the new src label + """ + self._jvm_edge_info_obj.setSrc_label(src_label) + + def get_edge_label(self) -> str: + """Get edge label from the corresponding JVM object. + + :returns: edge label + """ + return self._jvm_edge_info_obj.getEdge_label() + + def set_edge_label(self, edge_label: str) -> None: + """Mutate the corresponding JVM object. + + :param edge_label: the new edge label + """ + self._jvm_edge_info_obj.setEdge_label(edge_label) + + def get_dst_label(self) -> str: + """Get dst label from the corresponding JVM object. + + :returns: dst label + """ + return self._jvm_edge_info_obj.getDst_label() + + def set_dst_label(self, dst_label: str) -> None: + """Mutate the corresponding JVM object. + + :param dst_label: the new dst label + """ + self._jvm_edge_info_obj.setDst_label(dst_label) + + def get_chunk_size(self) -> int: + """Get chunk size from the corresponding JVM object. + + :returns: chunk size + """ + return self._jvm_edge_info_obj.getChunk_size() + + def set_chunk_size(self, chunk_size: int) -> None: + """Mutate the corresponding JVM object. 
+ + :param chunk_size: the new chunk size + """ + self._jvm_edge_info_obj.setChunk_size(chunk_size) + + def get_src_chunk_size(self) -> int: + """Get source chunk size from the corresponding JVM object. + + :returns: source chunk size + """ + return self._jvm_edge_info_obj.getSrc_chunk_size() + + def set_src_chunk_size(self, src_chunk_size: int) -> None: + """Mutate the corresponding JVM object. + + :param src_chunk_size: the new source chunk size. + """ + self._jvm_edge_info_obj.setSrc_chunk_size(src_chunk_size) + + def get_dst_chunk_size(self) -> int: + """Get dest chunk size from the corresponding JVM object. + + :returns: destination chunk size + """ + return self._jvm_edge_info_obj.getDst_chunk_size() + + def set_dst_chunk_size(self, dst_chunk_size: int) -> None: + """Mutate the corresponding JVM object. + + :param dst_chunk_size: the new destination chunk size. + """ + self._jvm_edge_info_obj.setDst_chunk_size(dst_chunk_size) + + def get_directed(self) -> bool: + """Get directed flag from the corresponding JVM object. + + :returns: directed flag + """ + return self._jvm_edge_info_obj.getDirected() + + def set_directed(self, directed: bool) -> None: + """Mutate the corresponding JVM object. + + :param directed: the new directed flag + """ + self._jvm_edge_info_obj.setDirected(directed) + + def get_prefix(self) -> str: + """Get prefix from the corresponding JVM object. + + :returns: prefix + """ + return self._jvm_edge_info_obj.getPrefix() + + def set_prefix(self, prefix: str) -> None: + """Mutate the corresponding JVM object. + + :param prefix: the new prefix + """ + self._jvm_edge_info_obj.setPrefix(prefix) + + def get_adj_lists(self) -> Sequence[AdjList]: + """Get adj lists from the corresponding JVM object. + + :returns: sequence of AdjList + """ + return [ + AdjList.from_scala(jvm_adj_list) + for jvm_adj_list in self._jvm_edge_info_obj.getAdj_lists() + ] + + def set_adj_lists(self, adj_lists: Sequence[AdjList]) -> None: + """Mutate the corresponding JVM object. + + :param adj_lists: the new adj lists, sequence of AdjList + """ + self._jvm_edge_info_obj.setAdj_lists( + [py_adj_list.to_scala() for py_adj_list in adj_lists], + ) + + def get_version(self) -> str: + """Get GAR version from the corresponding JVM object. + + :returns: GAR version + """ + return self._jvm_edge_info_obj.getVersion() + + def set_version(self, version: str) -> None: + """Mutate the corresponding JVM object. + + :param version: the new GAR version + """ + self._jvm_edge_info_obj.setVersion(version) + + def to_scala(self) -> JavaObject: + """Transform object to JVM representation. + + :returns: JavaObject + """ + return self._jvm_edge_info_obj + + @classmethod + def from_scala(cls: type[EdgeInfoType], jvm_obj: JavaObject) -> EdgeInfoType: + """Create an instance of the Class from the corresponding JVM object. + + :param jvm_obj: scala object in JVM. + :returns: instance of Python Class. + """ + return EdgeInfo( + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + jvm_obj, + ) + + @classmethod + def from_python( + cls: type[EdgeInfoType], + src_label: str, + edge_label: str, + dst_label: str, + chunk_size: int, + src_chunk_size: int, + dst_chunk_size: int, + directed: bool, + prefix: str, + adj_lists: Sequence[AdjList], + version: str, + ) -> EdgeInfoType: + """Create an instance of the class from python arguments. 
+
+        :param src_label: source vertex label
+        :param edge_label: edges label
+        :param dst_label: destination vertex label
+        :param chunk_size: chunk size
+        :param src_chunk_size: source chunk size
+        :param dst_chunk_size: destination chunk size
+        :param directed: directed graph flag
+        :param prefix: path prefix
+        :param adj_lists: sequence of AdjList objects
+        :param version: version of GAR format
+        """
+        if not prefix.endswith(os.sep):
+            prefix += os.sep
+
+        return EdgeInfo(
+            src_label,
+            edge_label,
+            dst_label,
+            chunk_size,
+            src_chunk_size,
+            dst_chunk_size,
+            directed,
+            prefix,
+            adj_lists,
+            version,
+            None,
+        )
+
+    def contain_adj_list(self, adj_list_type: AdjListType) -> bool:
+        """Check if the edge info supports the adj list type.
+
+        :param adj_list_type: adj list type in gar to check.
+        :returns: true if the edge info supports the adj list type, otherwise false.
+        """
+        return self._jvm_edge_info_obj.containAdjList(adj_list_type.to_scala())
+
+    def get_adj_list_prefix(self, adj_list_type: AdjListType) -> str:
+        """Get path prefix of adj list type.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param adj_list_type: the input adj list type in gar.
+        :returns: path prefix of the adj list type. If the edge info does not support the
+        adj list type, an IllegalArgumentException error is raised.
+        """
+        return self._jvm_edge_info_obj.getAdjListPrefix(adj_list_type.to_scala())
+
+    def get_adj_list_file_type(self, adj_list_type: AdjListType) -> FileType:
+        """Get the adj list topology chunk file type of adj list type.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param adj_list_type: the input adj list type.
+        :returns: file format type in gar of the adj list type. If the edge info does not
+        support the adj list type, an IllegalArgumentException error is raised.
+        """
+        return FileType.from_scala(
+            self._jvm_edge_info_obj.getAdjListFileType(adj_list_type.to_scala()),
+        )
+
+    def get_property_groups(
+        self,
+        adj_list_type: AdjListType,
+    ) -> Sequence[PropertyGroup]:
+        """Get the property groups of adj list type.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param adj_list_type: the input adj list type.
+        :returns: property groups of the input adj list type. If the edge info does not
+        support the adj list type, an IllegalArgumentException error is raised.
+        """
+        return [
+            PropertyGroup.from_scala(property_group)
+            for property_group in self._jvm_edge_info_obj.getPropertyGroups(
+                adj_list_type.to_scala(),
+            )
+        ]
+
+    def contain_property_group(
+        self,
+        property_group: PropertyGroup,
+        adj_list_type: AdjListType,
+    ) -> bool:
+        """Check if the edge info contains the property group in certain adj list structure.
+
+        :param property_group: the property group to check.
+        :param adj_list_type: the type of adj list structure.
+        :returns: true if the edge info contains the property group in the given adj list
+        structure. If the edge info does not support the given adj list type, or does not
+        contain the property group in that adj list structure, return false.
+        """
+        return self._jvm_edge_info_obj.containPropertyGroup(
+            property_group.to_scala(),
+            adj_list_type.to_scala(),
+        )
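As a quick orientation for the metadata API above, here is a minimal, hedged sketch of assembling an `AdjList` and an `EdgeInfo` from Python. It assumes `graphar_pyspark` is already bound to a SparkSession that has the GraphAr jar on its classpath, and that `FileType.PARQUET` is the member name in the enums module (not shown in this hunk); all labels, sizes, and paths are illustrative only:

```python
from graphar_pyspark.enums import FileType
from graphar_pyspark.info import AdjList, EdgeInfo

# Hypothetical adjacency list layout: ordered by source, stored as Parquet.
adj_list = AdjList.from_python(
    ordered=True,
    aligned_by="src",                 # "src" or "dst", per the docstring
    prefix="ordered_by_source/",
    file_type=FileType.PARQUET,       # enum member name assumed
    property_groups=[],               # PropertyGroup objects could go here
)

edge_info = EdgeInfo.from_python(
    src_label="person",
    edge_label="knows",
    dst_label="person",
    chunk_size=1024,
    src_chunk_size=100,
    dst_chunk_size=100,
    directed=False,
    prefix="edge/person_knows_person/",
    adj_lists=[adj_list],
    version="gar/v1",                 # version string format assumed
)
assert edge_info.contain_adj_list(adj_list.get_adj_list_type())
```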
+
+    def contain_property(self, property_name: str) -> bool:
+        """Check if the edge info contains the property.
+
+        :param property_name: name of the property.
+        :returns: true if the edge info contains the property, otherwise false.
+        """
+        return self._jvm_edge_info_obj.containProperty(property_name)
+
+    def get_property_group(
+        self,
+        property_name: str,
+        adj_list_type: AdjListType,
+    ) -> PropertyGroup:
+        """Get property group that contains property with adj list type.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param property_name: name of the property.
+        :param adj_list_type: the type of adj list structure.
+        :returns: property group that contains the property. If the edge info does not
+        support the adj list type, or no property group contains the property, an
+        IllegalArgumentException error is raised.
+        """
+        return PropertyGroup.from_scala(
+            self._jvm_edge_info_obj.getPropertyGroup(
+                property_name,
+                adj_list_type.to_scala(),
+            ),
+        )
+
+    def get_property_type(self, property_name: str) -> GarType:
+        """Get the data type of property.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param property_name: name of the property.
+        :returns: data type in gar of the property. If the edge info does not contain the property, an IllegalArgumentException error is raised.
+        """
+        return GarType.from_scala(
+            self._jvm_edge_info_obj.getPropertyType(property_name),
+        )
+
+    def is_primary_key(self, property_name: str) -> bool:
+        """Check if the property is the primary key of edge info.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param property_name: name of the property.
+        :returns: true if the property is the primary key of edge info, false if not. If
+        the edge info does not contain the property, an IllegalArgumentException error is raised.
+        """
+        return self._jvm_edge_info_obj.isPrimaryKey(property_name)
+
+    def get_primary_key(self) -> str:
+        """Get primary key of edge info.
+
+        :returns: primary key of edge info.
+        """
+        return self._jvm_edge_info_obj.getPrimaryKey()
+
+    def is_validated(self) -> bool:
+        """Check if the edge info is validated.
+
+        :returns: true if edge info is validated, false if not.
+        """
+        return self._jvm_edge_info_obj.isValidated()
+
+    def get_vertices_num_file_path(self, adj_list_type: AdjListType) -> str:
+        """Get the vertex num file path.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param adj_list_type: type of adj list structure.
+        :returns: the vertex num file path. If the edge info does not support the adj list
+        type, an IllegalArgumentException error is raised.
+        """
+        return self._jvm_edge_info_obj.getVerticesNumFilePath(adj_list_type.to_scala())
+
+    def get_edges_num_path_prefix(self, adj_list_type: AdjListType) -> str:
+        """Get the path prefix of the edge num file path.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param adj_list_type: type of adj list structure.
+        :returns: the edge num path prefix. If the edge info does not support the adj list
+        type, an IllegalArgumentException error is raised.
+        """
+        return self._jvm_edge_info_obj.getEdgesNumPathPrefix(adj_list_type.to_scala())
+
+    def get_edges_num_file_path(
+        self,
+        chunk_index: int,
+        adj_list_type: AdjListType,
+    ) -> str:
+        """Get the edge num file path of the vertex chunk.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param chunk_index: index of vertex chunk.
+        :param adj_list_type: type of adj list structure.
+        :returns: the edge num file path. If the edge info does not support the adj list
+        type, an IllegalArgumentException error is raised.
+        """
+        return self._jvm_edge_info_obj.getEdgesNumFilePath(
+            chunk_index,
+            adj_list_type.to_scala(),
+        )
+
+    def get_adj_list_offset_file_path(
+        self,
+        chunk_index: int,
+        adj_list_type: AdjListType,
+    ) -> str:
+        """Get the adj list offset chunk file path of the vertex chunk; the offset chunks are aligned with the vertex chunks.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param chunk_index: index of vertex chunk.
+        :param adj_list_type: type of adj list structure.
+        :returns: the offset chunk file path. If the edge info does not support the adj list type, an IllegalArgumentException error is raised.
+        """
+        return self._jvm_edge_info_obj.getAdjListOffsetFilePath(
+            chunk_index,
+            adj_list_type.to_scala(),
+        )
+
+    def get_offset_path_prefix(self, adj_list_type: AdjListType) -> str:
+        """Get the path prefix of the adjacency list offset for the given adjacency list type.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param adj_list_type: type of adj list structure.
+        :returns: the path prefix of the offset. If the edge info does not support the adj list type, an IllegalArgumentException error is raised.
+        """
+        return self._jvm_edge_info_obj.getOffsetPathPrefix(adj_list_type.to_scala())
+
+    def get_adj_list_file_path(
+        self,
+        vertex_chunk_index: int,
+        chunk_index: int,
+        adj_list_type: AdjListType,
+    ) -> str:
+        """Get the file path of adj list topology chunk.
+
+        :param vertex_chunk_index: index of vertex chunk.
+        :param chunk_index: index of edge chunk.
+        :param adj_list_type: type of adj list structure.
+        :returns: adj list chunk file path.
+        """
+        return self._jvm_edge_info_obj.getAdjListFilePath(
+            vertex_chunk_index,
+            chunk_index,
+            adj_list_type.to_scala(),
+        )
+
+    def get_adj_list_path_prefix(
+        self,
+        vertex_chunk_index: Optional[int],
+        adj_list_type: AdjListType,
+    ) -> str:
+        """Get the path prefix of adj list topology chunk of certain vertex chunk.
+
+        :param vertex_chunk_index: index of vertex chunk (optional).
+        :param adj_list_type: type of adj list structure.
+        :returns: path prefix of the edge chunk of vertices of given vertex chunk.
+        """
+        if vertex_chunk_index is None:
+            return self._jvm_edge_info_obj.getAdjListPathPrefix(
+                adj_list_type.to_scala(),
+            )
+
+        return self._jvm_edge_info_obj.getAdjListPathPrefix(
+            vertex_chunk_index,
+            adj_list_type.to_scala(),
+        )
+
+    def get_property_file_path(
+        self,
+        property_group: PropertyGroup,
+        adj_list_type: AdjListType,
+        vertex_chunk_index: int,
+        chunk_index: int,
+    ) -> str:
+        """Get the chunk file path of the adj list property group; the property group chunks are aligned with the adj list topology chunks.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param property_group: property group.
+        :param adj_list_type: type of adj list structure.
+        :param vertex_chunk_index: index of vertex chunk.
+        :param chunk_index: index of edge chunk.
+        :returns: property group chunk file path. If the edge info does not contain the property group, an IllegalArgumentException error is raised.
+        """
+        return self._jvm_edge_info_obj.getPropertyFilePath(
+            property_group.to_scala(),
+            adj_list_type.to_scala(),
+            vertex_chunk_index,
+            chunk_index,
+        )
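To make the path helpers above concrete, a small hedged sketch, reusing the hypothetical `edge_info` built earlier; the `AdjListType.ORDERED_BY_SOURCE` member name is assumed from the enums module:

```python
from graphar_pyspark.enums import AdjListType

alt = AdjListType.ORDERED_BY_SOURCE                    # member name assumed
if edge_info.contain_adj_list(alt):
    print(edge_info.get_adj_list_prefix(alt))          # e.g. "ordered_by_source/"
    print(edge_info.get_adj_list_file_path(0, 0, alt)) # topology chunk 0 of vertex chunk 0
    print(edge_info.get_offset_path_prefix(alt))       # where the offset chunks live
```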
+    def get_property_group_path_prefix(
+        self,
+        property_group: PropertyGroup,
+        adj_list_type: AdjListType,
+        vertex_chunk_index: Optional[int] = None,
+    ) -> str:
+        """Get path prefix of adj list property group of certain vertex chunk.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param property_group: property group.
+        :param adj_list_type: type of adj list structure.
+        :param vertex_chunk_index: index of vertex chunk (optional, default is None).
+        :returns: path prefix of property group chunks of vertices of given vertex
+        chunk. If the edge info does not contain the property group, an IllegalArgumentException error is raised.
+        """
+        if vertex_chunk_index is not None:
+            return self._jvm_edge_info_obj.getPropertyGroupPathPrefix(
+                property_group.to_scala(),
+                adj_list_type.to_scala(),
+                vertex_chunk_index,
+            )
+
+        return self._jvm_edge_info_obj.getPropertyGroupPathPrefix(
+            property_group.to_scala(),
+            adj_list_type.to_scala(),
+        )
+
+    def get_concat_key(self) -> str:
+        """Get concat key.
+
+        :returns: concat key
+        """
+        return self._jvm_edge_info_obj.getConcatKey()
+
+    def dump(self) -> str:
+        """Dump to Yaml string.
+
+        :returns: yaml-string representation.
+        """
+        return self._jvm_edge_info_obj.dump()
+
+    @staticmethod
+    def load_edge_info(edge_info_path: str) -> "EdgeInfo":
+        """Load a yaml file from path and construct an EdgeInfo from it.
+
+        :param edge_info_path: path of edge info YAML file.
+        :returns: EdgeInfo object.
+        """
+        return EdgeInfo.from_scala(
+            GraphArSession.graphar.EdgeInfo.loadEdgeInfo(
+                edge_info_path,
+                GraphArSession.jss,
+            ),
+        )
+
+
+class GraphInfo:
+    """GraphInfo is a class to store the graph meta information."""
+
+    def __init__(
+        self,
+        name: Optional[str],
+        prefix: Optional[str],
+        vertices: Optional[list[str]],
+        edges: Optional[list[str]],
+        version: Optional[str],
+        jvm_graph_info_obj: Optional[JavaObject],
+    ) -> None:
+        """One should not use this constructor directly, please use `from_scala` or `from_python`."""
+        _check_session()
+        if jvm_graph_info_obj is not None:
+            self._jvm_graph_info_obj = jvm_graph_info_obj
+        else:
+            graph_info = GraphArSession.graphar.GraphInfo()
+            graph_info.setName(name)
+            graph_info.setPrefix(prefix)
+            graph_info.setVertices(vertices)
+            graph_info.setEdges(edges)
+            graph_info.setVersion(version)
+            self._jvm_graph_info_obj = graph_info
+
+    def get_name(self) -> str:
+        """Get name from the corresponding JVM object.
+
+        :returns: name
+        """
+        return self._jvm_graph_info_obj.getName()
+
+    def set_name(self, name: str) -> None:
+        """Mutate the corresponding JVM object.
+
+        :param name: new name
+        """
+        self._jvm_graph_info_obj.setName(name)
+
+    def get_prefix(self) -> str:
+        """Get prefix from the corresponding JVM object.
+
+        :returns: prefix
+        """
+        return self._jvm_graph_info_obj.getPrefix()
+
+    def set_prefix(self, prefix: str) -> None:
+        """Mutate the corresponding JVM object.
+
+        :param prefix: new prefix
+        """
+        self._jvm_graph_info_obj.setPrefix(prefix)
+
+    def get_vertices(self) -> JavaList:
+        """Get list of vertices from the corresponding JVM object.
+
+        :returns: vertices
+        """
+        return self._jvm_graph_info_obj.getVertices()
+
+    def set_vertices(self, vertices: Union[list[str], JavaList]) -> None:
+        """Mutate the corresponding JVM object.
+
+        :param vertices: new list of vertices
+        """
+        self._jvm_graph_info_obj.setVertices(vertices)
+
+    def get_edges(self) -> JavaList:
+        """Get list of edges from the corresponding JVM object.
+
+        :returns: edges
+        """
+        return self._jvm_graph_info_obj.getEdges()
+
+    def set_edges(self, edges: Union[list[str], JavaList]) -> None:
+        """Mutate the corresponding JVM object.
+
+        :param edges: new list of edges.
+ """ + self._jvm_graph_info_obj.setEdges(edges) + + def get_version(self) -> str: + """Get GAR version from the corresponding JVM object. + + :returns: version + """ + return self._jvm_graph_info_obj.getVersion() + + def set_version(self, version: str) -> None: + """Mutate the corresponding JVM object. + + :param version: new version of GAR + """ + self._jvm_graph_info_obj.setVersion(version) + + def to_scala(self) -> JavaObject: + """Transform object to JVM representation. + + :returns: JavaObject + """ + return self._jvm_graph_info_obj + + @staticmethod + def from_scala(jvm_obj: JavaObject) -> "GraphInfo": + """Create an instance of the Class from the corresponding JVM object. + + :param jvm_obj: scala object in JVM. + :returns: instance of Python Class. + """ + return GraphInfo(None, None, None, None, None, jvm_obj) + + @staticmethod + def from_python( + name: str, + prefix: str, + vertices: Sequence[str], + edges: Sequence[str], + version: str, + ) -> "GraphInfo": + """Create an instance of the class from python arguments. + + :param name: name of the graph + :param prefix: path prefix + :param vertices: list of vertices + :param edges: list of edges + :param version: version of GAR format + """ + if not prefix.endswith(os.sep): + prefix += os.sep + return GraphInfo(name, prefix, vertices, edges, version, None) + + def add_vertex_info(self, vertex_info: VertexInfo) -> None: + """Add VertexInfo to GraphInfo. + + :param vertex_info: VertexInfo to add + """ + self._jvm_graph_info_obj.addVertexInfo(vertex_info.to_scala()) + + def add_edge_info(self, edge_info: EdgeInfo) -> None: + """Add EdgeInfo to GraphInfo. + + :param edge_info: EdgeInfo to add + """ + self._jvm_graph_info_obj.addEdgeInfo(edge_info.to_scala()) + + def get_vertex_info(self, label: str) -> VertexInfo: + """Get vertex info from the corresponding JVM object. + + :param label: label of vertex + """ + return VertexInfo.from_scala(self._jvm_graph_info_obj.getVertexInfo(label)) + + def get_edge_info( + self, + src_label: str, + edge_label: str, + dst_label: str, + ) -> EdgeInfo: + """Get edge info from the corresponding JVM object. + + :param src_label: source label + :param edge_label: edge label + :param dst_label: destination label + """ + return EdgeInfo.from_scala( + self._jvm_graph_info_obj.getEdgeInfo(src_label, edge_label, dst_label), + ) + + def get_vertex_infos(self) -> dict[str, VertexInfo]: + """Get all vertex infos from the corresponding JVM object. + + :returns: Mapping label -> VertexInfo + """ + scala_map = self._jvm_graph_info_obj.getVertexInfos() + keys_set_iter = scala_map.keySet().iterator() + res = {} + while keys_set_iter.hasNext(): + k = keys_set_iter.next() + res[k] = VertexInfo.from_scala(scala_map.get(k)) + + return res + + def get_edge_infos(self) -> dict[str, EdgeInfo]: + """Get all edge infos from the corresponding JVM object. + + :returns: Mapping {src_label}_{edge_label}_{dst_label} -> EdgeInfo + """ + scala_map = self._jvm_graph_info_obj.getEdgeInfos() + keys_set_iter = scala_map.keySet().iterator() + res = {} + while keys_set_iter.hasNext(): + k = keys_set_iter.next() + res[k] = EdgeInfo.from_scala(scala_map.get(k)) + + return res + + def dump(self) -> str: + """Dump to Yaml string. + + :returns: YAML-string representation of object. + """ + return self._jvm_graph_info_obj.dump() + + @staticmethod + def load_graph_info(graph_info_path: str) -> "GraphInfo": + """Load a yaml file from path and construct a GraphInfo from it. + + :param graph_info_path: path of GraphInfo YAML file. 
+        :returns: GraphInfo object.
+        """
+        return GraphInfo.from_scala(
+            GraphArSession.graphar.GraphInfo.loadGraphInfo(
+                graph_info_path,
+                GraphArSession.jss,
+            ),
+        )
diff --git a/pyspark/graphar_pyspark/reader.py b/pyspark/graphar_pyspark/reader.py
new file mode 100644
index 000000000..d324ca99e
--- /dev/null
+++ b/pyspark/graphar_pyspark/reader.py
@@ -0,0 +1,461 @@
+# Copyright 2022-2023 Alibaba Group Holding Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Bindings to com.alibaba.graphar.reader."""
+
+from __future__ import annotations
+
+import os
+from typing import Optional
+
+from py4j.java_gateway import JavaObject
+from pyspark.sql import DataFrame
+
+from graphar_pyspark import GraphArSession, _check_session
+from graphar_pyspark.enums import AdjListType
+from graphar_pyspark.info import EdgeInfo, PropertyGroup, VertexInfo
+
+
+class VertexReader:
+    """Reader for vertex chunks."""
+
+    def __init__(
+        self,
+        prefix: Optional[str],
+        vertex_info: Optional[VertexInfo],
+        jvm_obj: Optional[JavaObject],
+    ) -> None:
+        """One should not use this constructor directly, please use `from_scala` or `from_python`."""
+        _check_session()
+        if jvm_obj is not None:
+            self._jvm_vertex_reader_obj = jvm_obj
+        else:
+            self._jvm_vertex_reader_obj = GraphArSession.graphar.reader.VertexReader(
+                prefix,
+                vertex_info.to_scala(),
+                GraphArSession.jss,
+            )
+
+    def to_scala(self) -> JavaObject:
+        """Transform object to JVM representation.
+
+        :returns: JavaObject
+        """
+        return self._jvm_vertex_reader_obj
+
+    @staticmethod
+    def from_scala(jvm_obj: JavaObject) -> "VertexReader":
+        """Create an instance of the Class from the corresponding JVM object.
+
+        :param jvm_obj: scala object in JVM.
+        :returns: instance of Python Class.
+        """
+        return VertexReader(None, None, jvm_obj)
+
+    @staticmethod
+    def from_python(prefix: str, vertex_info: VertexInfo) -> "VertexReader":
+        """Create an instance of the Class from Python arguments.
+
+        :param prefix: the absolute prefix.
+        :param vertex_info: the vertex info that describes the vertex type.
+        """
+        if not prefix.endswith(os.sep):
+            prefix += os.sep
+        return VertexReader(prefix, vertex_info, None)
+
+    def read_vertices_number(self) -> int:
+        """Load the total number of vertices for this vertex type.
+
+        :returns: total number of vertices.
+        """
+        return self._jvm_vertex_reader_obj.readVerticesNumber()
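Putting the info and reader pieces together, a hedged end-to-end sketch. File locations and YAML names are hypothetical, and `VertexInfo.load_vertex_info` is assumed to exist in the same style as `EdgeInfo.load_edge_info` and `GraphInfo.load_graph_info` (the `VertexInfo` class is defined earlier in info.py, outside this hunk):

```python
from graphar_pyspark.info import GraphInfo, VertexInfo
from graphar_pyspark.reader import VertexReader

# Hypothetical on-disk layout produced by a previous GraphAr export.
graph_info = GraphInfo.load_graph_info("/tmp/ldbc_sample/ldbc_sample.graph.yml")
vertex_info = VertexInfo.load_vertex_info("/tmp/ldbc_sample/person.vertex.yml")

reader = VertexReader.from_python(graph_info.get_prefix(), vertex_info)
print(reader.read_vertices_number())  # total vertex count for this type
```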
+
+    def read_vertex_property_chunk(
+        self,
+        property_group: PropertyGroup,
+        chunk_index: int,
+    ) -> DataFrame:
+        """Load a single vertex property chunk as a DataFrame.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param property_group: property group.
+        :param chunk_index: index of vertex chunk.
+        :returns: vertex property chunk DataFrame. Raises IllegalArgumentException if the property group is not contained.
+        """
+        return DataFrame(
+            self._jvm_vertex_reader_obj.readVertexPropertyChunk(
+                property_group.to_scala(),
+                chunk_index,
+            ),
+            GraphArSession.ss,
+        )
+
+    def read_vertex_property_group(self, property_group: PropertyGroup) -> DataFrame:
+        """Load all chunks for a property group as a DataFrame.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param property_group: property group.
+        :returns: DataFrame that contains all chunks of property group. Raises IllegalArgumentException if the property group is not contained.
+        """
+        return DataFrame(
+            self._jvm_vertex_reader_obj.readVertexPropertyGroup(
+                property_group.to_scala(),
+            ),
+            GraphArSession.ss,
+        )
+
+    def read_multiple_vertex_property_groups(
+        self,
+        property_groups: list[PropertyGroup],
+    ) -> DataFrame:
+        """Load the chunks for multiple property groups as a DataFrame.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param property_groups: list of property groups.
+        :returns: DataFrame that contains all chunks of the property groups. Raises IllegalArgumentException if a property group is not contained.
+        """
+        return DataFrame(
+            self._jvm_vertex_reader_obj.readMultipleVertexPropertyGroups(
+                [py_property_group.to_scala() for py_property_group in property_groups],
+            ),
+            GraphArSession.ss,
+        )
+
+    def read_all_vertex_property_groups(self) -> DataFrame:
+        """Load the chunks for all property groups as a DataFrame.
+
+        :returns: DataFrame that contains all property group chunks of vertex.
+        """
+        return DataFrame(
+            self._jvm_vertex_reader_obj.readAllVertexPropertyGroups(),
+            GraphArSession.ss,
+        )
+
+
+class EdgeReader:
+    """Reader for edge chunks."""
+
+    def __init__(
+        self,
+        prefix: Optional[str],
+        edge_info: Optional[EdgeInfo],
+        adj_list_type: Optional[AdjListType],
+        jvm_obj: Optional[JavaObject],
+    ) -> None:
+        """One should not use this constructor directly, please use `from_scala` or `from_python`."""
+        _check_session()
+        if jvm_obj is not None:
+            self._jvm_edge_reader_obj = jvm_obj
+        else:
+            self._jvm_edge_reader_obj = GraphArSession.graphar.reader.EdgeReader(
+                prefix,
+                edge_info.to_scala(),
+                adj_list_type.to_scala(),
+                GraphArSession.jss,
+            )
+
+    def to_scala(self) -> JavaObject:
+        """Transform object to JVM representation.
+
+        :returns: JavaObject
+        """
+        return self._jvm_edge_reader_obj
+
+    @staticmethod
+    def from_scala(jvm_obj: JavaObject) -> "EdgeReader":
+        """Create an instance of the Class from the corresponding JVM object.
+
+        :param jvm_obj: scala object in JVM.
+        :returns: instance of Python Class.
+        """
+        return EdgeReader(None, None, None, jvm_obj)
+
+    @staticmethod
+    def from_python(
+        prefix: str,
+        edge_info: EdgeInfo,
+        adj_list_type: AdjListType,
+    ) -> "EdgeReader":
+        """Create an instance of the Class from Python arguments.
+
+        Note that the constructor raises IllegalArgumentException if the edge info does not support the given adjListType.
+
+        :param prefix: the absolute prefix.
+        :param edge_info: the edge info that describes the edge type.
+        :param adj_list_type: the adj list type for the edge.
+        """
+        if not prefix.endswith(os.sep):
+            prefix += os.sep
+        return EdgeReader(prefix, edge_info, adj_list_type, None)
+
+    def read_vertices_number(self) -> int:
+        """Load the total number of src/dst vertices for this edge type.
+
+        :returns: total number of vertices.
+        """
+        return self._jvm_edge_reader_obj.readVerticesNumber()
+
+    def read_vertex_chunk_number(self) -> int:
+        """Load the chunk number of src/dst vertices.
+
+        :returns: chunk number of vertices.
+        """
+        return self._jvm_edge_reader_obj.readVertexChunkNumber()
+
+    def read_edges_number(self, chunk_index: Optional[int] = None) -> int:
+        """Load the number of edges for the vertex chunk or for this edge type.
+
+        :param chunk_index: index of vertex chunk (optional, default is None);
+        if not provided, returns the number of edges for this edge type,
+        if provided, returns the number of edges for the vertex chunk
+        :returns: the number of edges
+        """
+        if chunk_index is None:
+            return self._jvm_edge_reader_obj.readEdgesNumber()
+        return self._jvm_edge_reader_obj.readEdgesNumber(chunk_index)
+
+    def read_offset(self, chunk_index: int) -> DataFrame:
+        """Load a single offset chunk as a DataFrame.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param chunk_index: index of offset chunk
+        :returns: offset chunk DataFrame. Raises IllegalArgumentException if adjListType is
+        not AdjListType.ordered_by_source or AdjListType.ordered_by_dest.
+        """
+        return DataFrame(
+            self._jvm_edge_reader_obj.readOffset(chunk_index),
+            GraphArSession.ss,
+        )
+
+    def read_adj_list_chunk(
+        self,
+        vertex_chunk_index: int,
+        chunk_index: int,
+    ) -> DataFrame:
+        """Load a single AdjList chunk as a DataFrame.
+
+        :param vertex_chunk_index: index of vertex chunk
+        :param chunk_index: index of AdjList chunk.
+        :returns: AdjList chunk DataFrame
+        """
+        return DataFrame(
+            self._jvm_edge_reader_obj.readAdjListChunk(vertex_chunk_index, chunk_index),
+            GraphArSession.ss,
+        )
+
+    def read_adj_list_for_vertex_chunk(
+        self,
+        vertex_chunk_index: int,
+        add_index: bool = True,
+    ) -> DataFrame:
+        """Load all AdjList chunks for a vertex chunk as a DataFrame.
+
+        :param vertex_chunk_index: index of vertex chunk.
+        :param add_index: whether to add an edge index column to the final DataFrame.
+        :returns: DataFrame of all AdjList chunks of vertices in given vertex chunk.
+        """
+        return DataFrame(
+            self._jvm_edge_reader_obj.readAdjListForVertexChunk(
+                vertex_chunk_index,
+                add_index,
+            ),
+            GraphArSession.ss,
+        )
+
+    def read_all_adj_list(self, add_index: bool = True) -> DataFrame:
+        """Load all AdjList chunks for this edge type as a DataFrame.
+
+        :param add_index: whether to add an edge index column to the final DataFrame.
+        :returns: DataFrame of all AdjList chunks.
+        """
+        return DataFrame(
+            self._jvm_edge_reader_obj.readAllAdjList(add_index),
+            GraphArSession.ss,
+        )
+
+    def read_edge_property_chunk(
+        self,
+        property_group: PropertyGroup,
+        vertex_chunk_index: int,
+        chunk_index: int,
+    ) -> DataFrame:
+        """Load a single edge property chunk as a DataFrame.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param property_group: property group.
+        :param vertex_chunk_index: index of vertex chunk.
+        :param chunk_index: index of property group chunk.
+        :returns: property group chunk DataFrame. If the edge info does not contain the
+        property group, raise an IllegalArgumentException error.
+        """
+        return DataFrame(
+            self._jvm_edge_reader_obj.readEdgePropertyChunk(
+                property_group.to_scala(),
+                vertex_chunk_index,
+                chunk_index,
+            ),
+            GraphArSession.ss,
+        )
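A hedged sketch of the edge-reading entry points above, with hypothetical YAML paths and labels; `AdjListType.ORDERED_BY_SOURCE` is assumed to be the member name in the enums module, and the GraphAr session is assumed to be initialized:

```python
from graphar_pyspark.enums import AdjListType
from graphar_pyspark.info import EdgeInfo
from graphar_pyspark.reader import EdgeReader

edge_info = EdgeInfo.load_edge_info("/tmp/ldbc_sample/person_knows_person.edge.yml")
reader = EdgeReader.from_python(
    "/tmp/ldbc_sample/",
    edge_info,
    AdjListType.ORDERED_BY_SOURCE,  # enum member name assumed
)
print(reader.read_edges_number())      # all edges of this type
print(reader.read_edges_number(0))     # edges attached to vertex chunk 0
topology = reader.read_all_adj_list()  # src/dst index pairs, plus an index column
topology.show(5)
```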
+
+    def read_edge_property_group_for_vertex_chunk(
+        self,
+        property_group: PropertyGroup,
+        vertex_chunk_index: int,
+        add_index: bool = True,
+    ) -> DataFrame:
+        """Load the chunks for a property group of a vertex chunk as a DataFrame.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param property_group: property group.
+        :param vertex_chunk_index: index of vertex chunk.
+        :param add_index: whether to add an edge index column to the final DataFrame.
+        :returns: DataFrame that contains all property group chunks of vertices in the given
+        vertex chunk. If the edge info does not contain the property group, raise an IllegalArgumentException error.
+        """
+        return DataFrame(
+            self._jvm_edge_reader_obj.readEdgePropertyGroupForVertexChunk(
+                property_group.to_scala(),
+                vertex_chunk_index,
+                add_index,
+            ),
+            GraphArSession.ss,
+        )
+
+    def read_edge_property_group(
+        self,
+        property_group: PropertyGroup,
+        add_index: bool = True,
+    ) -> DataFrame:
+        """Load all chunks for a property group as a DataFrame.
+
+        WARNING! Exceptions from the JVM are not checked inside, it is just a proxy-method!
+
+        :param property_group: property group.
+        :param add_index: whether to add an edge index column to the final DataFrame.
+        :returns: DataFrame that contains all chunks of property group. If the edge info does
+        not contain the property group, raise an IllegalArgumentException error.
+        """
+        return DataFrame(
+            self._jvm_edge_reader_obj.readEdgePropertyGroup(
+                property_group.to_scala(),
+                add_index,
+            ),
+            GraphArSession.ss,
+        )
+
+    def read_multiple_edge_property_groups_for_vertex_chunk(
+        self,
+        property_groups: list[PropertyGroup],
+        vertex_chunk_index: int,
+        add_index: bool = True,
+    ) -> DataFrame:
+        """Load the chunks for multiple property groups of a vertex chunk as a DataFrame.
+
+        :param property_groups: list of property groups.
+        :param vertex_chunk_index: index of vertex chunk.
+        :param add_index: whether to add an edge index column to the final DataFrame.
+        :returns: DataFrame that contains all property group chunks of a vertex chunk.
+        """
+        return DataFrame(
+            self._jvm_edge_reader_obj.readMultipleEdgePropertyGroupsForVertexChunk(
+                [py_property_group.to_scala() for py_property_group in property_groups],
+                vertex_chunk_index,
+                add_index,
+            ),
+            GraphArSession.ss,
+        )
+
+    def read_multiple_edge_property_groups(
+        self,
+        property_groups: list[PropertyGroup],
+        add_index: bool = True,
+    ) -> DataFrame:
+        """Load the chunks for multiple property groups as a DataFrame.
+
+        :param property_groups: list of property groups.
+        :param add_index: whether to add an edge index column to the final DataFrame.
+        :returns: DataFrame that contains all property group chunks of edges.
+        """
+        return DataFrame(
+            self._jvm_edge_reader_obj.readMultipleEdgePropertyGroups(
+                [py_property_group.to_scala() for py_property_group in property_groups],
+                add_index,
+            ),
+            GraphArSession.ss,
+        )
+
+    def read_all_edge_property_groups_for_vertex_chunk(
+        self,
+        vertex_chunk_index: int,
+        add_index: bool = True,
+    ) -> DataFrame:
+        """Load the chunks for all property groups of a vertex chunk as a DataFrame.
+
+        :param vertex_chunk_index: index of vertex chunk.
+        :param add_index: whether to add an edge index column to the final DataFrame.
+        :returns: DataFrame that contains all property group chunks of a vertex chunk.
+        """
+        return DataFrame(
+            self._jvm_edge_reader_obj.readAllEdgePropertyGroupsForVertexChunk(
+                vertex_chunk_index,
+                add_index,
+            ),
+            GraphArSession.ss,
+        )
+
+    def read_all_edge_property_groups(self, add_index: bool = True) -> DataFrame:
+        """Load the chunks for all property groups as a DataFrame.
+
+        :param add_index: whether to add an edge index column to the final DataFrame.
+        :returns: DataFrame that contains all property group chunks of edges.
+        """
+        return DataFrame(
+            self._jvm_edge_reader_obj.readAllEdgePropertyGroups(add_index),
+            GraphArSession.ss,
+        )
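Continuing the hedged EdgeReader sketch above (`edge_info`, `reader`, and the assumed `AdjListType.ORDERED_BY_SOURCE` member are the hypothetical objects built there), property groups can be pulled back as DataFrames like so:

```python
# Look up the property groups declared for this adj list type, then read them.
pgs = edge_info.get_property_groups(AdjListType.ORDERED_BY_SOURCE)
one_group = reader.read_edge_property_group(pgs[0])  # chunks of a single group
everything = reader.read_all_edge_property_groups()  # all groups, joined
everything.show(5)
```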
+
+    def read_edges_for_vertex_chunk(
+        self,
+        vertex_chunk_index: int,
+        add_index: bool = True,
+    ) -> DataFrame:
+        """Load the chunks for the AdjList and all property groups for a vertex chunk as a DataFrame.
+
+        :param vertex_chunk_index: index of vertex chunk
+        :param add_index: whether to add an edge index column to the final DataFrame.
+        :returns: DataFrame that contains all chunks of AdjList and property groups of vertices in given vertex chunk.
+        """
+        return DataFrame(
+            self._jvm_edge_reader_obj.readEdgesForVertexChunk(
+                vertex_chunk_index,
+                add_index,
+            ),
+            GraphArSession.ss,
+        )
+
+    def read_edges(self, add_index: bool = True) -> DataFrame:
+        """Load the chunks for the AdjList and all property groups as a DataFrame.
+
+        :param add_index: whether to add an edge index column to the final DataFrame.
+        :returns: DataFrame that contains all chunks of AdjList and property groups of edges.
+        """
+        return DataFrame(
+            self._jvm_edge_reader_obj.readEdges(add_index),
+            GraphArSession.ss,
+        )
diff --git a/pyspark/graphar_pyspark/util.py b/pyspark/graphar_pyspark/util.py
new file mode 100644
index 000000000..0d87010d8
--- /dev/null
+++ b/pyspark/graphar_pyspark/util.py
@@ -0,0 +1,248 @@
+# Copyright 2022-2023 Alibaba Group Holding Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Bindings to com.alibaba.graphar.util."""
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pyspark.sql import DataFrame
+
+from graphar_pyspark import GraphArSession, _check_session
+
+
+class IndexGenerator:
+    """IndexGenerator is a helper object for generating the indices of vertex/edge DataFrames."""
+
+    @staticmethod
+    def construct_vertex_index_mapping(
+        vertex_df: DataFrame,
+        primary_key: str,
+    ) -> DataFrame:
+        """Generate a vertex index mapping from the primary key.
+
+        The resulting DataFrame contains two columns: vertex index & primary key.
+
+        :param vertex_df: input vertex DataFrame.
+        :param primary_key: the primary key of vertex
+        :returns: a DataFrame that contains two columns: vertex index & primary key.
+        """
+        _check_session()
+        return DataFrame(
+            GraphArSession.graphar.util.IndexGenerator.constructVertexIndexMapping(
+                vertex_df._jdf,
+                primary_key,
+            ),
+            GraphArSession.ss,
+        )
+
+    @staticmethod
+    def generate_vertex_index_column(vertex_df: DataFrame) -> DataFrame:
+        """Add a column that contains the vertex index to the DataFrame.
+
+        :param vertex_df: the input vertex DataFrame.
+        :returns: DataFrame that contains a new vertex index column.
+        """
+        _check_session()
+        return DataFrame(
+            GraphArSession.graphar.util.IndexGenerator.generateVertexIndexColumn(
+                vertex_df._jdf,
+            ),
+            GraphArSession.ss,
+        )
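A minimal sketch of the two basic helpers above, assuming the GraphAr-enabled SparkSession is already active (the toy data is illustrative only):

```python
from pyspark.sql import SparkSession
from graphar_pyspark.util import IndexGenerator

spark = SparkSession.getActiveSession()  # assumes the GraphAr session exists
people = spark.createDataFrame([("alice",), ("bob",)], ["id"])

mapping = IndexGenerator.construct_vertex_index_mapping(people, "id")
indexed = IndexGenerator.generate_vertex_index_column(people)
mapping.show()   # (vertex index, id) pairs
indexed.show()   # original columns plus a vertex index column
```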
+
+    @staticmethod
+    def generate_vertex_index_column_and_index_mapping(
+        vertex_df: DataFrame,
+        primary_key: str = "",
+    ) -> tuple[DataFrame, DataFrame]:
+        """Add an index column and generate a new index mapping.
+
+        :param vertex_df: the input vertex DataFrame.
+        :param primary_key: the primary key of vertex.
+        :returns: the new vertex DataFrame and mapping DataFrame.
+        """
+        _check_session()
+        jvm_res = GraphArSession.graphar.util.IndexGenerator.generateVertexIndexColumnAndIndexMapping(
+            vertex_df._jdf,
+            primary_key,
+        )
+
+        return (
+            DataFrame(jvm_res._1(), GraphArSession.ss),
+            DataFrame(jvm_res._2(), GraphArSession.ss),
+        )
+
+    @staticmethod
+    def generate_edge_index_column(edge_df: DataFrame) -> DataFrame:
+        """Add a column that contains the edge index to the input edge DataFrame.
+
+        :param edge_df: DataFrame with edges.
+        :returns: DataFrame with edges and index.
+        """
+        _check_session()
+        return DataFrame(
+            GraphArSession.graphar.util.IndexGenerator.generateEdgeIndexColumn(
+                edge_df._jdf,
+            ),
+            GraphArSession.ss,
+        )
+
+    @staticmethod
+    def generate_src_index_for_edges_from_mapping(
+        edge_df: DataFrame,
+        src_column_name: str,
+        src_index_mapping: DataFrame,
+    ) -> DataFrame:
+        """Join the edge table with the vertex index mapping for source column.
+
+        :param edge_df: edges DataFrame
+        :param src_column_name: join-column
+        :param src_index_mapping: mapping DataFrame
+        :returns: DataFrame with index
+        """
+        _check_session()
+        return DataFrame(
+            GraphArSession.graphar.util.IndexGenerator.generateSrcIndexForEdgesFromMapping(
+                edge_df._jdf,
+                src_column_name,
+                src_index_mapping._jdf,
+            ),
+            GraphArSession.ss,
+        )
+
+    @staticmethod
+    def generate_dst_index_for_edges_from_mapping(
+        edge_df: DataFrame,
+        dst_column_name: str,
+        dst_index_mapping: DataFrame,
+    ) -> DataFrame:
+        """Join the edge table with the vertex index mapping for destination column.
+
+        :param edge_df: edges DataFrame
+        :param dst_column_name: join-column
+        :param dst_index_mapping: mapping DataFrame
+        :returns: DataFrame with index
+        """
+        _check_session()
+        return DataFrame(
+            GraphArSession.graphar.util.IndexGenerator.generateDstIndexForEdgesFromMapping(
+                edge_df._jdf,
+                dst_column_name,
+                dst_index_mapping._jdf,
+            ),
+            GraphArSession.ss,
+        )
+
+    @staticmethod
+    def generate_src_and_dst_index_for_edges_from_mapping(
+        edge_df: DataFrame,
+        src_column_name: Optional[str],
+        dst_column_name: Optional[str],
+        src_index_mapping: DataFrame,
+        dst_index_mapping: DataFrame,
+    ) -> DataFrame:
+        """Join the edge table with the vertex index mapping for source & destination columns.
+
+        Assumes that the first and second columns are the src and dst columns if they are None.
+
+        :param edge_df: edge DataFrame
+        :param src_column_name: src column, optional (the first col from edge_df will be used if None)
+        :param dst_column_name: dst column, optional (the second col from edge_df will be used if None)
+        :param src_index_mapping: source mapping DataFrame
+        :param dst_index_mapping: dest mapping DataFrame
+        :returns: DataFrame with indices
+        """
+        _check_session()
+        if src_column_name is None:
+            src_column_name = edge_df.columns[0]
+
+        if dst_column_name is None:
+            dst_column_name = edge_df.columns[1]
+
+        return DataFrame(
+            GraphArSession.graphar.util.IndexGenerator.generateSrcAndDstIndexForEdgesFromMapping(
+                edge_df._jdf,
+                src_column_name,
+                dst_column_name,
+                src_index_mapping._jdf,
+                dst_index_mapping._jdf,
+            ),
+            GraphArSession.ss,
+        )
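Continuing the hedged IndexGenerator sketch above (`spark` and `mapping` are the hypothetical objects built there), joining an edge table against the mapping looks like this:

```python
knows = spark.createDataFrame([("alice", "bob")], ["src", "dst"])
edges_with_index = IndexGenerator.generate_src_and_dst_index_for_edges_from_mapping(
    knows, "src", "dst", mapping, mapping,  # the same mapping serves both ends here
)
edges_with_index.show()
```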
+
+    @staticmethod
+    def generate_src_index_for_edges(
+        edge_df: DataFrame,
+        src_column_name: str,
+    ) -> DataFrame:
+        """Construct vertex index for source column.
+
+        :param edge_df: edge DataFrame
+        :param src_column_name: source column
+        :returns: DataFrame with index
+        """
+        _check_session()
+        return DataFrame(
+            GraphArSession.graphar.util.IndexGenerator.generateSrcIndexForEdges(
+                edge_df._jdf,
+                src_column_name,
+            ),
+            GraphArSession.ss,
+        )
+
+    @staticmethod
+    def generate_dst_index_for_edges(
+        edge_df: DataFrame,
+        dst_column_name: str,
+    ) -> DataFrame:
+        """Construct vertex index for destination column.
+
+        :param edge_df: edge DataFrame
+        :param dst_column_name: destination column
+        :returns: DataFrame with index
+        """
+        _check_session()
+        return DataFrame(
+            GraphArSession.graphar.util.IndexGenerator.generateDstIndexForEdges(
+                edge_df._jdf,
+                dst_column_name,
+            ),
+            GraphArSession.ss,
+        )
+
+    @staticmethod
+    def generate_src_and_dst_index_unitedly_for_edges(
+        edge_df: DataFrame,
+        src_column_name: str,
+        dst_column_name: str,
+    ) -> DataFrame:
+        """Union and construct vertex index for source & destination columns.
+
+        :param edge_df: edge DataFrame
+        :param src_column_name: source column name
+        :param dst_column_name: destination column name
+        :returns: DataFrame with index
+        """
+        _check_session()
+        return DataFrame(
+            GraphArSession.graphar.util.IndexGenerator.generateSrcAndDstIndexUnitedlyForEdges(
+                edge_df._jdf,
+                src_column_name,
+                dst_column_name,
+            ),
+            GraphArSession.ss,
+        )
diff --git a/pyspark/graphar_pyspark/writer.py b/pyspark/graphar_pyspark/writer.py
new file mode 100644
index 000000000..444b66053
--- /dev/null
+++ b/pyspark/graphar_pyspark/writer.py
@@ -0,0 +1,185 @@
+# Copyright 2022-2023 Alibaba Group Holding Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Bindings to com.alibaba.graphar.writer."""
+
+
+from __future__ import annotations
+
+import os
+from typing import Optional
+
+from py4j.java_gateway import JavaObject
+from pyspark.sql import DataFrame
+
+from graphar_pyspark import GraphArSession, _check_session
+from graphar_pyspark.enums import AdjListType
+from graphar_pyspark.info import EdgeInfo, PropertyGroup, VertexInfo
+
+
+class VertexWriter:
+    """Writer for vertex DataFrame."""
+
+    def __init__(
+        self,
+        prefix: Optional[str],
+        vertex_info: Optional[VertexInfo],
+        vertex_df: Optional[DataFrame],
+        num_vertices: Optional[int],
+        jvm_obj: Optional[JavaObject],
+    ) -> None:
+        """One should not use this constructor directly, please use `from_scala` or `from_python`."""
+        _check_session()
+        if jvm_obj is not None:
+            self._jvm_vertex_writer_obj = jvm_obj
+        else:
+            num_vertices = -1 if num_vertices is None else num_vertices
+            self._jvm_vertex_writer_obj = GraphArSession.graphar.writer.VertexWriter(
+                prefix,
+                vertex_info.to_scala(),
+                vertex_df._jdf,
+                num_vertices,
+            )
+
+    def to_scala(self) -> JavaObject:
+        """Transform object to JVM representation.
+
+        :returns: JavaObject
+        """
+        return self._jvm_vertex_writer_obj
+
+    @staticmethod
+    def from_scala(jvm_obj: JavaObject) -> "VertexWriter":
+        """Create an instance of the Class from the corresponding JVM object.
+
+        :param jvm_obj: scala object in JVM.
+        :returns: instance of Python Class.
+        """
+        return VertexWriter(None, None, None, None, jvm_obj)
+
+    @staticmethod
+    def from_python(
+        prefix: str,
+        vertex_info: VertexInfo,
+        vertex_df: DataFrame,
+        num_vertices: Optional[int],
+    ) -> "VertexWriter":
+        """Create an instance of the Class from Python arguments.
+
+        :param prefix: the absolute prefix.
+        :param vertex_info: the vertex info that describes the vertex type.
+        :param vertex_df: the input vertex DataFrame.
+        :param num_vertices: the number of vertices, optional
+        """
+        if not prefix.endswith(os.sep):
+            prefix += os.sep
+        return VertexWriter(prefix, vertex_info, vertex_df, num_vertices, None)
+
+    def write_vertex_properties(
+        self,
+        property_group: Optional[PropertyGroup] = None,
+    ) -> None:
+        """Generate chunks of the property group (or all property groups) for vertex DataFrame.
+
+        :param property_group: property group (optional, default is None);
+        if provided, generate chunks of the property group, otherwise generate for all property groups.
+        """
+        if property_group is not None:
+            self._jvm_vertex_writer_obj.writeVertexProperties(property_group.to_scala())
+        else:
+            self._jvm_vertex_writer_obj.writeVertexProperties()
+
+
+class EdgeWriter:
+    """Writer for edge DataFrame."""
+
+    def __init__(
+        self,
+        prefix: Optional[str],
+        edge_info: Optional[EdgeInfo],
+        adj_list_type: Optional[AdjListType],
+        vertex_num: Optional[int],
+        edge_df: Optional[DataFrame],
+        jvm_obj: Optional[JavaObject],
+    ) -> None:
+        """One should not use this constructor directly, please use `from_scala` or `from_python`."""
+        _check_session()
+        if jvm_obj is not None:
+            self._jvm_edge_writer_obj = jvm_obj
+        else:
+            self._jvm_edge_writer_obj = GraphArSession.graphar.writer.EdgeWriter(
+                prefix,
+                edge_info.to_scala(),
+                adj_list_type.to_scala(),
+                vertex_num,
+                edge_df._jdf,
+            )
+
+    def to_scala(self) -> JavaObject:
+        """Transform object to JVM representation.
+
+        :returns: JavaObject
+        """
+        return self._jvm_edge_writer_obj
+
+    @staticmethod
+    def from_scala(jvm_obj: JavaObject) -> "EdgeWriter":
+        """Create an instance of the Class from the corresponding JVM object.
+
+        :param jvm_obj: scala object in JVM.
+        :returns: instance of Python Class.
+        """
+        return EdgeWriter(None, None, None, None, None, jvm_obj)
+
+    @staticmethod
+    def from_python(
+        prefix: str,
+        edge_info: EdgeInfo,
+        adj_list_type: AdjListType,
+        vertex_num: int,
+        edge_df: DataFrame,
+    ) -> "EdgeWriter":
+        """Create an instance of the Class from Python arguments.
+
+        :param prefix: the absolute prefix.
+        :param edge_info: the edge info that describes the edge type.
+        :param adj_list_type: the adj list type for the edge.
+        :param vertex_num: vertex number of the primary vertex label
+        :param edge_df: the input edge DataFrame.
+        """
+        if not prefix.endswith(os.sep):
+            prefix += os.sep
+        return EdgeWriter(prefix, edge_info, adj_list_type, vertex_num, edge_df, None)
+
+    def write_adj_list(self) -> None:
+        """Generate the chunks of AdjList from edge DataFrame for this edge type."""
+        self._jvm_edge_writer_obj.writeAdjList()
+
+    def write_edge_properties(
+        self,
+        property_group: Optional[PropertyGroup] = None,
+    ) -> None:
+        """Generate the chunks of all or selected property groups from edge DataFrame.
+
+        :param property_group: property group (optional, default is None);
+        if provided, generate the chunks of the selected property group, otherwise generate for all groups.
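Rounding out the write path, a hedged sketch of both writers. The inputs are hypothetical: `vertex_info`/`edge_info` loaded from YAML as in the reader sketches, and `indexed`/`edges_with_index` produced by the IndexGenerator sketches; the `AdjListType.ORDERED_BY_SOURCE` member name and output path are assumptions:

```python
from graphar_pyspark.enums import AdjListType
from graphar_pyspark.writer import EdgeWriter, VertexWriter

v_writer = VertexWriter.from_python("/tmp/out/", vertex_info, indexed, None)
v_writer.write_vertex_properties()      # chunks for every property group

e_writer = EdgeWriter.from_python(
    "/tmp/out/",
    edge_info,
    AdjListType.ORDERED_BY_SOURCE,      # enum member name assumed
    indexed.count(),                    # vertex number of the primary label
    edges_with_index,
)
e_writer.write_edges()                  # AdjList chunks plus all property groups
```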
+ """ + if property_group is not None: + self._jvm_edge_writer_obj.writeEdgeProperties(property_group.to_scala()) + else: + self._jvm_edge_writer_obj.writeEdgeProperties() + + def write_edges(self) -> None: + """Generate the chunks for the AdjList and all property groups from edge.""" + self._jvm_edge_writer_obj.writeEdges() diff --git a/pyspark/poetry.lock b/pyspark/poetry.lock new file mode 100644 index 000000000..83aee3f21 --- /dev/null +++ b/pyspark/poetry.lock @@ -0,0 +1,1704 @@ +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. + +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "babel" +version = "2.14.0" +description = "Internationalization utilities" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "beautifulsoup4" +version = "4.12.2" +description = "Screen-scraping library" +category = "dev" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, + {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "bleach" +version = "6.1.0" +description = "An easy safelist-based HTML-sanitizing tool." 
+category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, +] + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.3)"] + +[[package]] +name = "breathe" +version = "4.35.0" +description = "Sphinx Doxygen renderer" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "breathe-4.35.0-py3-none-any.whl", hash = "sha256:52c581f42ca4310737f9e435e3851c3d1f15446205a85fbc272f1f97ed74f5be"}, + {file = "breathe-4.35.0.tar.gz", hash = "sha256:5165541c3c67b6c7adde8b3ecfe895c6f7844783c4076b6d8d287e4f33d62386"}, +] + +[package.dependencies] +docutils = ">=0.12" +Sphinx = ">=4.0,<5.0.0 || >5.0.0" + +[[package]] +name = "certifi" +version = "2023.11.17" +description = "Python package for providing Mozilla's CA Bundle." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "dev" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.4.0" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = 
"coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] + +[[package]] +name = "docutils" +version = "0.17.1" +description = "Docutils -- Python Documentation Utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = 
"docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, + {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastjsonschema" +version = "2.19.1" +description = "Fastest Python implementation of JSON schema" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, + {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, +] + +[package.extras] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + +[[package]] +name = "furo" +version = "2022.9.29" +description = "A clean customisable Sphinx documentation theme." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "furo-2022.9.29-py3-none-any.whl", hash = "sha256:559ee17999c0f52728481dcf6b1b0cf8c9743e68c5e3a18cb45a7992747869a9"}, + {file = "furo-2022.9.29.tar.gz", hash = "sha256:d4238145629c623609c2deb5384f8d036e2a1ee2a101d64b67b4348112470dbd"}, +] + +[package.dependencies] +beautifulsoup4 = "*" +pygments = ">=2.7" +sphinx = ">=4.0,<6.0" +sphinx-basic-ng = "*" + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.0.1" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", 
"packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonschema" +version = "4.20.0" +description = "An implementation of JSON Schema validation for Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.20.0-py3-none-any.whl", hash = "sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3"}, + {file = "jsonschema-4.20.0.tar.gz", hash = "sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, +] + +[package.dependencies] +referencing = ">=0.31.0" + +[[package]] +name = "jupyter-client" +version = "8.6.0" +description = "Jupyter protocol implementation and client libraries" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"}, + {file = "jupyter_client-8.6.0.tar.gz", hash = "sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["coverage", 
"ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-core" +version = "5.7.0" +description = "Jupyter core package. A base package on which Jupyter projects rely." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.7.0-py3-none-any.whl", hash = "sha256:16eea462f7dad23ba9f86542bdf17f830804e2028eb48d609b6134d91681e983"}, + {file = "jupyter_core-5.7.0.tar.gz", hash = "sha256:cb8d3ed92144d2463a3c5664fdd686a3f0c1442ea45df8babb1c1a9e6333fe03"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "jupyterlab-pygments" +version = "0.3.0" +description = "Pygments theme using JupyterLab CSS variables" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, + {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, +] + +[[package]] +name = "markupsafe" +version = "2.0.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + 
{file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, +] + +[[package]] +name = "mistune" +version = "3.0.2" +description = "A sane and fast Markdown parser with useful plugins and renderers" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, + {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, +] + +[[package]] +name = "nbclient" +version = "0.9.0" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
+category = "dev" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "nbclient-0.9.0-py3-none-any.whl", hash = "sha256:a3a1ddfb34d4a9d17fc744d655962714a866639acd30130e9be84191cd97cd15"}, + {file = "nbclient-0.9.0.tar.gz", hash = "sha256:4b28c207877cf33ef3a9838cdc7a54c5ceff981194a82eac59d558f05487295e"}, +] + +[package.dependencies] +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +nbformat = ">=5.1" +traitlets = ">=5.4" + +[package.extras] +dev = ["pre-commit"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] +test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] + +[[package]] +name = "nbconvert" +version = "7.14.0" +description = "Converting Jupyter Notebooks" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "nbconvert-7.14.0-py3-none-any.whl", hash = "sha256:483dde47facdaa4875903d651305ad53cd76e2255ae3c61efe412a95f2d22a24"}, + {file = "nbconvert-7.14.0.tar.gz", hash = "sha256:92b9a44b63e5a7fb4f6fa0ef41261e35c16925046ccd1c04a5c8099bf100476e"}, +] + +[package.dependencies] +beautifulsoup4 = "*" +bleach = "!=5.0.0" +defusedxml = "*" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0" +jupyter-core = ">=4.7" +jupyterlab-pygments = "*" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<4" +nbclient = ">=0.5.0" +nbformat = ">=5.7" +packaging = "*" +pandocfilters = ">=1.4.1" +pygments = ">=2.4.1" +tinycss2 = "*" +traitlets = ">=5.1" + +[package.extras] +all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] +docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] +qtpdf = ["nbconvert[qtpng]"] +qtpng = ["pyqtwebengine (>=5.15)"] +serve = ["tornado (>=6.1)"] +test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest"] +webpdf = ["playwright"] + +[[package]] +name = "nbformat" +version = "5.9.2" +description = "The Jupyter Notebook format" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "nbformat-5.9.2-py3-none-any.whl", hash = "sha256:1c5172d786a41b82bcfd0c23f9e6b6f072e8fb49c39250219e4acfff1efe89e9"}, + {file = "nbformat-5.9.2.tar.gz", hash = "sha256:5f98b5ba1997dff175e77e0c17d5c10a96eaed2cbd1de3533d1fc35d5e111192"}, +] + +[package.dependencies] +fastjsonschema = "*" +jsonschema = ">=2.6" +jupyter-core = "*" +traitlets = ">=5.1" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["pep440", "pre-commit", "pytest", "testpath"] + +[[package]] +name = "nbsphinx" +version = "0.9.3" +description = "Jupyter Notebook Tools for Sphinx" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "nbsphinx-0.9.3-py3-none-any.whl", hash = "sha256:6e805e9627f4a358bd5720d5cbf8bf48853989c79af557afd91a5f22e163029f"}, + {file = "nbsphinx-0.9.3.tar.gz", hash = "sha256:ec339c8691b688f8676104a367a4b8cf3ea01fd089dc28d24dec22d563b11562"}, +] + +[package.dependencies] +docutils = "*" +jinja2 = "*" +nbconvert = "!=5.4" +nbformat = "*" +sphinx = ">=1.8" +traitlets = ">=5" + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = 
"packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandocfilters" +version = "1.5.0" +description = "Utilities for writing pandoc filters in python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, + {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, +] + +[[package]] +name = "platformdirs" +version = "4.1.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py4j" +version = "0.10.9.5" +description = "Enables Python programs to dynamically access arbitrary Java objects" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "py4j-0.10.9.5-py2.py3-none-any.whl", hash = "sha256:52d171a6a2b031d8a5d1de6efe451cf4f5baff1a2819aabc3741c8406539ba04"}, + {file = "py4j-0.10.9.5.tar.gz", hash = "sha256:276a4a3c5a2154df1860ef3303a927460e02e97b047dc0a47c1c3fb8cce34db6"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyspark" +version = "3.2.2" +description = "Apache Spark Python API" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyspark-3.2.2.tar.gz", hash = "sha256:5455214cf0b83d4a184cda25ca3b0812481915353b180cf7d7ac227728a4d99e"}, +] + +[package.dependencies] +py4j = "0.10.9.5" + +[package.extras] +ml = ["numpy (>=1.7)"] +mllib = ["numpy (>=1.7)"] +pandas-on-spark = ["numpy (>=1.14)", "pandas (>=0.23.2)", "pyarrow (>=1.0.0)"] +sql = ["pandas (>=0.23.2)", "pyarrow (>=1.0.0)"] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = 
"PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = 
"sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "pyzmq" +version = "25.1.2" +description = "Python bindings for 0MQ" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"}, + {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = "sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"}, + {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"}, + {file = "pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"}, + {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"}, + {file = 
"pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"}, + {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"}, + {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = "sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"}, + {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"}, + {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = "sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"}, + {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"}, + {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = 
"sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"}, + {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"}, + {file = "pyzmq-25.1.2.tar.gz", hash = "sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "referencing" +version = "0.32.0" +description = "JSON Referencing + Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.32.0-py3-none-any.whl", hash = "sha256:bdcd3efb936f82ff86f993093f6da7435c7de69a3b3a5a06678a6050184bee99"}, + {file = "referencing-0.32.0.tar.gz", hash = "sha256:689e64fe121843dcfd57b71933318ef1f91188ffb45367332700a86ac8fd6161"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rpds-py" +version = "0.16.2" +description = "Python bindings to Rust's persistent data structures (rpds)" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:509b617ac787cd1149600e731db9274ebbef094503ca25158e6f23edaba1ca8f"}, + {file = "rpds_py-0.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:413b9c17388bbd0d87a329d8e30c1a4c6e44e2bb25457f43725a8e6fe4161e9e"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2946b120718eba9af2b4dd103affc1164a87b9e9ebff8c3e4c05d7b7a7e274e2"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35ae5ece284cf36464eb160880018cf6088a9ac5ddc72292a6092b6ef3f4da53"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc6a7620ba7639a3db6213da61312cb4aa9ac0ca6e00dc1cbbdc21c2aa6eb57"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cb6fe8ecdfffa0e711a75c931fb39f4ba382b4b3ccedeca43f18693864fe850"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dace7b26a13353e24613417ce2239491b40a6ad44e5776a18eaff7733488b44"}, + {file = "rpds_py-0.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1bdbc5fcb04a7309074de6b67fa9bc4b418ab3fc435fec1f2779a0eced688d04"}, + {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f42e25c016927e2a6b1ce748112c3ab134261fc2ddc867e92d02006103e1b1b7"}, + {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eab36eae3f3e8e24b05748ec9acc66286662f5d25c52ad70cadab544e034536b"}, + {file = "rpds_py-0.16.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0474df4ade9a3b4af96c3d36eb81856cb9462e4c6657d4caecfd840d2a13f3c9"}, + {file = "rpds_py-0.16.2-cp310-none-win32.whl", hash = "sha256:84c5a4d1f9dd7e2d2c44097fb09fffe728629bad31eb56caf97719e55575aa82"}, + {file = 
"rpds_py-0.16.2-cp310-none-win_amd64.whl", hash = "sha256:2bd82db36cd70b3628c0c57d81d2438e8dd4b7b32a6a9f25f24ab0e657cb6c4e"}, + {file = "rpds_py-0.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:adc0c3d6fc6ae35fee3e4917628983f6ce630d513cbaad575b4517d47e81b4bb"}, + {file = "rpds_py-0.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ec23fcad480e77ede06cf4127a25fc440f7489922e17fc058f426b5256ee0edb"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07aab64e2808c3ebac2a44f67e9dc0543812b715126dfd6fe4264df527556cb6"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a4ebb8b20bd09c5ce7884c8f0388801100f5e75e7f733b1b6613c713371feefc"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3d7e2ea25d3517c6d7e5a1cc3702cffa6bd18d9ef8d08d9af6717fc1c700eed"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f28ac0e8e7242d140f99402a903a2c596ab71550272ae9247ad78f9a932b5698"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19f00f57fdd38db4bb5ad09f9ead1b535332dbf624200e9029a45f1f35527ebb"}, + {file = "rpds_py-0.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3da5a4c56953bdbf6d04447c3410309616c54433146ccdb4a277b9cb499bc10e"}, + {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec2e1cf025b2c0f48ec17ff3e642661da7ee332d326f2e6619366ce8e221f018"}, + {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e0441fb4fdd39a230477b2ca9be90868af64425bfe7b122b57e61e45737a653b"}, + {file = "rpds_py-0.16.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9f0350ef2fba5f34eb0c9000ea328e51b9572b403d2f7f3b19f24085f6f598e8"}, + {file = "rpds_py-0.16.2-cp311-none-win32.whl", hash = "sha256:5a80e2f83391ad0808b4646732af2a7b67550b98f0cae056cb3b40622a83dbb3"}, + {file = "rpds_py-0.16.2-cp311-none-win_amd64.whl", hash = "sha256:e04e56b4ca7a770593633556e8e9e46579d66ec2ada846b401252a2bdcf70a6d"}, + {file = "rpds_py-0.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5e6caa3809e50690bd92fa490f5c38caa86082c8c3315aa438bce43786d5e90d"}, + {file = "rpds_py-0.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e53b9b25cac9065328901713a7e9e3b12e4f57ef4280b370fbbf6fef2052eef"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af27423662f32d7501a00c5e7342f7dbd1e4a718aea7a239781357d15d437133"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43d4dd5fb16eb3825742bad8339d454054261ab59fed2fbac84e1d84d5aae7ba"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e061de3b745fe611e23cd7318aec2c8b0e4153939c25c9202a5811ca911fd733"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b811d182ad17ea294f2ec63c0621e7be92a1141e1012383461872cead87468f"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5552f328eaef1a75ff129d4d0c437bf44e43f9436d3996e8eab623ea0f5fcf73"}, + {file = "rpds_py-0.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dcbe1f8dd179e4d69b70b1f1d9bb6fd1e7e1bdc9c9aad345cdeb332e29d40748"}, + {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:8aad80645a011abae487d356e0ceb359f4938dfb6f7bcc410027ed7ae4f7bb8b"}, + {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b6f5549d6ed1da9bfe3631ca9483ae906f21410be2445b73443fa9f017601c6f"}, + {file = "rpds_py-0.16.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d452817e0d9c749c431a1121d56a777bd7099b720b3d1c820f1725cb40928f58"}, + {file = "rpds_py-0.16.2-cp312-none-win32.whl", hash = "sha256:888a97002e986eca10d8546e3c8b97da1d47ad8b69726dcfeb3e56348ebb28a3"}, + {file = "rpds_py-0.16.2-cp312-none-win_amd64.whl", hash = "sha256:d8dda2a806dfa4a9b795950c4f5cc56d6d6159f7d68080aedaff3bdc9b5032f5"}, + {file = "rpds_py-0.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:071980663c273bf3d388fe5c794c547e6f35ba3335477072c713a3176bf14a60"}, + {file = "rpds_py-0.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:726ac36e8a3bb8daef2fd482534cabc5e17334052447008405daca7ca04a3108"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9e557db6a177470316c82f023e5d571811c9a4422b5ea084c85da9aa3c035fc"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:90123853fc8b1747f80b0d354be3d122b4365a93e50fc3aacc9fb4c2488845d6"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a61f659665a39a4d17d699ab3593d7116d66e1e2e3f03ef3fb8f484e91908808"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc97f0640e91d7776530f06e6836c546c1c752a52de158720c4224c9e8053cad"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44a54e99a2b9693a37ebf245937fd6e9228b4cbd64b9cc961e1f3391ec6c7391"}, + {file = "rpds_py-0.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd4b677d929cf1f6bac07ad76e0f2d5de367e6373351c01a9c0a39f6b21b4a8b"}, + {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5ef00873303d678aaf8b0627e111fd434925ca01c657dbb2641410f1cdaef261"}, + {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:349cb40897fd529ca15317c22c0eab67f5ac5178b5bd2c6adc86172045210acc"}, + {file = "rpds_py-0.16.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2ddef620e70eaffebed5932ce754d539c0930f676aae6212f8e16cd9743dd365"}, + {file = "rpds_py-0.16.2-cp38-none-win32.whl", hash = "sha256:882ce6e25e585949c3d9f9abd29202367175e0aab3aba0c58c9abbb37d4982ff"}, + {file = "rpds_py-0.16.2-cp38-none-win_amd64.whl", hash = "sha256:f4bd4578e44f26997e9e56c96dedc5f1af43cc9d16c4daa29c771a00b2a26851"}, + {file = "rpds_py-0.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:69ac7ea9897ec201ce68b48582f3eb34a3f9924488a5432a93f177bf76a82a7e"}, + {file = "rpds_py-0.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a9880b4656efe36ccad41edc66789e191e5ee19a1ea8811e0aed6f69851a82f4"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee94cb58c0ba2c62ee108c2b7c9131b2c66a29e82746e8fa3aa1a1effbd3dcf1"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24f7a2eb3866a9e91f4599851e0c8d39878a470044875c49bd528d2b9b88361c"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca57468da2d9a660bcf8961637c85f2fbb2aa64d9bc3f9484e30c3f9f67b1dd7"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:ccd4e400309e1f34a5095bf9249d371f0fd60f8a3a5c4a791cad7b99ce1fd38d"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80443fe2f7b3ea3934c5d75fb0e04a5dbb4a8e943e5ff2de0dec059202b70a8b"}, + {file = "rpds_py-0.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d6a9f052e72d493efd92a77f861e45bab2f6be63e37fa8ecf0c6fd1a58fedb0"}, + {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:35953f4f2b3216421af86fd236b7c0c65935936a94ea83ddbd4904ba60757773"}, + {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:981d135c7cdaf6cd8eadae1c950de43b976de8f09d8e800feed307140d3d6d00"}, + {file = "rpds_py-0.16.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d0dd7ed2f16df2e129496e7fbe59a34bc2d7fc8db443a606644d069eb69cbd45"}, + {file = "rpds_py-0.16.2-cp39-none-win32.whl", hash = "sha256:703d95c75a72e902544fda08e965885525e297578317989fd15a6ce58414b41d"}, + {file = "rpds_py-0.16.2-cp39-none-win_amd64.whl", hash = "sha256:e93ec1b300acf89730cf27975ef574396bc04edecc358e9bd116fb387a123239"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:44627b6ca7308680a70766454db5249105fa6344853af6762eaad4158a2feebe"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3f91df8e6dbb7360e176d1affd5fb0246d2b88d16aa5ebc7db94fd66b68b61da"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d904c5693e08bad240f16d79305edba78276be87061c872a4a15e2c301fa2c0"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:290a81cfbe4673285cdf140ec5cd1658ffbf63ab359f2b352ebe172e7cfa5bf0"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b634c5ec0103c5cbebc24ebac4872b045cccb9456fc59efdcf6fe39775365bd2"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a297a4d08cc67c7466c873c78039d87840fb50d05473db0ec1b7b03d179bf322"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2e75e17bd0bb66ee34a707da677e47c14ee51ccef78ed6a263a4cc965a072a1"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f1b9d9260e06ea017feb7172976ab261e011c1dc2f8883c7c274f6b2aabfe01a"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:162d7cd9cd311c1b0ff1c55a024b8f38bd8aad1876b648821da08adc40e95734"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:9b32f742ce5b57201305f19c2ef7a184b52f6f9ba6871cc042c2a61f0d6b49b8"}, + {file = "rpds_py-0.16.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac08472f41ea77cd6a5dae36ae7d4ed3951d6602833af87532b556c1b4601d63"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:495a14b72bbe217f2695dcd9b5ab14d4f8066a00f5d209ed94f0aca307f85f6e"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:8d6b6937ae9eac6d6c0ca3c42774d89fa311f55adff3970fb364b34abde6ed3d"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a61226465bda9283686db8f17d02569a98e4b13c637be5a26d44aa1f1e361c2"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:5cf6af100ffb5c195beec11ffaa8cf8523057f123afa2944e6571d54da84cdc9"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6df15846ee3fb2e6397fe25d7ca6624af9f89587f3f259d177b556fed6bebe2c"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1be2f033df1b8be8c3167ba3c29d5dca425592ee31e35eac52050623afba5772"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96f957d6ab25a78b9e7fc9749d754b98eac825a112b4e666525ce89afcbd9ed5"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:088396c7c70e59872f67462fcac3ecbded5233385797021976a09ebd55961dfe"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4c46ad6356e1561f2a54f08367d1d2e70a0a1bb2db2282d2c1972c1d38eafc3b"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:47713dc4fce213f5c74ca8a1f6a59b622fc1b90868deb8e8e4d993e421b4b39d"}, + {file = "rpds_py-0.16.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:f811771019f063bbd0aa7bb72c8a934bc13ebacb4672d712fc1639cfd314cccc"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f19afcfc0dd0dca35694df441e9b0f95bc231b512f51bded3c3d8ca32153ec19"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a4b682c5775d6a3d21e314c10124599976809455ee67020e8e72df1769b87bc3"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c647ca87fc0ebe808a41de912e9a1bfef9acb85257e5d63691364ac16b81c1f0"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:302bd4983bbd47063e452c38be66153760112f6d3635c7eeefc094299fa400a9"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf721ede3eb7b829e4a9b8142bd55db0bdc82902720548a703f7e601ee13bdc3"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:358dafc89ce3894c7f486c615ba914609f38277ef67f566abc4c854d23b997fa"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cad0f59ee3dc35526039f4bc23642d52d5f6616b5f687d846bfc6d0d6d486db0"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cffa76b385dfe1e38527662a302b19ffb0e7f5cf7dd5e89186d2c94a22dd9d0c"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:83640a5d7cd3bff694747d50436b8b541b5b9b9782b0c8c1688931d6ee1a1f2d"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:ed99b4f7179d2111702020fd7d156e88acd533f5a7d3971353e568b6051d5c97"}, + {file = "rpds_py-0.16.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4022b9dc620e14f30201a8a73898a873c8e910cb642bcd2f3411123bc527f6ac"}, + {file = "rpds_py-0.16.2.tar.gz", hash = "sha256:781ef8bfc091b19960fc0142a23aedadafa826bc32b433fdfe6fd7f964d7ef44"}, +] + +[[package]] +name = "ruff" +version = "0.1.11" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.1.11-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:a7f772696b4cdc0a3b2e527fc3c7ccc41cdcb98f5c80fdd4f2b8c50eb1458196"}, + {file = "ruff-0.1.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:934832f6ed9b34a7d5feea58972635c2039c7a3b434fe5ba2ce015064cb6e955"}, + {file = "ruff-0.1.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea0d3e950e394c4b332bcdd112aa566010a9f9c95814844a7468325290aabfd9"}, + {file = "ruff-0.1.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bd4025b9c5b429a48280785a2b71d479798a69f5c2919e7d274c5f4b32c3607"}, + {file = "ruff-0.1.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1ad00662305dcb1e987f5ec214d31f7d6a062cae3e74c1cbccef15afd96611d"}, + {file = "ruff-0.1.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4b077ce83f47dd6bea1991af08b140e8b8339f0ba8cb9b7a484c30ebab18a23f"}, + {file = "ruff-0.1.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4a88efecec23c37b11076fe676e15c6cdb1271a38f2b415e381e87fe4517f18"}, + {file = "ruff-0.1.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b25093dad3b055667730a9b491129c42d45e11cdb7043b702e97125bcec48a1"}, + {file = "ruff-0.1.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:231d8fb11b2cc7c0366a326a66dafc6ad449d7fcdbc268497ee47e1334f66f77"}, + {file = "ruff-0.1.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:09c415716884950080921dd6237767e52e227e397e2008e2bed410117679975b"}, + {file = "ruff-0.1.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0f58948c6d212a6b8d41cd59e349751018797ce1727f961c2fa755ad6208ba45"}, + {file = "ruff-0.1.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:190a566c8f766c37074d99640cd9ca3da11d8deae2deae7c9505e68a4a30f740"}, + {file = "ruff-0.1.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6464289bd67b2344d2a5d9158d5eb81025258f169e69a46b741b396ffb0cda95"}, + {file = "ruff-0.1.11-py3-none-win32.whl", hash = "sha256:9b8f397902f92bc2e70fb6bebfa2139008dc72ae5177e66c383fa5426cb0bf2c"}, + {file = "ruff-0.1.11-py3-none-win_amd64.whl", hash = "sha256:eb85ee287b11f901037a6683b2374bb0ec82928c5cbc984f575d0437979c521a"}, + {file = "ruff-0.1.11-py3-none-win_arm64.whl", hash = "sha256:97ce4d752f964ba559c7023a86e5f8e97f026d511e48013987623915431c7ea9"}, + {file = "ruff-0.1.11.tar.gz", hash = "sha256:f9d4d88cb6eeb4dfe20f9f0519bd2eaba8119bde87c3d5065c541dbae2b5a2cb"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "sphinx" +version = "4.5.0" +description = "Python documentation generator" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "Sphinx-4.5.0-py3-none-any.whl", hash = "sha256:ebf612653238bcc8f4359627a9b7ce44ede6fdd75d9d30f68255c7383d3a6226"}, + {file = "Sphinx-4.5.0.tar.gz", hash = "sha256:7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=1.3" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.18" +imagesize = "*" +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} +Jinja2 = ">=2.3" +packaging = "*" +Pygments = ">=2.0" +requests = ">=2.5.0" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] + +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b2" +description = "A modern skeleton for Sphinx themes." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"}, + {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"}, +] + +[package.dependencies] +sphinx = ">=4.0" + +[package.extras] +docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] + +[[package]] +name = "sphinx-copybutton" +version = "0.5.2" +description = "Add a copy button to each of your code cells." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, + {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, +] + +[package.dependencies] +sphinx = ">=1.8" + +[package.extras] +code-style = ["pre-commit (==2.12.1)"] +rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] + +[[package]] +name = "sphinx-panels" +version = "0.6.0" +description = "A sphinx extension for creating panels in a grid layout." 
+category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "sphinx-panels-0.6.0.tar.gz", hash = "sha256:d36dcd26358117e11888f7143db4ac2301ebe90873ac00627bf1fe526bf0f058"}, + {file = "sphinx_panels-0.6.0-py3-none-any.whl", hash = "sha256:bd64afaf85c07f8096d21c8247fc6fd757e339d1be97832c8832d6ae5ed2e61d"}, +] + +[package.dependencies] +docutils = "*" +sphinx = ">=2,<5" + +[package.extras] +code-style = ["pre-commit (>=2.7.0,<2.8.0)"] +live-dev = ["sphinx-autobuild", "web-compile (>=0.2.0,<0.3.0)"] +testing = ["pytest (>=6.0.1,<6.1.0)", "pytest-regressions (>=2.0.1,<2.1.0)"] +themes = ["myst-parser (>=0.12.9,<0.13.0)", "pydata-sphinx-theme (>=0.4.0,<0.5.0)", "sphinx-book-theme (>=0.0.36,<0.1.0)", "sphinx-rtd-theme"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
+category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxemoji" +version = "0.2.0" +description = "An extension to use emoji codes in your Sphinx documentation" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "sphinxemoji-0.2.0.tar.gz", hash = "sha256:27861d1dd7c6570f5e63020dac9a687263f7481f6d5d6409eb31ecebcc804e4c"}, +] + +[package.dependencies] +sphinx = ">=1.8" + +[[package]] +name = "sphinxext-opengraph" +version = "0.9.1" +description = "Sphinx Extension to enable OGP support" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxext-opengraph-0.9.1.tar.gz", hash = "sha256:dd2868a1e7c9497977fbbf44cc0844a42af39ca65fe1bb0272518af225d06fc5"}, + {file = "sphinxext_opengraph-0.9.1-py3-none-any.whl", hash = "sha256:b3b230cc6a5b5189139df937f0d9c7b23c7c204493b22646273687969dcb760e"}, +] + +[package.dependencies] +sphinx = ">=4.0" + +[[package]] +name = "tinycss2" +version = "1.2.1" +description = "A tiny CSS parser" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, + {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, +] + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["flake8", "isort", "pytest"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tornado" +version = "6.4" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+category = "dev" +optional = false +python-versions = ">= 3.8" +files = [ + {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, + {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, + {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, + {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, +] + +[[package]] +name = "traitlets" +version = "5.14.1" +description = "Traitlets Python configuration system" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"}, + {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "urllib3" +version = "2.1.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, + {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9" +content-hash = "fb66816dab017aabb2ab255ef4477d65a0f38d53cda70f0c1df148b86c094149" diff --git a/pyspark/pyproject.toml b/pyspark/pyproject.toml new file mode 100644 index 000000000..d6cb317ae --- /dev/null +++ b/pyspark/pyproject.toml @@ -0,0 +1,95 @@ +# copyright 2022-2023 alibaba group holding limited. +# +# licensed under the apache license, version 2.0 (the "license"); +# you may not use this file except in compliance with the license. +# you may obtain a copy of the license at +# +# http://www.apache.org/licenses/license-2.0 +# +# unless required by applicable law or agreed to in writing, software +# distributed under the license is distributed on an "as is" basis, +# without warranties or conditions of any kind, either express or implied. +# see the license for the specific language governing permissions and +# limitations under the license. 
+
+[tool.poetry]
+name = "graphar-pyspark"
+version = "0.0.1"
+description = "PySpark bindings to the GraphAr Spark Scala package"
+authors = ["Semyon Sinchenko "]
+readme = "README.md"
+packages = [{include = "graphar_pyspark"}]
+
+[tool.poetry.dependencies]
+python = "^3.9"
+
+[tool.poetry.group.spark]
+optional = true
+
+[tool.poetry.group.spark.dependencies]
+pyspark = "3.2.2" # TODO: relax the requirement once the Scala part is available for multiple Spark versions
+
+[tool.poetry.group.lint]
+optional = true
+
+[tool.poetry.group.lint.dependencies]
+ruff = "*"
+
+[tool.poetry.group.tests]
+optional = true
+
+[tool.poetry.group.tests.dependencies]
+pytest = "*"
+pytest-cov = "*"
+pyyaml = "*"
+
+[tool.poetry.group.docs]
+optional = true
+
+[tool.poetry.group.docs.dependencies]
+breathe = "*"
+docutils = "*"
+furo = "*" # Sphinx theme
+nbsphinx = "*"
+sphinx = ">=3.0.2"
+jinja2 = ">=3.1.2"
+sphinx-copybutton = "*"
+sphinx-panels = "*"
+sphinxemoji = "*"
+sphinxext-opengraph = "*"
+markupsafe = "2.0.1"
+
+[tool.ruff]
+exclude = ["tests"]
+
+line-length = 150
+select = ["ALL"]
+ignore = [
+    "UP007", # not compatible with Python < 3.10
+    "UP037", # not compatible with Python < 3.10
+    "ANN101", # requires a 3rd-party tool like typing-extensions
+    "ANN401", # it is questionable
+    "SLF001", # we cannot avoid accessing private members of SparkSession
+    "PLR0913", # the public API follows the Scala API; changing signatures is not possible
+    "FBT001", # not a problem in our code
+    "FBT002", # not a problem in our code
+    "TD002", # our TODOs do not have authorship
+    "TD003", # our TODOs do not have issue links
+    "D203", # not compatible with D211
+    "D213", # not compatible with D212
+    "TCH001", # the whole idea of the TCHxxx rules is questionable
+    "TCH002", # it makes code harder to read
+    "TCH003", # it makes code harder to read
+    "D105", # magic methods are self-documented
+    "B905", # does not work in Python 3.9
+]
+
+[tool.pytest]
+testpaths = "tests"
+
+[tool.coverage.run]
+omit = ["tests/*"]
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
diff --git a/pyspark/tests/__init__.py b/pyspark/tests/__init__.py
new file mode 100644
index 000000000..26d7050fa
--- /dev/null
+++ b/pyspark/tests/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2022-2023 Alibaba Group Holding Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/pyspark/tests/conftest.py b/pyspark/tests/conftest.py
new file mode 100644
index 000000000..61bdd9591
--- /dev/null
+++ b/pyspark/tests/conftest.py
@@ -0,0 +1,42 @@
+# Copyright 2022-2023 Alibaba Group Holding Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pathlib import Path
+
+import pytest
+from pyspark.sql import SparkSession
+
+# The tests expect the shaded GraphAr Scala JAR to exist in spark/target;
+# build the Scala project first.
+JARS_PATH = Path(__file__).parent.parent.parent.joinpath("spark").joinpath("target")
+GRAPHAR_SHADED_JAR_PATH = None
+
+for jar_file in JARS_PATH.glob("*.jar"):
+    if "shaded" in jar_file.name:
+        GRAPHAR_SHADED_JAR_PATH = jar_file.absolute()
+
+if GRAPHAR_SHADED_JAR_PATH is None:
+    raise FileNotFoundError("You need to build the Scala part first!")
+
+
+@pytest.fixture(scope="session")
+def spark():
+    # One local SparkSession is shared by the whole test session; the shaded
+    # GraphAr JAR is put on the classpath via spark.jars.
+    spark = (
+        SparkSession.builder.master("local[1]")
+        .appName("graphar-pyspark-local-tests")
+        .config("spark.jars", str(GRAPHAR_SHADED_JAR_PATH))
+        .getOrCreate()
+    )
+    yield spark
+    spark.stop()
diff --git a/pyspark/tests/test_enums.py b/pyspark/tests/test_enums.py
new file mode 100644
index 000000000..a28571fd5
--- /dev/null
+++ b/pyspark/tests/test_enums.py
@@ -0,0 +1,61 @@
+# Copyright 2022-2023 Alibaba Group Holding Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
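+
+# Each check below is a round trip: a Python enum member is converted to its
+# JVM counterpart with to_scala() and back with from_scala(), and must come
+# back equal to the original value.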
+
+from graphar_pyspark import initialize
+from graphar_pyspark.enums import AdjListType, FileType, GarType
+
+
+def test_gar_type(spark):
+    initialize(spark)
+
+    gar_string = GarType.STRING
+    gar_int = GarType.INT32
+    gar_long = GarType.INT64
+    gar_float = GarType.FLOAT
+    gar_double = GarType.DOUBLE
+    gar_bool = GarType.BOOL
+    gar_list = GarType.LIST
+
+    assert gar_string == GarType.from_scala(gar_string.to_scala())
+    assert gar_int == GarType.from_scala(gar_int.to_scala())
+    assert gar_long == GarType.from_scala(gar_long.to_scala())
+    assert gar_float == GarType.from_scala(gar_float.to_scala())
+    assert gar_double == GarType.from_scala(gar_double.to_scala())
+    assert gar_bool == GarType.from_scala(gar_bool.to_scala())
+    assert gar_list == GarType.from_scala(gar_list.to_scala())
+
+
+def test_file_type(spark):
+    initialize(spark)
+
+    file_type_csv = FileType.CSV
+    file_type_orc = FileType.ORC
+    file_type_parquet = FileType.PARQUET
+
+    assert file_type_csv == FileType.from_scala(file_type_csv.to_scala())
+    assert file_type_orc == FileType.from_scala(file_type_orc.to_scala())
+    assert file_type_parquet == FileType.from_scala(file_type_parquet.to_scala())
+
+
+def test_adj_list_type(spark):
+    initialize(spark)
+
+    ordered_by_dest = AdjListType.ORDERED_BY_DEST
+    ordered_by_src = AdjListType.ORDERED_BY_SOURCE
+    unordered_by_dest = AdjListType.UNORDERED_BY_DEST
+    unordered_by_src = AdjListType.UNORDERED_BY_SOURCE
+
+    assert ordered_by_dest == AdjListType.from_scala(ordered_by_dest.to_scala())
+    assert ordered_by_src == AdjListType.from_scala(ordered_by_src.to_scala())
+    assert unordered_by_dest == AdjListType.from_scala(unordered_by_dest.to_scala())
+    assert unordered_by_src == AdjListType.from_scala(unordered_by_src.to_scala())
diff --git a/pyspark/tests/test_info.py b/pyspark/tests/test_info.py
new file mode 100644
index 000000000..4c57a0aab
--- /dev/null
+++ b/pyspark/tests/test_info.py
@@ -0,0 +1,547 @@
+# Copyright 2022-2023 Alibaba Group Holding Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
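+
+# These tests exercise the info classes in two directions: objects built with
+# from_python() are round-tripped through the JVM, and YAML fixtures from the
+# `testing` submodule are loaded from disk and inspected via their getters.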
+
+from pathlib import Path
+
+import pytest
+import yaml
+from graphar_pyspark import initialize
+from graphar_pyspark.enums import AdjListType, FileType, GarType
+from graphar_pyspark.info import (
+    AdjList,
+    EdgeInfo,
+    GraphInfo,
+    Property,
+    PropertyGroup,
+    VertexInfo,
+)
+from pyspark.sql.utils import IllegalArgumentException
+
+GRAPHAR_TESTS_EXAMPLES = Path(__file__).parent.parent.parent.joinpath("testing")
+
+
+def test_property(spark):
+    initialize(spark)
+    property_from_py = Property.from_python("name", GarType.BOOL, False)
+
+    assert property_from_py == Property.from_scala(property_from_py.to_scala())
+    assert property_from_py != 0
+    assert property_from_py == Property.from_python("name", GarType.BOOL, False)
+
+    property_from_py.set_name("new_name")
+    property_from_py.set_data_type(GarType.INT32)
+    property_from_py.set_is_primary(True)
+
+    assert property_from_py.get_name() == "new_name"
+    assert property_from_py.get_data_type() == GarType.INT32
+    assert property_from_py.get_is_primary() is True
+
+
+def test_property_group(spark):
+    initialize(spark)
+    p_group_from_py = PropertyGroup.from_python(
+        "prefix",
+        FileType.CSV,
+        [
+            Property.from_python("non_primary", GarType.DOUBLE, False),
+            Property.from_python("primary", GarType.INT64, True),
+        ],
+    )
+
+    assert p_group_from_py == PropertyGroup.from_scala(p_group_from_py.to_scala())
+    assert p_group_from_py != 0
+
+    p_group_from_py.set_prefix("new_prefix")
+    p_group_from_py.set_file_type(FileType.ORC)
+    p_group_from_py.set_properties(
+        p_group_from_py.get_properties()
+        + [Property.from_python("another_one", GarType.LIST, False)]
+    )
+
+    assert p_group_from_py.get_prefix() == "new_prefix"
+    assert p_group_from_py.get_file_type() == FileType.ORC
+    assert all(
+        p_left == p_right
+        for p_left, p_right in zip(
+            p_group_from_py.get_properties(),
+            [
+                Property.from_python("non_primary", GarType.DOUBLE, False),
+                Property.from_python("primary", GarType.INT64, True),
+                Property.from_python("another_one", GarType.LIST, False),
+            ],
+        )
+    )
+
+
+def test_adj_list(spark):
+    initialize(spark)
+
+    props_list_1 = [
+        Property.from_python("non_primary", GarType.DOUBLE, False),
+        Property.from_python("primary", GarType.INT64, True),
+    ]
+
+    props_list_2 = [
+        Property.from_python("non_primary", GarType.DOUBLE, False),
+        Property.from_python("primary", GarType.INT64, True),
+        Property.from_python("another_one", GarType.LIST, False),
+    ]
+
+    adj_list_from_py = AdjList.from_python(
+        True,
+        "dest",
+        "prefix",
+        FileType.PARQUET,
+        [
+            PropertyGroup.from_python("prefix1", FileType.PARQUET, props_list_1),
+            PropertyGroup.from_python("prefix2", FileType.ORC, props_list_2),
+        ],
+    )
+
+    assert adj_list_from_py == AdjList.from_scala(adj_list_from_py.to_scala())
+    assert adj_list_from_py != 0
+    assert adj_list_from_py.get_adj_list_type() == AdjListType.ORDERED_BY_DEST
+
+    adj_list_from_py.set_aligned_by("src")
+    assert adj_list_from_py.get_adj_list_type() == AdjListType.ORDERED_BY_SOURCE
+    adj_list_from_py.set_ordered(False)
+    assert adj_list_from_py.get_adj_list_type() == AdjListType.UNORDERED_BY_SOURCE
+    adj_list_from_py.set_aligned_by("dest")
+    assert adj_list_from_py.get_adj_list_type() == AdjListType.UNORDERED_BY_DEST
+    adj_list_from_py.set_prefix("prefix_new")
+    assert adj_list_from_py.get_prefix() == "prefix_new"
+
+    adj_list_from_py.set_file_type(FileType.CSV)
+    assert adj_list_from_py.get_file_type() == FileType.CSV
+
+    adj_list_from_py.set_property_groups(
+        adj_list_from_py.get_property_groups()
+        + [
+            PropertyGroup.from_python(
+                "prefix3", FileType.CSV, props_list_1 + props_list_2
+            )
+        ]
+    )
+    assert all(
+        pg_left == pg_right
+        for pg_left, pg_right in zip(
+            adj_list_from_py.get_property_groups(),
+            [
+                PropertyGroup.from_python("prefix1", FileType.PARQUET, props_list_1),
+                PropertyGroup.from_python("prefix2", FileType.ORC, props_list_2),
+                PropertyGroup.from_python(
+                    "prefix3", FileType.CSV, props_list_1 + props_list_2
+                ),
+            ],
+        )
+    )
+
+
+def test_vertex_info(spark):
+    initialize(spark)
+
+    props_list_1 = [
+        Property.from_python("non_primary", GarType.DOUBLE, False),
+        Property.from_python("primary", GarType.INT64, True),
+    ]
+
+    props_list_2 = [
+        Property.from_python("non_primary", GarType.DOUBLE, False),
+        Property.from_python("primary", GarType.INT64, True),
+        Property.from_python("another_one", GarType.LIST, False),
+    ]
+
+    vertex_info_from_py = VertexInfo.from_python(
+        "label",
+        100,
+        "prefix",
+        [
+            PropertyGroup.from_python("prefix1", FileType.PARQUET, props_list_1),
+            PropertyGroup.from_python("prefix2", FileType.ORC, props_list_2),
+        ],
+        "1",
+    )
+
+    assert vertex_info_from_py.contain_property_group(
+        PropertyGroup.from_python("prefix1", FileType.PARQUET, props_list_1)
+    )
+    assert not vertex_info_from_py.contain_property_group(
+        PropertyGroup.from_python("prefix333", FileType.PARQUET, props_list_1)
+    )
+
+    assert vertex_info_from_py.contain_property("primary")
+    assert vertex_info_from_py.contain_property("non_primary")
+    assert not vertex_info_from_py.contain_property("non_existent_one")
+
+    yaml_string = vertex_info_from_py.dump()
+    restored_py_obj = yaml.safe_load(yaml_string)
+
+    assert restored_py_obj["label"] == "label"
+    assert restored_py_obj["prefix"] == "prefix"
+
+    # test setters
+    vertex_info_from_py.set_label("new_label")
+    assert vertex_info_from_py.get_label() == "new_label"
+
+    vertex_info_from_py.set_chunk_size(101)
+    assert vertex_info_from_py.get_chunk_size() == 101
+
+    vertex_info_from_py.set_prefix("new_prefix")
+    assert vertex_info_from_py.get_prefix() == "new_prefix"
+
+    vertex_info_from_py.set_version("2")
+    assert vertex_info_from_py.get_version() == "2"
+
+    vertex_info_from_py.set_property_groups(
+        [
+            PropertyGroup.from_python("prefix1", FileType.PARQUET, props_list_1),
+            PropertyGroup.from_python("prefix2", FileType.ORC, props_list_2),
+            PropertyGroup.from_python(
+                "prefix3", FileType.CSV, props_list_1 + props_list_2
+            ),
+        ],
+    )
+    assert len(vertex_info_from_py.get_property_groups()) == 3
+
+    # Get property group
+    assert vertex_info_from_py.get_property_group("primary") is not None
+    assert vertex_info_from_py.get_property_group("non_primary") is not None
+    assert vertex_info_from_py.get_property_group("another_one") is not None
+
+    # The message check used to sit inside the raises-block and never ran;
+    # it is verified after the block instead.
+    with pytest.raises(IllegalArgumentException) as e:
+        vertex_info_from_py.get_property_group("non_existent_one")
+    assert "Property not found" in str(e.value)
+
+    assert vertex_info_from_py.get_property_type("primary") == GarType.INT64
+    assert vertex_info_from_py.get_property_type("non_primary") == GarType.DOUBLE
+    assert vertex_info_from_py.get_property_type("another_one") == GarType.LIST
+
+    with pytest.raises(IllegalArgumentException) as e:
+        vertex_info_from_py.get_property_type("non_existent_one")
+    assert "Property not found" in str(e.value)
+
+    # Load from disk
+    person_info = VertexInfo.load_vertex_info(
+        str(
+            GRAPHAR_TESTS_EXAMPLES.joinpath("transformer")
+            .joinpath("person.vertex.yml")
+            .absolute()
+        )
+    )
+    assert person_info.get_label() == "person"
+    assert person_info.get_chunk_size() == 50
+    assert person_info.get_prefix() == "vertex/person/"
+    assert person_info.get_version() == "gar/v1"
+    assert len(person_info.get_property_groups()) == 2
+    assert person_info.get_property_type("id") == GarType.INT64
+    assert person_info.get_property_type("firstName") == GarType.STRING
+
+    # Primary keys logic
+    assert person_info.get_primary_key() == "id"
+    assert person_info.is_primary_key("id")
+    assert not person_info.is_primary_key("firstName")
+
+    # Other
+    assert (
+        person_info.get_vertices_num_file_path()
+        == person_info.get_prefix() + "vertex_count"
+    )
+    assert person_info.is_validated()
+
+    assert (
+        VertexInfo.from_scala(person_info.to_scala()).get_prefix()
+        == person_info.get_prefix()
+    )
+
+    nebula_vi = VertexInfo.load_vertex_info(
+        str(
+            GRAPHAR_TESTS_EXAMPLES.joinpath("nebula")
+            .joinpath("team.vertex.yml")
+            .absolute()
+        )
+    )
+    assert (
+        GRAPHAR_TESTS_EXAMPLES.joinpath("nebula")
+        .joinpath(nebula_vi.get_file_path(nebula_vi.get_property_group("name"), 0))
+        .exists()
+    )
+    assert len(nebula_vi.get_path_prefix(nebula_vi.get_property_group("name"))) > 0
+
+
+def test_edge_info(spark):
+    initialize(spark)
+
+    py_edge_info = EdgeInfo.from_python(
+        src_label="src_label",
+        edge_label="edge_label",
+        dst_label="dst_label",
+        chunk_size=100,
+        src_chunk_size=101,
+        dst_chunk_size=102,
+        directed=True,
+        prefix="prefix",
+        adj_lists=[],
+        version="v1",
+    )
+
+    # getters/setters
+    py_edge_info.set_src_label("new_src_label")
+    assert py_edge_info.get_src_label() == "new_src_label"
+
+    py_edge_info.set_dst_label("new_dst_label")
+    assert py_edge_info.get_dst_label() == "new_dst_label"
+
+    py_edge_info.set_edge_label("new_edge_label")
+    assert py_edge_info.get_edge_label() == "new_edge_label"
+
+    py_edge_info.set_chunk_size(101)
+    assert py_edge_info.get_chunk_size() == 101
+
+    py_edge_info.set_src_chunk_size(102)
+    assert py_edge_info.get_src_chunk_size() == 102
+
+    py_edge_info.set_dst_chunk_size(103)
+    assert py_edge_info.get_dst_chunk_size() == 103
+
+    py_edge_info.set_directed(False)
+    assert py_edge_info.get_directed() is False
+
+    py_edge_info.set_prefix("new_prefix")
+    assert py_edge_info.get_prefix() == "new_prefix"
+
+    py_edge_info.set_version("v2")
+    assert py_edge_info.get_version() == "v2"
+
+    props_list_1 = [
+        Property.from_python("non_primary", GarType.DOUBLE, False),
+        Property.from_python("primary", GarType.INT64, True),
+    ]
+    py_edge_info.set_adj_lists(
+        [
+            AdjList.from_python(
+                True,
+                "dest",
+                "prefix",
+                FileType.PARQUET,
+                [
+                    PropertyGroup.from_python(
+                        "prefix1", FileType.PARQUET, props_list_1
+                    ),
+                ],
+            )
+        ]
+    )
+    assert len(py_edge_info.get_adj_lists()) == 1
+
+    # Load from YAML
+    person_knows_person_info = EdgeInfo.load_edge_info(
+        str(
+            GRAPHAR_TESTS_EXAMPLES.joinpath("transformer")
+            .joinpath("person_knows_person.edge.yml")
+            .absolute()
+        )
+    )
+    assert person_knows_person_info.get_directed() is False
+    assert person_knows_person_info.contain_property("creationDate")
+    assert (
+        person_knows_person_info.get_adj_list_prefix(AdjListType.UNORDERED_BY_DEST)
+        is not None
+    )
+    assert (
+        person_knows_person_info.get_adj_list_prefix(AdjListType.ORDERED_BY_SOURCE)
+        is not None
+    )
+    with pytest.raises(IllegalArgumentException) as e:
+        person_knows_person_info.get_adj_list_prefix(AdjListType.ORDERED_BY_DEST)
+    assert "adj list type not found" in str(e.value)
+
+    assert person_knows_person_info.contain_adj_list(AdjListType.UNORDERED_BY_DEST)
+    assert not person_knows_person_info.contain_adj_list(
+        AdjListType.UNORDERED_BY_SOURCE
+    )
+
+    assert person_knows_person_info.get_chunk_size() == 500
+    assert (
+        person_knows_person_info.get_offset_path_prefix(AdjListType.ORDERED_BY_SOURCE)
+        is not None
+    )
+
+    assert (
+        person_knows_person_info.get_adj_list_file_type(AdjListType.UNORDERED_BY_DEST)
+        == FileType.CSV
+    )
+    assert (
+        person_knows_person_info.get_adj_list_file_type(AdjListType.ORDERED_BY_SOURCE)
+        != 0
+    )
+    assert (
+        len(person_knows_person_info.get_property_groups(AdjListType.ORDERED_BY_SOURCE))
+        == 1
+    )
+    assert person_knows_person_info.contain_property_group(
+        person_knows_person_info.get_property_groups(AdjListType.UNORDERED_BY_DEST)[0],
+        AdjListType.UNORDERED_BY_DEST,
+    )
+    assert person_knows_person_info.get_property_type("creationDate") == GarType.STRING
+    assert not person_knows_person_info.is_primary_key("creationDate")
+    assert person_knows_person_info.get_primary_key() == ""
+    assert person_knows_person_info.is_validated()
+    assert (
+        person_knows_person_info.get_vertices_num_file_path(
+            AdjListType.ORDERED_BY_SOURCE
+        )
+        == "edge/person_knows_person/ordered_by_source/vertex_count"
+    )
+    assert (
+        person_knows_person_info.get_edges_num_path_prefix(
+            AdjListType.ORDERED_BY_SOURCE
+        )
+        == "edge/person_knows_person/ordered_by_source/edge_count"
+    )
+    assert (
+        person_knows_person_info.get_edges_num_file_path(
+            0, AdjListType.ORDERED_BY_SOURCE
+        )
+        == "edge/person_knows_person/ordered_by_source/edge_count0"
+    )
+    assert (
+        person_knows_person_info.get_adj_list_offset_file_path(
+            0, AdjListType.ORDERED_BY_SOURCE
+        )
+        == "edge/person_knows_person/ordered_by_source/offset/chunk0"
+    )
+    assert (
+        person_knows_person_info.get_adj_list_file_path(
+            0, 0, AdjListType.ORDERED_BY_SOURCE
+        )
+        == "edge/person_knows_person/ordered_by_source/adj_list/part0/chunk0"
+    )
+    assert (
+        person_knows_person_info.get_adj_list_path_prefix(
+            None, AdjListType.ORDERED_BY_SOURCE
+        )
+        is not None
+    )
+    assert (
+        person_knows_person_info.get_adj_list_path_prefix(
+            0, AdjListType.ORDERED_BY_SOURCE
+        )
+        is not None
+    )
+    assert (
+        person_knows_person_info.get_property_file_path(
+            person_knows_person_info.get_property_group(
+                "creationDate", AdjListType.ORDERED_BY_SOURCE
+            ),
+            AdjListType.ORDERED_BY_SOURCE,
+            0,
+            0,
+        )
+        is not None
+    )
+    assert (
+        person_knows_person_info.get_property_group_path_prefix(
+            person_knows_person_info.get_property_group(
+                "creationDate", AdjListType.ORDERED_BY_SOURCE
+            ),
+            AdjListType.ORDERED_BY_SOURCE,
+            0,
+        )
+        is not None
+    )
+    assert (
+        person_knows_person_info.get_property_group_path_prefix(
+            person_knows_person_info.get_property_group(
+                "creationDate", AdjListType.ORDERED_BY_SOURCE
+            ),
+            AdjListType.ORDERED_BY_SOURCE,
+            None,
+        )
+        is not None
+    )
+    assert person_knows_person_info.get_concat_key() == "person_knows_person"
+    yaml_string = person_knows_person_info.dump()
+    parsed_dict = yaml.safe_load(yaml_string)
+    assert "prefix" in parsed_dict
+
+
+def test_graph_info(spark):
+    initialize(spark)
+
+    modern_graph_person = GraphInfo.load_graph_info(
+        str(
+            GRAPHAR_TESTS_EXAMPLES.joinpath("modern_graph")
+            .joinpath("modern_graph.graph.yml")
+            .absolute()
+        )
+    )
+    assert len(modern_graph_person.get_edges()) == 2
+    assert modern_graph_person.get_name() == "modern_graph"
+    assert len(modern_graph_person.get_vertex_infos().keys()) == 2
+    assert "person" in modern_graph_person.get_vertex_infos().keys()
+    assert "software" in modern_graph_person.get_vertex_infos().keys()
+    assert len(modern_graph_person.get_edge_infos()) == 2
+    assert "person_knows_person" in modern_graph_person.get_edge_infos().keys()
+    assert "person_created_software" in modern_graph_person.get_edge_infos().keys()
+
+    assert modern_graph_person.get_edge_info("person", "knows", "person") is not None
+    assert modern_graph_person.get_vertex_info("person") is not None
+
+    # YAML
+    yaml_dict = yaml.safe_load(modern_graph_person.dump())
+    assert "name" in yaml_dict
+    assert yaml_dict["version"] == "gar/v1"
+
+    # Python constructor and setters
+    py_graph_info = GraphInfo.from_python(
+        "name", "prefix", ["person", "software"], ["person_knows_person"], "v1"
+    )
+    py_graph_info.set_name("new_name")
+    assert py_graph_info.get_name() == "new_name"
+    py_graph_info.set_prefix("new_prefix")
+    assert py_graph_info.get_prefix() == "new_prefix"
+
+    init_vertices_size = len(py_graph_info.get_vertices())
+    new_vertices = py_graph_info.get_vertices()
+    new_vertices.append("new_one")
+    py_graph_info.set_vertices(new_vertices)
+    assert len(py_graph_info.get_vertices()) > init_vertices_size
+
+    init_edges_size = len(py_graph_info.get_edges())
+    new_edges = py_graph_info.get_edges()
+    new_edges.append("new_one")
+    py_graph_info.set_edges(new_edges)
+    assert len(py_graph_info.get_edges()) > init_edges_size
+
+    py_graph_info.set_version("v2")
+    assert py_graph_info.get_version() == "v2"
+
+    py_graph_info.add_edge_info(
+        EdgeInfo.from_python(
+            "src_label100",
+            "edge_label100",
+            "dst_label100",
+            10,
+            100,
+            100,
+            True,
+            "prefix",
+            [],
+            "v1",
+        )
+    )
+    assert len(py_graph_info.get_edge_infos()) == 1
+
+    py_graph_info.add_vertex_info(
+        VertexInfo.from_python("some", 100, "prefix", [], "v1")
+    )
+    assert len(py_graph_info.get_vertex_infos()) == 1
diff --git a/pyspark/tests/test_reader.py b/pyspark/tests/test_reader.py
new file mode 100644
index 000000000..4bafa90f9
--- /dev/null
+++ b/pyspark/tests/test_reader.py
@@ -0,0 +1,139 @@
+# Copyright 2022-2023 Alibaba Group Holding Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pathlib import Path
+
+from graphar_pyspark import initialize
+from graphar_pyspark.enums import AdjListType
+from graphar_pyspark.graph import EdgeLabels, GraphReader
+from graphar_pyspark.info import EdgeInfo, GraphInfo, VertexInfo
+from graphar_pyspark.reader import EdgeReader, VertexReader
+
+GRAPHAR_TESTS_EXAMPLES = Path(__file__).parent.parent.parent.joinpath("testing")
+
+
+def test_vertex_reader(spark):
+    initialize(spark)
+
+    vertex_info = VertexInfo.load_vertex_info(
+        str(
+            GRAPHAR_TESTS_EXAMPLES.joinpath("modern_graph")
+            .joinpath("person.vertex.yml")
+            .absolute()
+        )
+    )
+    vertex_reader = VertexReader.from_python(
+        str(GRAPHAR_TESTS_EXAMPLES.joinpath("modern_graph").absolute()),
+        vertex_info,
+    )
+    assert VertexReader.from_scala(vertex_reader.to_scala()) is not None
+    assert vertex_reader.read_vertices_number() > 0
+    assert (
+        vertex_reader.read_vertex_property_group(
+            vertex_info.get_property_group("name")
+        ).count()
+        > 0
+    )
+    assert (
+        vertex_reader.read_vertex_property_chunk(
+            vertex_info.get_property_groups()[0], 0
+        ).count()
+        > 0
+    )
+    assert (
+        vertex_reader.read_all_vertex_property_groups().count()
+        >= vertex_reader.read_vertex_property_group(
+            vertex_info.get_property_group("age")
+        ).count()
+    )
+    assert (
+        vertex_reader.read_multiple_vertex_property_groups(
+            [vertex_info.get_property_group("name")]
+        ).count()
+        > 0
+    )
+
+
+def test_edge_reader(spark):
+    initialize(spark)
+
+    edge_info = EdgeInfo.load_edge_info(
+        str(
+            GRAPHAR_TESTS_EXAMPLES.joinpath("modern_graph")
+            .joinpath("person_knows_person.edge.yml")
+            .absolute()
+        ),
+    )
+
+    edge_reader = EdgeReader.from_python(
+        str(GRAPHAR_TESTS_EXAMPLES.joinpath("modern_graph").absolute()),
+        edge_info,
+        AdjListType.ORDERED_BY_SOURCE,
+    )
+    assert EdgeReader.from_scala(edge_reader.to_scala()) is not None
+    assert (
+        "_graphArEdgeIndex"
+        in edge_reader.read_edge_property_group(
+            edge_info.get_property_group("weight", AdjListType.ORDERED_BY_SOURCE)
+        ).columns
+    )
+    assert (
+        edge_reader.read_edge_property_group(
+            edge_info.get_property_group("weight", AdjListType.ORDERED_BY_SOURCE)
+        ).count()
+        > 0
+    )
+    assert edge_reader.read_vertex_chunk_number() > 0
+    assert edge_reader.read_edges_number() > 0
+    assert edge_reader.read_edges_number(0) == 0
+    assert edge_reader.read_offset(0).count() > 0
+
+
+def test_graph_reader(spark):
+    initialize(spark)
+
+    graph_info = GraphReader.read(
+        str(
+            GRAPHAR_TESTS_EXAMPLES.joinpath("modern_graph")
+            .joinpath("modern_graph.graph.yml")
+            .absolute()
+        )
+    )
+    assert graph_info is not None
+    assert len(graph_info.vertex_dataframes.keys()) > 0
+    assert len(graph_info.edge_dataframes.keys()) > 0
+    assert "person" in graph_info.vertex_dataframes.keys()
+    assert (
+        EdgeLabels("person", "created", "software") in graph_info.edge_dataframes.keys()
+    )
+    assert graph_info.vertex_dataframes["person"].count() > 0
+    assert (
+        "ordered_by_source"
+        in graph_info.edge_dataframes[EdgeLabels("person", "created", "software")]
+    )
+    assert (
+        graph_info.edge_dataframes[EdgeLabels("person", "created", "software")][
+            "ordered_by_source"
+        ].count()
+        > 0
+    )
+
+    # test read with graph info
+    graph_info_obj = GraphInfo.load_graph_info(
+        str(
+            GRAPHAR_TESTS_EXAMPLES.joinpath("modern_graph")
+            .joinpath("modern_graph.graph.yml")
+            .absolute()
+        )
+    )
+    graph_info2 = GraphReader.read(graph_info_obj)
+    assert len(graph_info2.vertex_dataframes.keys()) == len(
+        graph_info.vertex_dataframes.keys()
+    )
diff --git a/pyspark/tests/test_transform.py b/pyspark/tests/test_transform.py
new file mode 100644
index 000000000..96aca8619
--- /dev/null
+++ b/pyspark/tests/test_transform.py
@@ -0,0 +1,46 @@
+# Copyright 2022-2023 Alibaba Group Holding Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pathlib import Path
+
+import pytest
+from graphar_pyspark import initialize
+from graphar_pyspark.errors import InvalidGraphFormatError
+from graphar_pyspark.graph import GraphTransformer
+from graphar_pyspark.info import GraphInfo
+
+GRAPHAR_TESTS_EXAMPLES = Path(__file__).parent.parent.parent.joinpath("testing")
+
+
+def test_transform(spark):
+    initialize(spark)
+    source_path = str(
+        GRAPHAR_TESTS_EXAMPLES.joinpath("ldbc_sample/parquet/ldbc_sample.graph.yml")
+        .absolute()
+    )
+    dest_path = str(
+        GRAPHAR_TESTS_EXAMPLES.joinpath("transformer/ldbc_sample.graph.yml")
+        .absolute()
+    )
+    GraphTransformer.transform(source_path, dest_path)
+
+    source_info = GraphInfo.load_graph_info(source_path)
+    dest_info = GraphInfo.load_graph_info(dest_path)
+
+    # Mixing a path with an info object is an invalid argument combination.
+    with pytest.raises(InvalidGraphFormatError):
+        GraphTransformer.transform(source_path, source_info)
+
+    GraphTransformer.transform(source_info, dest_info)
diff --git a/pyspark/tests/test_writer.py b/pyspark/tests/test_writer.py
new file mode 100644
index 000000000..207abff6e
--- /dev/null
+++ b/pyspark/tests/test_writer.py
@@ -0,0 +1,118 @@
+# Copyright 2022-2023 Alibaba Group Holding Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
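+
+# Writer smoke tests: read the nebula and ldbc_sample fixtures back in, write
+# them out under /tmp, and check that the expected chunk files appear on disk.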
+
+from pathlib import Path
+
+from graphar_pyspark import initialize
+from graphar_pyspark.enums import AdjListType
+from graphar_pyspark.graph import GraphWriter
+from graphar_pyspark.info import EdgeInfo, GraphInfo, VertexInfo
+from graphar_pyspark.reader import EdgeReader, VertexReader
+from graphar_pyspark.util import IndexGenerator
+from graphar_pyspark.writer import EdgeWriter, VertexWriter
+
+GRAPHAR_TESTS_EXAMPLES = Path(__file__).parent.parent.parent.joinpath("testing")
+
+
+def test_vertex_writer(spark):
+    initialize(spark)
+    vertex_info = VertexInfo.load_vertex_info(
+        str(
+            GRAPHAR_TESTS_EXAMPLES.joinpath("nebula")
+            .joinpath("player.vertex.yml")
+            .absolute()
+        )
+    )
+    vertex_reader = VertexReader.from_python(
+        str(GRAPHAR_TESTS_EXAMPLES.joinpath("nebula").absolute()),
+        vertex_info,
+    )
+
+    vertex_df = vertex_reader.read_all_vertex_property_groups()
+    vertex_df_with_index = IndexGenerator.generate_vertex_index_column(vertex_df)
+    num_vertices = vertex_reader.read_vertices_number()
+
+    vertex_writer = VertexWriter.from_python(
+        "/tmp/nebula",
+        vertex_info,
+        vertex_df_with_index,
+        num_vertices,
+    )
+    vertex_writer.write_vertex_properties()
+    vertex_writer.write_vertex_properties(vertex_info.get_property_groups()[0])
+    assert Path("/tmp/nebula").exists()
+    assert Path("/tmp/nebula/vertex/player/vertex_count").exists()
+    assert Path("/tmp/nebula/vertex/player/_vertexId_name_age/chunk0").exists()
+
+    assert VertexWriter.from_scala(vertex_writer.to_scala()) is not None
+
+
+def test_edge_writer(spark):
+    initialize(spark)
+    edge_info = EdgeInfo.load_edge_info(
+        str(
+            GRAPHAR_TESTS_EXAMPLES.joinpath("nebula")
+            .joinpath("player_follow_player.edge.yml")
+            .absolute()
+        )
+    )
+
+    edge_reader = EdgeReader.from_python(
+        str(GRAPHAR_TESTS_EXAMPLES.joinpath("nebula").absolute()),
+        edge_info,
+        AdjListType.ORDERED_BY_SOURCE,
+    )
+    edge_df = edge_reader.read_edges()
+    # The writer is constructed with the vertex count, not the edge count.
+    vertex_num = edge_reader.read_vertices_number()
+
+    edge_writer = EdgeWriter.from_python(
+        "/tmp/nebula",
+        edge_info,
+        AdjListType.ORDERED_BY_SOURCE,
+        vertex_num,
+        edge_df,
+    )
+    edge_writer.write_edge_properties()
+    assert Path("/tmp/nebula").exists()
+    assert Path("/tmp/nebula/edge").exists()
+
+    assert EdgeWriter.from_scala(edge_writer.to_scala()) is not None
+
+    edge_writer.write_edges()
+    edge_writer.write_edge_properties(
+        edge_info.get_property_group("degree", AdjListType.ORDERED_BY_SOURCE),
+    )
+    edge_writer.write_edge_properties()
+    edge_writer.write_adj_list()
+
+
+def test_graph_writer(spark):
+    initialize(spark)
+    graph_writer = GraphWriter.from_python()
+
+    assert GraphWriter.from_scala(graph_writer.to_scala()) is not None
+    vertex_file_path = str(
+        GRAPHAR_TESTS_EXAMPLES.joinpath("ldbc_sample/person_0_0.csv").absolute()
+    )
+    vertex_df = (
+        spark.read.option("delimiter", "|")
+        .option("header", "true")
+        .csv(vertex_file_path)
+    )
+    label = "person"
+    graph_writer.put_vertex_data(label, vertex_df, "id")
+
+    edge_file_path = str(
+        GRAPHAR_TESTS_EXAMPLES.joinpath("ldbc_sample/person_knows_person_0_0.csv")
+        .absolute()
+    )
+    edge_df = (
+        spark.read.option("delimiter", "|")
+        .option("header", "true")
+        .csv(edge_file_path)
+    )
+    tag = ("person", "knows", "person")
+
+    graph_info = GraphInfo.from_python(
+        "ldbc",
+        "/tmp/ldbc",
+        ["person.vertex.yml"],
+        ["person_knows_person.yml"],
+        "gar/v1",
+    )
+
+    graph_writer.put_edge_data(tag, edge_df)
+    graph_writer.write_with_graph_info(graph_info)
+    graph_writer.write("/tmp/ldbc", "ldbc")
+
+    assert Path("/tmp/ldbc").exists()