style(docstrings): format docstrings according to black style
cpcloud committed Sep 12, 2023
1 parent 5ed750f commit 9e1109b
Showing 34 changed files with 431 additions and 284 deletions.
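
Every hunk below applies the same mechanical rules that black uses: double quotes instead of single quotes, spaces after commas, long calls wrapped at the default 88-character line length, and trailing commas on wrapped argument lists. As a minimal before/after sketch of the style change (illustrative only, not taken from any file in this commit; plain `dict` is used so the snippet runs in any Python session):

Before:
>>> data = dict(name='penguins', measurements=[1,2,3], labels={'species': 'adelie', 'island': 'torgersen'})

After:
>>> data = dict(
...     name="penguins",
...     measurements=[1, 2, 3],
...     labels={"species": "adelie", "island": "torgersen"},
... )
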
12 changes: 8 additions & 4 deletions ibis/backends/base/__init__.py
@@ -175,7 +175,7 @@ class TablesAccessor(collections.abc.Mapping):
Examples
--------
>>> con = ibis.sqlite.connect("example.db")
>>> people = con.tables['people'] # access via index
>>> people = con.tables["people"] # access via index
>>> people = con.tables.people # access via attribute
"""

@@ -848,7 +848,7 @@ def tables(self):
Examples
--------
>>> con = ibis.sqlite.connect("example.db")
>>> people = con.tables['people'] # access via index
>>> people = con.tables["people"] # access via index
>>> people = con.tables.people # access via attribute
"""
return TablesAccessor(self)
@@ -1202,11 +1202,15 @@ def connect(resource: Path | str, **kwargs: Any) -> BaseBackend:
Connect to a PostgreSQL server:
>>> con = ibis.connect("postgres://user:password@hostname:5432") # quartodoc: +SKIP # doctest: +SKIP
>>> con = ibis.connect(
... "postgres://user:password@hostname:5432"
... ) # quartodoc: +SKIP # doctest: +SKIP
Connect to BigQuery:
>>> con = ibis.connect("bigquery://my-project/my-dataset") # quartodoc: +SKIP # doctest: +SKIP
>>> con = ibis.connect(
... "bigquery://my-project/my-dataset"
... ) # quartodoc: +SKIP # doctest: +SKIP
"""
url = resource = str(resource)

24 changes: 13 additions & 11 deletions ibis/backends/base/df/timecontext.py
@@ -162,30 +162,30 @@ def construct_time_context_aware_series(
Examples
--------
>>> import pandas as pd
>>> from ibis.backends.base.df.timecontext import construct_time_context_aware_series
>>> from ibis.backends.base.df.timecontext import (
... construct_time_context_aware_series,
... )
>>> df = pd.DataFrame(
... {
... 'time': pd.Series(
... pd.date_range(
... start='2017-01-02', periods=3
... ).values
... ),
... 'id': [1,2,3],
... 'value': [1.1, 2.2, 3.3],
... "time": pd.Series(pd.date_range(start="2017-01-02", periods=3).values),
... "id": [1, 2, 3],
... "value": [1.1, 2.2, 3.3],
... }
... )
>>> df
time id value
0 2017-01-02 1 1.1
1 2017-01-03 2 2.2
2 2017-01-04 3 3.3
>>> series = df['value']
>>> series = df["value"]
>>> series
0 1.1
1 2.2
2 3.3
Name: value, dtype: float64
>>> construct_time_context_aware_series(series, df) # quartodoc: +SKIP # doctest: +SKIP
>>> construct_time_context_aware_series(
... series, df
... ) # quartodoc: +SKIP # doctest: +SKIP
time
0 2017-01-02 1.1
1 2017-01-03 2.2
@@ -203,7 +203,9 @@ def construct_time_context_aware_series(
2 2017-01-04 3.3
Name: value, dtype: float64
>>> construct_time_context_aware_series(timed_series, df) # quartodoc: +SKIP # doctest: +SKIP
>>> construct_time_context_aware_series(
... timed_series, df
... ) # quartodoc: +SKIP # doctest: +SKIP
time
0 2017-01-02 1.1
1 2017-01-03 2.2
1 change: 1 addition & 0 deletions ibis/backends/base/sql/__init__.py
@@ -171,6 +171,7 @@ def raw_sql(self, query: str):
>>> con = ibis.connect("duckdb://")
>>> with con.raw_sql("SELECT 1") as cursor:
... result = cursor.fetchall()
...
>>> result
[(1,)]
>>> cursor.closed
10 changes: 3 additions & 7 deletions ibis/backends/bigquery/client.py
@@ -178,8 +178,7 @@ def parse_project_and_dataset(project: str, dataset: str = "") -> tuple[str, str
Examples
--------
>>> data_project, billing_project, dataset = parse_project_and_dataset(
... 'ibis-gbq',
... 'foo-bar.my_dataset'
... "ibis-gbq", "foo-bar.my_dataset"
... )
>>> data_project
'foo-bar'
@@ -188,18 +187,15 @@ def parse_project_and_dataset(project: str, dataset: str = "") -> tuple[str, str
>>> dataset
'my_dataset'
>>> data_project, billing_project, dataset = parse_project_and_dataset(
... 'ibis-gbq',
... 'my_dataset'
... "ibis-gbq", "my_dataset"
... )
>>> data_project
'ibis-gbq'
>>> billing_project
'ibis-gbq'
>>> dataset
'my_dataset'
>>> data_project, billing_project, _ = parse_project_and_dataset(
... 'ibis-gbq'
... )
>>> data_project, billing_project, _ = parse_project_and_dataset("ibis-gbq")
>>> data_project
'ibis-gbq'
"""
23 changes: 13 additions & 10 deletions ibis/backends/bigquery/udf/__init__.py
@@ -87,6 +87,7 @@ def python(
>>> @udf.python(input_type=[dt.double], output_type=dt.double)
... def add_one(x):
... return x + 1
...
>>> print(add_one.sql)
CREATE TEMPORARY FUNCTION add_one_0(x FLOAT64)
RETURNS FLOAT64
@@ -97,14 +98,16 @@ def python(
}
return add_one(x);
""";
>>> @udf.python(input_type=[dt.double, dt.double],
... output_type=dt.Array(dt.double))
>>> @udf.python(
... input_type=[dt.double, dt.double], output_type=dt.Array(dt.double)
... )
... def my_range(start, stop):
... def gen(start, stop):
... curr = start
... while curr < stop:
... yield curr
... curr += 1
...
... result = []
... for value in gen(start, stop):
... result.append(value)
@@ -132,9 +135,9 @@ def python(
""";
>>> @udf.python(
... input_type=[dt.double, dt.double],
... output_type=dt.Struct.from_tuples([
... ('width', 'double'), ('height', 'double')
... ])
... output_type=dt.Struct.from_tuples(
... [("width", "double"), ("height", "double")]
... ),
... )
... def my_rectangle(width, height):
... class Rectangle:
@@ -247,7 +250,7 @@ def js(
... name="add_one",
... params={"a": dt.double},
... output_type=dt.double,
... body="return x + 1"
... body="return x + 1",
... )
>>> print(add_one.sql)
CREATE TEMPORARY FUNCTION add_one_0(x FLOAT64)
@@ -356,10 +359,10 @@ def sql(
>>> from ibis.backends.bigquery import udf
>>> import ibis.expr.datatypes as dt
>>> add_one = udf.sql(
... name="add_one",
... params={'x': dt.double},
... output_type=dt.double,
... sql_expression="x + 1"
... name="add_one",
... params={"x": dt.double},
... output_type=dt.double,
... sql_expression="x + 1",
... )
>>> print(add_one.sql)
CREATE TEMPORARY FUNCTION add_one_0(x FLOAT64)
2 changes: 1 addition & 1 deletion ibis/backends/bigquery/udf/find.py
@@ -44,7 +44,7 @@ def find_names(node: ast.AST) -> list[ast.Name]:
Examples
--------
>>> import ast
>>> node = ast.parse('a + b')
>>> node = ast.parse("a + b")
>>> names = find_names(node)
>>> names
[<....Name object at 0x...>, <....Name object at 0x...>]
4 changes: 2 additions & 2 deletions ibis/backends/dask/execution/util.py
@@ -115,12 +115,12 @@ def coerce_to_output(
>>> coerce_to_output(1, expr) # quartodoc: +SKIP # doctest: +SKIP
0 1
Name: result, dtype: int64
>>> coerce_to_output(1, expr, [1,2,3]) # quartodoc: +SKIP # doctest: +SKIP
>>> coerce_to_output(1, expr, [1, 2, 3]) # quartodoc: +SKIP # doctest: +SKIP
1 1
2 1
3 1
Name: result, dtype: int64
>>> coerce_to_output([1,2,3], expr) # quartodoc: +SKIP # doctest: +SKIP
>>> coerce_to_output([1, 2, 3], expr) # quartodoc: +SKIP # doctest: +SKIP
0 [1, 2, 3]
Name: result, dtype: object
"""
6 changes: 5 additions & 1 deletion ibis/backends/duckdb/__init__.py
@@ -759,6 +759,7 @@ def read_sqlite(self, path: str | Path, table_name: str | None = None) -> ir.Tab
... _ = con.execute("DROP TABLE IF EXISTS t")
... _ = con.execute("CREATE TABLE t (a INT, b TEXT)")
... _ = con.execute("INSERT INTO t VALUES (1, 'a'), (2, 'b'), (3, 'c')")
...
>>> con = ibis.connect("duckdb://")
>>> t = con.read_sqlite("/tmp/sqlite.db", table_name="t")
>>> t
@@ -809,6 +810,7 @@ def attach_sqlite(
... _ = con.execute("DROP TABLE IF EXISTS t")
... _ = con.execute("CREATE TABLE t (a INT, b TEXT)")
... _ = con.execute("INSERT INTO t VALUES (1, 'a'), (2, 'b'), (3, 'c')")
...
>>> con = ibis.connect("duckdb://")
>>> con.list_tables()
[]
@@ -995,7 +997,9 @@ def to_parquet(
Partition on multiple columns.
>>> con.to_parquet(penguins, tempfile.mkdtemp(), partition_by=("year", "island"))
>>> con.to_parquet(
... penguins, tempfile.mkdtemp(), partition_by=("year", "island")
... )
"""
self._run_pre_execute_hooks(expr)
query = self._to_sql(expr, params=params)
32 changes: 19 additions & 13 deletions ibis/backends/impala/__init__.py
@@ -272,10 +272,10 @@ def do_connect(
--------
>>> import os
>>> import ibis
>>> hdfs_host = os.environ.get('IBIS_TEST_NN_HOST', 'localhost')
>>> hdfs_port = int(os.environ.get('IBIS_TEST_NN_PORT', 50070))
>>> impala_host = os.environ.get('IBIS_TEST_IMPALA_HOST', 'localhost')
>>> impala_port = int(os.environ.get('IBIS_TEST_IMPALA_PORT', 21050))
>>> hdfs_host = os.environ.get("IBIS_TEST_NN_HOST", "localhost")
>>> hdfs_port = int(os.environ.get("IBIS_TEST_NN_PORT", 50070))
>>> impala_host = os.environ.get("IBIS_TEST_IMPALA_HOST", "localhost")
>>> impala_port = int(os.environ.get("IBIS_TEST_IMPALA_PORT", 21050))
>>> hdfs = ibis.impala.hdfs_connect(host=hdfs_host, port=hdfs_port)
>>> client = ibis.impala.connect(
... host=impala_host,
@@ -927,11 +927,13 @@ def insert(
Examples
--------
>>> table = 'my_table'
>>> table = "my_table"
>>> con.insert(table, table_expr) # quartodoc: +SKIP # doctest: +SKIP
Completely overwrite contents
>>> con.insert(table, table_expr, overwrite=True) # quartodoc: +SKIP # doctest: +SKIP
>>> con.insert(
... table, table_expr, overwrite=True
... ) # quartodoc: +SKIP # doctest: +SKIP
"""
table = self.table(table_name, database=database)
return table.insert(
@@ -958,9 +960,11 @@ def drop_table(
Examples
--------
>>> table = 'my_table'
>>> db = 'operations'
>>> con.drop_table(table, database=db, force=True) # quartodoc: +SKIP # doctest: +SKIP
>>> table = "my_table"
>>> db = "operations"
>>> con.drop_table(
... table, database=db, force=True
... ) # quartodoc: +SKIP # doctest: +SKIP
"""
statement = DropTable(name, database=database, must_exist=not force)
self._safe_exec_sql(statement)
@@ -1015,10 +1019,12 @@ def cache_table(self, table_name, *, database=None, pool="default"):
Examples
--------
>>> table = 'my_table'
>>> db = 'operations'
>>> pool = 'op_4GB_pool'
>>> con.cache_table('my_table', database=db, pool=pool) # quartodoc: +SKIP # doctest: +SKIP
>>> table = "my_table"
>>> db = "operations"
>>> pool = "op_4GB_pool"
>>> con.cache_table(
... "my_table", database=db, pool=pool
... ) # quartodoc: +SKIP # doctest: +SKIP
"""
statement = ddl.CacheTable(table_name, database=database, pool=pool)
self._safe_exec_sql(statement)
18 changes: 6 additions & 12 deletions ibis/backends/mysql/__init__.py
@@ -65,20 +65,14 @@ def do_connect(
--------
>>> import os
>>> import getpass
>>> host = os.environ.get('IBIS_TEST_MYSQL_HOST', 'localhost')
>>> user = os.environ.get('IBIS_TEST_MYSQL_USER', getpass.getuser())
>>> password = os.environ.get('IBIS_TEST_MYSQL_PASSWORD')
>>> database = os.environ.get('IBIS_TEST_MYSQL_DATABASE',
... 'ibis_testing')
>>> con = connect(
... database=database,
... host=host,
... user=user,
... password=password
... )
>>> host = os.environ.get("IBIS_TEST_MYSQL_HOST", "localhost")
>>> user = os.environ.get("IBIS_TEST_MYSQL_USER", getpass.getuser())
>>> password = os.environ.get("IBIS_TEST_MYSQL_PASSWORD")
>>> database = os.environ.get("IBIS_TEST_MYSQL_DATABASE", "ibis_testing")
>>> con = connect(database=database, host=host, user=user, password=password)
>>> con.list_tables() # doctest: +ELLIPSIS
[...]
>>> t = con.table('functional_alltypes')
>>> t = con.table("functional_alltypes")
>>> t
MySQLTable[table]
name: functional_alltypes