Upgrade to latest Black, closes #2239
simonw committed Jan 31, 2024
1 parent c3caf36 commit 5c64af6
Showing 16 changed files with 93 additions and 66 deletions.
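
Almost all of the changes are mechanical reformatting from Black's 2024 stable style: multi-line conditional expressions are wrapped in explicit parentheses, over-long assignments split on the right-hand side instead of breaking the target, and a blank line is enforced after a module docstring. A minimal illustrative sketch of the pattern (the names echo the hunks below, but the snippet itself is not part of the commit):

    """A throwaway module docstring: Black 24 enforces a blank line after it."""

    # Multi-line conditional expressions gain explicit parentheses.
    # Black 23 formatted the same call as:
    #     match_clause=":search"
    #     if search_mode_raw
    #     else "escape_fts(:search)",
    search_mode_raw = False
    params = dict(
        match_clause=(
            ":search" if search_mode_raw else "escape_fts(:search)"
        ),
    )

    # Over-long assignments now split on the right-hand side rather than
    # breaking the subscripted target across lines.
    environment_variables = {}
    environment_variables["SQLITE_EXTENSIONS"] = (
        "/usr/lib/x86_64-linux-gnu/mod_spatialite.so"
    )

    print(params, environment_variables)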
14 changes: 8 additions & 6 deletions datasette/filters.py
@@ -80,9 +80,9 @@ async def inner():
"{fts_pk} in (select rowid from {fts_table} where {fts_table} match {match_clause})".format(
fts_table=escape_sqlite(fts_table),
fts_pk=escape_sqlite(fts_pk),
match_clause=":search"
if search_mode_raw
else "escape_fts(:search)",
match_clause=(
":search" if search_mode_raw else "escape_fts(:search)"
),
)
)
human_descriptions.append(f'search matches "{search}"')
@@ -99,9 +99,11 @@ async def inner():
"rowid in (select rowid from {fts_table} where {search_col} match {match_clause})".format(
fts_table=escape_sqlite(fts_table),
search_col=escape_sqlite(search_col),
match_clause=":search_{}".format(i)
if search_mode_raw
else "escape_fts(:search_{})".format(i),
match_clause=(
":search_{}".format(i)
if search_mode_raw
else "escape_fts(:search_{})".format(i)
),
)
)
human_descriptions.append(
14 changes: 8 additions & 6 deletions datasette/utils/__init__.py
@@ -402,9 +402,9 @@ def make_dockerfile(
apt_get_extras = apt_get_extras_
if spatialite:
apt_get_extras.extend(["python3-dev", "gcc", "libsqlite3-mod-spatialite"])
environment_variables[
"SQLITE_EXTENSIONS"
] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so"
environment_variables["SQLITE_EXTENSIONS"] = (
"/usr/lib/x86_64-linux-gnu/mod_spatialite.so"
)
return """
FROM python:3.11.0-slim-bullseye
COPY . /app
@@ -416,9 +416,11 @@ def make_dockerfile(
ENV PORT {port}
EXPOSE {port}
CMD {cmd}""".format(
apt_get_extras=APT_GET_DOCKERFILE_EXTRAS.format(" ".join(apt_get_extras))
if apt_get_extras
else "",
apt_get_extras=(
APT_GET_DOCKERFILE_EXTRAS.format(" ".join(apt_get_extras))
if apt_get_extras
else ""
),
environment_variables="\n".join(
[
"ENV {} '{}'".format(key, value)
1 change: 1 addition & 0 deletions datasette/utils/shutil_backport.py
@@ -4,6 +4,7 @@
This code is licensed under the Python License:
https://github.com/python/cpython/blob/v3.8.3/LICENSE
"""

import os
from shutil import copy, copy2, copystat, Error

22 changes: 13 additions & 9 deletions datasette/views/database.py
@@ -126,9 +126,9 @@ async def database_actions():
"views": sql_views,
"queries": canned_queries,
"allow_execute_sql": allow_execute_sql,
"table_columns": await _table_columns(datasette, database)
if allow_execute_sql
else {},
"table_columns": (
await _table_columns(datasette, database) if allow_execute_sql else {}
),
}

if format_ == "json":
@@ -719,9 +719,11 @@ async def fetch_data_for_csv(request, _next=None):
display_rows=await display_rows(
datasette, database, request, rows, columns
),
table_columns=await _table_columns(datasette, database)
if allow_execute_sql
else {},
table_columns=(
await _table_columns(datasette, database)
if allow_execute_sql
else {}
),
columns=columns,
renderers=renderers,
url_csv=datasette.urls.path(
@@ -1036,9 +1038,11 @@ async def display_rows(datasette, database, request, rows, columns):
display_value = markupsafe.Markup(
'<a class="blob-download" href="{}"{}>&lt;Binary:&nbsp;{:,}&nbsp;byte{}&gt;</a>'.format(
blob_url,
' title="{}"'.format(formatted)
if "bytes" not in formatted
else "",
(
' title="{}"'.format(formatted)
if "bytes" not in formatted
else ""
),
len(value),
"" if len(value) == 1 else "s",
)
30 changes: 17 additions & 13 deletions datasette/views/table.py
@@ -236,9 +236,11 @@ async def display_columns_and_rows(
path_from_row_pks(row, pks, not pks),
column,
),
' title="{}"'.format(formatted)
if "bytes" not in formatted
else "",
(
' title="{}"'.format(formatted)
if "bytes" not in formatted
else ""
),
len(value),
"" if len(value) == 1 else "s",
)
@@ -289,9 +291,9 @@ async def display_columns_and_rows(
"column": column,
"value": display_value,
"raw": value,
"value_type": "none"
if value is None
else str(type(value).__name__),
"value_type": (
"none" if value is None else str(type(value).__name__)
),
}
)
cell_rows.append(Row(cells))
@@ -974,9 +976,9 @@ async def table_view_data(

from_sql = "from {table_name} {where}".format(
table_name=escape_sqlite(table_name),
where=("where {} ".format(" and ".join(where_clauses)))
if where_clauses
else "",
where=(
("where {} ".format(" and ".join(where_clauses))) if where_clauses else ""
),
)
# Copy of params so we can mutate them later:
from_sql_params = dict(**params)
@@ -1040,10 +1042,12 @@ async def table_view_data(
column=escape_sqlite(sort or sort_desc),
op=">" if sort else "<",
p=len(params),
extra_desc_only=""
if sort
else " or {column2} is null".format(
column2=escape_sqlite(sort or sort_desc)
extra_desc_only=(
""
if sort
else " or {column2} is null".format(
column2=escape_sqlite(sort or sort_desc)
)
),
next_clauses=" and ".join(next_by_pk_clauses),
)
2 changes: 1 addition & 1 deletion setup.py
@@ -85,7 +85,7 @@ def get_version():
"pytest-xdist>=2.2.1",
"pytest-asyncio>=0.17",
"beautifulsoup4>=4.8.1",
"black==23.9.1",
"black==24.1.1",
"blacken-docs==1.16.0",
"pytest-timeout>=1.4.2",
"trustme>=0.7",
6 changes: 3 additions & 3 deletions tests/plugins/my_plugin.py
@@ -39,9 +39,9 @@ async def inner():
"database": database,
"table": table,
"view_name": view_name,
"request_path": request.path
if request is not None
else None,
"request_path": (
request.path if request is not None else None
),
"added": (
await datasette.get_database().execute("select 3 * 5")
).first()[0],
8 changes: 5 additions & 3 deletions tests/test_api_write.py
@@ -279,9 +279,11 @@ async def test_insert_or_upsert_row_errors(
json=input,
headers={
"Authorization": "Bearer {}".format(token),
"Content-Type": "text/plain"
if special_case == "invalid_content_type"
else "application/json",
"Content-Type": (
"text/plain"
if special_case == "invalid_content_type"
else "application/json"
),
},
)

8 changes: 5 additions & 3 deletions tests/test_cli.py
@@ -335,9 +335,11 @@ def test_serve_create(tmpdir):
def test_serve_config(tmpdir, argument, format_):
config_path = tmpdir / "datasette.{}".format(format_)
config_path.write_text(
"settings:\n default_page_size: 5\n"
if format_ == "yaml"
else '{"settings": {"default_page_size": 5}}',
(
"settings:\n default_page_size: 5\n"
if format_ == "yaml"
else '{"settings": {"default_page_size": 5}}'
),
"utf-8",
)
runner = CliRunner()
1 change: 1 addition & 0 deletions tests/test_docs.py
@@ -1,6 +1,7 @@
"""
Tests to ensure certain things are documented.
"""

from datasette import app, utils
from datasette.app import Datasette
from datasette.filters import Filters
1 change: 1 addition & 0 deletions tests/test_internals_database.py
@@ -1,6 +1,7 @@
"""
Tests for the datasette.database.Database class
"""

from datasette.app import Datasette
from datasette.database import Database, Results, MultipleValues
from datasette.utils.sqlite import sqlite3
1 change: 1 addition & 0 deletions tests/test_internals_datasette.py
@@ -1,6 +1,7 @@
"""
Tests for the datasette.app.Datasette class
"""

import dataclasses
from datasette import Forbidden, Context
from datasette.app import Datasette, Database
8 changes: 5 additions & 3 deletions tests/test_permissions.py
@@ -381,9 +381,11 @@ async def test_permissions_debug(ds_client):
{
"action": div.select_one(".check-action").text,
# True = green tick, False = red cross, None = gray None
"result": None
if div.select(".check-result-no-opinion")
else bool(div.select(".check-result-true")),
"result": (
None
if div.select(".check-result-no-opinion")
else bool(div.select(".check-result-true"))
),
"used_default": bool(div.select(".check-used-default")),
}
for div in check_divs
34 changes: 18 additions & 16 deletions tests/test_plugins.py
@@ -1096,24 +1096,26 @@ def filters_from_request(self, request):
@pytest.mark.parametrize("extra_metadata", (False, True))
async def test_hook_register_permissions(extra_metadata):
ds = Datasette(
config={
"plugins": {
"datasette-register-permissions": {
"permissions": [
{
"name": "extra-from-metadata",
"abbr": "efm",
"description": "Extra from metadata",
"takes_database": False,
"takes_resource": False,
"default": True,
}
]
config=(
{
"plugins": {
"datasette-register-permissions": {
"permissions": [
{
"name": "extra-from-metadata",
"abbr": "efm",
"description": "Extra from metadata",
"takes_database": False,
"takes_resource": False,
"default": True,
}
]
}
}
}
}
if extra_metadata
else None,
if extra_metadata
else None
),
plugins_dir=PLUGINS_DIR,
)
await ds.invoke_startup()
8 changes: 5 additions & 3 deletions tests/test_table_api.py
@@ -305,9 +305,11 @@ async def test_paginate_compound_keys_with_extra_filters(ds_client):
"_sort_desc=sortable_with_nulls",
lambda row: (
1 if row["sortable_with_nulls"] is None else 0,
-row["sortable_with_nulls"]
if row["sortable_with_nulls"] is not None
else 0,
(
-row["sortable_with_nulls"]
if row["sortable_with_nulls"] is not None
else 0
),
row["content"],
),
"sorted by sortable_with_nulls descending",
1 change: 1 addition & 0 deletions tests/test_utils.py
@@ -1,6 +1,7 @@
"""
Tests for various datasette helper functions.
"""

from datasette.app import Datasette
from datasette import utils
from datasette.utils.asgi import Request
