Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

table_config instead of table_metadata #2257

Merged
merged 3 commits into from
Feb 7, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 8 additions & 4 deletions datasette/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@
format_bytes,
module_from_path,
move_plugins,
move_table_config,
parse_metadata,
resolve_env_secrets,
resolve_routes,
Expand Down Expand Up @@ -346,7 +347,9 @@ def __init__(
# Move any "plugins" settings from metadata to config - updates them in place
metadata = metadata or {}
config = config or {}
move_plugins(metadata, config)
metadata, config = move_plugins(metadata, config)
# Now migrate any known table configuration settings over as well
metadata, config = move_table_config(metadata, config)

self._metadata_local = metadata or {}
self.sqlite_extensions = []
Expand Down Expand Up @@ -1202,10 +1205,11 @@ def _threads(self):
def _actor(self, request):
return {"actor": request.actor}

async def table_config(self, database, table):
"""Fetch table-specific metadata."""
async def table_config(self, database: str, table: str) -> dict:
"""Return dictionary of configuration for specified table"""
return (
(self.metadata("databases") or {})
(self.config or {})
.get("databases", {})
.get(database, {})
.get("tables", {})
.get(table, {})
Expand Down
10 changes: 4 additions & 6 deletions datasette/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -487,13 +487,11 @@ async def hidden_table_names(self):
)
).rows
]
# Add any from metadata.json
db_metadata = self.ds.metadata(database=self.name)
if "tables" in db_metadata:
# Add any tables marked as hidden in config
db_config = self.ds.config.get("databases", {}).get(self.name, {})
if "tables" in db_config:
hidden_tables += [
t
for t in db_metadata["tables"]
if db_metadata["tables"][t].get("hidden")
t for t in db_config["tables"] if db_config["tables"][t].get("hidden")
]
# Also mark as hidden any tables which start with the name of a hidden table
# e.g. "searchable_fts" implies "searchable_fts_content" should be hidden
Expand Down
69 changes: 57 additions & 12 deletions datasette/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from contextlib import contextmanager
import click
from collections import OrderedDict, namedtuple, Counter
import copy
import base64
import hashlib
import inspect
Expand All @@ -17,7 +18,7 @@
import types
import secrets
import shutil
from typing import Iterable
from typing import Iterable, Tuple
import urllib
import yaml
from .shutil_backport import copytree
Expand Down Expand Up @@ -1290,11 +1291,24 @@
return inner


def move_plugins(source, destination):
def prune_empty_dicts(d: dict):
    """
    Remove every nested dictionary that ends up empty, working bottom-up.

    Mutates *d* in place and returns None; non-dict values are untouched.
    """
    # Snapshot the keys so entries can be deleted while looping.
    for key in list(d.keys()):
        value = d[key]
        if not isinstance(value, dict):
            continue
        # Clean the child first - it may become empty as a result.
        prune_empty_dicts(value)
        if not value:
            d.pop(key, None)


def move_plugins(source: dict, destination: dict) -> Tuple[dict, dict]:
"""
Move 'plugins' keys from source to destination dictionary. Creates hierarchy in destination if needed.
After moving, recursively remove any keys in the source that are left empty.
"""
source = copy.deepcopy(source)
destination = copy.deepcopy(destination)
Comment on lines +1310 to +1311
Copy link
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm a bit worried about the performance overhead of this - not on Datasette itself (it only starts up once) but on the Datasette test suite which might call this hundreds of times.

Copy link
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm going to ignore this problem in the interest of getting this release done.


def recursive_move(src, dest, path=None):
if path is None:
Expand All @@ -1316,18 +1330,49 @@
if not value:
src.pop(key, None)

def prune_empty_dicts(d):
"""
Recursively prune all empty dictionaries from a given dictionary.
"""
for key, value in list(d.items()):
if isinstance(value, dict):
prune_empty_dicts(value)
if value == {}:
d.pop(key, None)

recursive_move(source, destination)
prune_empty_dicts(source)
return source, destination


# Table-level settings that are treated as configuration rather than
# metadata; move_table_config() migrates these from metadata to config.
_table_config_keys = (
    "hidden",
    "sort",
    "sort_desc",
    "size",
    "sortable_columns",
    "label_column",
    "facets",
    "fts_table",
    "fts_pk",
    "searchmode",
    "units",
)


def move_table_config(metadata: dict, config: dict):
    """
    Migrate every known table configuration key from metadata into config.

    Returns a (metadata, config) pair. If metadata has no "databases"
    section the original objects are handed back untouched; otherwise both
    dictionaries are deep-copied first (the caller's inputs are never
    mutated) and any dictionaries left empty in metadata are pruned.
    """
    if "databases" not in metadata:
        # Nothing that could contain table settings - return unchanged.
        return metadata, config
    metadata = copy.deepcopy(metadata)
    config = copy.deepcopy(config)
    for db_name, db_entry in metadata["databases"].items():
        if "tables" not in db_entry:
            continue
        for table_name, table_entry in db_entry["tables"].items():
            # Pull every recognized configuration key out of this table
            moved = {
                key: table_entry.pop(key)
                for key in _table_config_keys
                if key in table_entry
            }
            if moved:
                tables = (
                    config.setdefault("databases", {})
                    .setdefault(db_name, {})
                    .setdefault("tables", {})
                )
                tables.setdefault(table_name, {}).update(moved)
    # Moving keys may have emptied out nested dicts - drop those
    prune_empty_dicts(metadata)
    return metadata, config


def redact_keys(original: dict, key_patterns: Iterable) -> dict:
Expand Down
11 changes: 5 additions & 6 deletions datasette/views/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,11 +142,11 @@
"""Returns columns, rows for specified table - including fancy foreign key treatment"""
sortable_columns = sortable_columns or set()
db = datasette.databases[database_name]
table_metadata = await datasette.table_config(database_name, table_name)
column_descriptions = table_metadata.get("columns") or {}
column_descriptions = datasette.metadata("columns", database_name, table_name) or {}
column_details = {
col.name: col for col in await db.table_column_details(table_name)
}
table_config = await datasette.table_config(database_name, table_name)
pks = await db.primary_keys(table_name)
pks_for_display = pks
if not pks_for_display:
Expand Down Expand Up @@ -193,7 +193,6 @@
"raw": pk_path,
"value": markupsafe.Markup(
'<a href="{table_path}/{flat_pks_quoted}">{flat_pks}</a>'.format(
base_url=base_url,
table_path=datasette.urls.table(database_name, table_name),
flat_pks=str(markupsafe.escape(pk_path)),
flat_pks_quoted=path_from_row_pks(row, pks, not pks),
Expand Down Expand Up @@ -274,9 +273,9 @@
),
)
)
elif column in table_metadata.get("units", {}) and value != "":
elif column in table_config.get("units", {}) and value != "":
# Interpret units using pint
value = value * ureg(table_metadata["units"][column])
value = value * ureg(table_config["units"][column])

Check warning on line 278 in datasette/views/table.py

View check run for this annotation

Codecov / codecov/patch

datasette/views/table.py#L278

Added line #L278 was not covered by tests
# Pint uses floating point which sometimes introduces errors in the compact
# representation, which we have to round off to avoid ugliness. In the vast
# majority of cases this rounding will be inconsequential. I hope.
Expand Down Expand Up @@ -591,7 +590,7 @@
try:
data = json.loads(await request.post_body())
confirm = data.get("confirm")
except json.JSONDecodeError as e:
except json.JSONDecodeError:
pass

if not confirm:
Expand Down
101 changes: 100 additions & 1 deletion tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -771,7 +771,7 @@ def test_databases_json(app_client_two_attached_databases_one_immutable):
@pytest.mark.asyncio
async def test_metadata_json(ds_client):
    """The /-/metadata.json endpoint serves the instance's live metadata.

    Compares against ``ds.metadata()`` rather than the raw METADATA fixture,
    because known configuration keys are migrated out of metadata when the
    Datasette instance starts up.
    """
    response = await ds_client.get("/-/metadata.json")
    assert response.json() == ds_client.ds.metadata()


@pytest.mark.asyncio
Expand Down Expand Up @@ -1061,3 +1061,102 @@ async def test_config_json(config, expected):
ds = Datasette(config=config)
response = await ds.client.get("/-/config.json")
assert response.json() == expected


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "metadata,expected_config,expected_metadata",
    (
        # Empty metadata should produce empty config and empty metadata
        ({}, {}, {}),
        (
            # Metadata input
            {
                "title": "Datasette Fixtures",
                "databases": {
                    "fixtures": {
                        "tables": {
                            "sortable": {
                                "sortable_columns": [
                                    "sortable",
                                    "sortable_with_nulls",
                                    "sortable_with_nulls_2",
                                    "text",
                                ],
                            },
                            "no_primary_key": {"sortable_columns": [], "hidden": True},
                            "units": {"units": {"distance": "m", "frequency": "Hz"}},
                            "primary_key_multiple_columns_explicit_label": {
                                "label_column": "content2"
                            },
                            "simple_view": {"sortable_columns": ["content"]},
                            "searchable_view_configured_by_metadata": {
                                "fts_table": "searchable_fts",
                                "fts_pk": "pk",
                            },
                            "roadside_attractions": {
                                "columns": {
                                    "name": "The name of the attraction",
                                    "address": "The street address for the attraction",
                                }
                            },
                            "attraction_characteristic": {"sort_desc": "pk"},
                            "facet_cities": {"sort": "name"},
                            "paginated_view": {"size": 25},
                        },
                    }
                }
            },
            # Should produce a config with just the table configuration keys
            {
                "databases": {
                    "fixtures": {
                        "tables": {
                            "sortable": {
                                "sortable_columns": [
                                    "sortable",
                                    "sortable_with_nulls",
                                    "sortable_with_nulls_2",
                                    "text",
                                ]
                            },
                            "units": {"units": {"distance": "m", "frequency": "Hz"}},
                            # These two appear as "***" in /-/config.json -
                            # presumably because their table names contain "key"
                            # and match the secret-redaction patterns; verify
                            # against the redact_keys() call for that endpoint
                            "no_primary_key": "***",
                            "primary_key_multiple_columns_explicit_label": "***",
                            "simple_view": {"sortable_columns": ["content"]},
                            "searchable_view_configured_by_metadata": {
                                "fts_table": "searchable_fts",
                                "fts_pk": "pk",
                            },
                            "attraction_characteristic": {"sort_desc": "pk"},
                            "facet_cities": {"sort": "name"},
                            "paginated_view": {"size": 25},
                        }
                    }
                }
            },
            # And metadata with everything else
            {
                "title": "Datasette Fixtures",
                "databases": {
                    "fixtures": {
                        "tables": {
                            "roadside_attractions": {
                                "columns": {
                                    "name": "The name of the attraction",
                                    "address": "The street address for the attraction",
                                }
                            },
                        }
                    }
                },
            },
        ),
    ),
)
async def test_upgrade_metadata(metadata, expected_config, expected_metadata):
    """Known table configuration keys supplied via metadata are migrated
    into config at startup, leaving only true metadata behind."""
    ds = Datasette(metadata=metadata)
    response = await ds.client.get("/-/config.json")
    assert response.json() == expected_config
    response2 = await ds.client.get("/-/metadata.json")
    assert response2.json() == expected_metadata
2 changes: 1 addition & 1 deletion tests/test_html.py
Original file line number Diff line number Diff line change
Expand Up @@ -753,7 +753,7 @@ async def test_metadata_json_html(ds_client):
response = await ds_client.get("/-/metadata")
assert response.status_code == 200
pre = Soup(response.content, "html.parser").find("pre")
assert METADATA == json.loads(pre.text)
assert ds_client.ds.metadata() == json.loads(pre.text)


@pytest.mark.asyncio
Expand Down
Loading