Skip to content

Commit

Permalink
Move logging to %-style formatting [ethyca#837] (ethyca#1132)
Browse files Browse the repository at this point in the history
* Add a new Pii class and use it to wrap arguments not already wrapped with NotPii in those logs that are currently using %-style formatting.

* Switch logging formatting to %-style instead of f-string.

* Continue to address lingering f-string instances, and wrap some arguments in Pii, such as raw exceptions.

* Remove NotPii class and update tests.

* Adjust errors made in %-style conversion.

* Remove accidental Pii on print statements, update some PII wrappings.

* Adjust string formatting of newly added log.

* Update Changelog.

* Fix missed closing curly brackets.

* Remove missed curly brackets.
  • Loading branch information
pattisdr authored Aug 24, 2022
1 parent b84e4de commit 08ab71e
Show file tree
Hide file tree
Showing 65 changed files with 478 additions and 304 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ The types of changes are:
* Update request status endpoint to return both audit and execution logs [#1068] (https://github.com/ethyca/fidesops/pull/1068/)
* Update backend routing to handle dynamic frontend routes [#1033](https://github.com/ethyca/fidesops/pull/1033)
* Make connection type search case-insensitive [#1133](https://github.com/ethyca/fidesops/pull/1133)
* Adjust log formatting to be %-style instead of f-string [#1132](https://github.com/ethyca/fidesops/pull/1132)

### Docs

Expand Down
2 changes: 1 addition & 1 deletion docs/fidesops/docs/tutorial/annotate_datasets.md
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def create_dataset(connection_key, yaml_path, access_token):
headers=oauth_headers(access_token=access_token),
json=dataset_create_data,
)
logger.info(f"Creating an annotated Dataset. Status {response.status_code}")
logger.info("Creating an annotated Dataset. Status %s", response.status_code)
return response.json()
```

Expand Down
6 changes: 3 additions & 3 deletions docs/fidesops/docs/tutorial/define_policy.md
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def create_policy(key, access_token):
headers=oauth_headers(access_token=access_token),
json=policy_create_data,
)
logger.info(f"Creating a Policy. Status {response.status_code}")
logger.info("Creating a Policy. Status %s", response.status_code)
return response.json()
```

Expand All @@ -58,7 +58,7 @@ def create_policy_rule(
json=rule_create_data,
)

logger.info(f"Creating a rule. Status {response.status_code}")
logger.info("Creating a rule. Status %s", response.status_code)
return response.json()
```

Expand All @@ -80,7 +80,7 @@ def create_policy_rule_target(policy_key, rule_key, data_category, access_token)
json=target_create_data,
)

logger.info(f"Creating a Rule Target. Status {response.status_code}")
logger.info("Creating a Rule Target. Status %s", response.status_code)
return response.json()
```
```python
Expand Down
4 changes: 2 additions & 2 deletions docs/fidesops/docs/tutorial/execute_privacy_request.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@ def create_privacy_request(email, policy_key):
f"{FIDESOPS_URL}/api/v1/privacy-request",
json=privacy_request_data,
)
logger.info(f"Executing a Privacy Request. Status {response.status_code}")
logger.info(f"Check fidesdemo/fides_uploads for upload package.")
logger.info("Executing a Privacy Request. Status %s", response.status_code)
logger.info("Check fidesdemo/fides_uploads for upload package.")
return response.json()
```

Expand Down
4 changes: 2 additions & 2 deletions docs/fidesops/docs/tutorial/oauth_client.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ def get_access_token(client_id, client_secret):
"client_secret": client_secret,
}
response = requests.post(f"{FIDESOPS_URL}/api/v1/oauth/token", data=data)
logger.info(f"Creating access token. Status {response.status_code}")
logger.info("Creating access token. Status %s", response.status_code)
return response.json()["access_token"]
```

Expand Down Expand Up @@ -64,7 +64,7 @@ def create_oauth_client(access_token):
response = requests.post(
f"{FIDESOPS_URL}/api/v1/oauth/client", headers=oauth_headers(access_token), json=scopes_data
)
logger.info(f"Creating Oauth Client. Status {response.status_code}")
logger.info("Creating Oauth Client. Status %s", response.status_code)
return response.json()

```
Expand Down
4 changes: 2 additions & 2 deletions docs/fidesops/docs/tutorial/postgres_connection.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def create_postgres_connection(key, access_token):
headers=oauth_headers(access_token=access_token),
json=connection_create_data,
)
logger.info(f"Creating PostgreSQL ConnectionConfig. Status {response.status_code}")
logger.info("Creating PostgreSQL ConnectionConfig. Status %s", response.status_code)
return response.json()

```
Expand All @@ -52,7 +52,7 @@ def configure_postgres_connection(
headers=oauth_headers(access_token=access_token),
json=connection_secrets_data,
)
logger.info(f"Updating PostgreSQL Secrets. Status {response.status_code}.")
logger.info("Updating PostgreSQL Secrets. Status %s.", response.status_code)
return response.json()
```

Expand Down
2 changes: 1 addition & 1 deletion docs/fidesops/docs/tutorial/storage_config.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def create_local_storage(key, format, access_token):
headers=oauth_headers(access_token=access_token),
json=storage_create_data,
)
logger.info(f"Defining an upload location. Status {response.status_code}")
logger.info("Defining an upload location. Status %s", response.status_code)
return response.json()

```
Expand Down
1 change: 0 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,6 @@ disable=[
"import-outside-toplevel",
"invalid-name",
"line-too-long",
"logging-fstring-interpolation",
"missing-class-docstring",
"missing-function-docstring",
"missing-module-docstring",
Expand Down
2 changes: 1 addition & 1 deletion scripts/quickstart.py
Original file line number Diff line number Diff line change
Expand Up @@ -317,7 +317,7 @@ def create_policy(key: str):
if response.ok:
policies = (response.json())["succeeded"]
if len(policies) > 0:
logger.info(f"Created fidesops policy with key={key} via /api/v1/policy")
logger.info("Created fidesops policy with key=%s via /api/v1/policy", key)
return response.json()

raise RuntimeError(
Expand Down
4 changes: 3 additions & 1 deletion src/fidesops/_version.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,9 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
print(
"unable to find command, tried %s" % (commands),
)
return None, None
stdout = p.communicate()[0].strip().decode()
if p.returncode != 0:
Expand Down
6 changes: 3 additions & 3 deletions src/fidesops/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
from fidesops.ops.tasks.scheduled.scheduler import scheduler
from fidesops.ops.tasks.scheduled.tasks import initiate_scheduled_request_intake
from fidesops.ops.util.cache import get_cache
from fidesops.ops.util.logger import get_fides_log_record_factory
from fidesops.ops.util.logger import Pii, get_fides_log_record_factory
from fidesops.ops.util.oauth_util import verify_oauth_client

logging.basicConfig(level=config.security.log_level)
Expand Down Expand Up @@ -238,15 +238,15 @@ def start_webserver() -> None:
try:
init_db(config.database.sqlalchemy_database_uri)
except Exception as error: # pylint: disable=broad-except
logger.error(f"Connection to database failed: {error}")
logger.error("Connection to database failed: %s", Pii(str(error)))
return

if config.redis.enabled:
logger.info("Running Redis connection test...")
try:
get_cache()
except (RedisConnectionError, ResponseError) as e:
logger.error(f"Connection to cache failed: {e}")
logger.error("Connection to cache failed: %s", Pii(str(e)))
return

scheduler.start()
Expand Down
6 changes: 4 additions & 2 deletions src/fidesops/ops/analytics.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,8 +41,10 @@ def send_analytics_event(event: AnalyticsEvent) -> None:
try:
analytics_client.send(event)
except AnalyticsError as err:
logger.warning(f"Error sending analytics event: {err}")
logger.warning("Error sending analytics event: %s", err)
else:
logger.info(
f"Analytics event sent: {event.event} with client id: {analytics_client.client_id}"
"Analytics event sent: %s with client id: %s",
event.event,
analytics_client.client_id,
)
29 changes: 17 additions & 12 deletions src/fidesops/ops/api/v1/endpoints/connection_endpoints.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@
from fidesops.ops.schemas.shared_schemas import FidesOpsKey
from fidesops.ops.service.connectors import get_connector
from fidesops.ops.util.api_router import APIRouter
from fidesops.ops.util.logger import NotPii
from fidesops.ops.util.logger import Pii
from fidesops.ops.util.oauth_util import verify_oauth_client

router = APIRouter(tags=["Connections"], prefix=V1_URL_PREFIX)
Expand All @@ -72,7 +72,7 @@ def get_connection_config_or_error(
) -> ConnectionConfig:
"""Helper to load the ConnectionConfig object or throw a 404"""
connection_config = ConnectionConfig.get_by(db, field="key", value=connection_key)
logger.info(f"Finding connection configuration with key '{connection_key}'")
logger.info("Finding connection configuration with key '%s'", connection_key)
if not connection_config:
raise HTTPException(
status_code=HTTP_404_NOT_FOUND,
Expand Down Expand Up @@ -108,7 +108,9 @@ def get_connections(
into an "or" query.
"""
logger.info(
f"Finding connection configurations with pagination params {params} and search query: '{search if search else ''}'."
"Finding connection configurations with pagination params %s and search query: '%s'.",
params,
search if search else "",
)
query = ConnectionConfig.query(db)

Expand Down Expand Up @@ -188,7 +190,7 @@ def patch_connections(
"""
created_or_updated: List[ConnectionConfig] = []
failed: List[BulkUpdateFailed] = []
logger.info(f"Starting bulk upsert for {len(configs)} connection configuration(s)")
logger.info("Starting bulk upsert for %s connection configuration(s)", len(configs))

for config in configs:
orig_data = config.dict().copy()
Expand All @@ -199,7 +201,9 @@ def patch_connections(
created_or_updated.append(connection_config)
except KeyOrNameAlreadyExists as exc:
logger.warning(
f"Create/update failed for connection config with key '{config.key}': {exc}"
"Create/update failed for connection config with key '%s': %s",
config.key,
exc,
)
failed.append(
BulkUpdateFailed(
Expand All @@ -209,7 +213,7 @@ def patch_connections(
)
except Exception:
logger.warning(
f"Create/update failed for connection config with key '{config.key}'."
"Create/update failed for connection config with key '%s'.", config.key
)
failed.append(
BulkUpdateFailed(
Expand All @@ -234,7 +238,7 @@ def delete_connection(
) -> None:
"""Removes the connection configuration with matching key."""
connection_config = get_connection_config_or_error(db, connection_key)
logger.info(f"Deleting connection config with key '{connection_key}'.")
logger.info("Deleting connection config with key '%s'.", connection_key)
connection_config.delete(db)


Expand All @@ -255,7 +259,8 @@ def validate_secrets(
try:
schema = get_connection_secrets_validator(connection_type.value, saas_config) # type: ignore
logger.info(
f"Validating secrets on connection config with key '{connection_config.key}'"
"Validating secrets on connection config with key '%s'",
connection_config.key,
)
connection_secrets = schema.parse_obj(request_body)
except ValidationError as e:
Expand All @@ -278,8 +283,8 @@ def connection_status(
except (ConnectionException, ClientUnsuccessfulException) as exc:
logger.warning(
"Connection test failed on %s: %s",
NotPii(connection_config.key),
str(exc),
connection_config.key,
Pii(str(exc)),
)
connection_config.update_test_status(
test_status=ConnectionTestStatus.failed, db=db
Expand All @@ -290,7 +295,7 @@ def connection_status(
failure_reason=str(exc),
)

logger.info(f"Connection test {status.value} on {connection_config.key}") # type: ignore
logger.info("Connection test %s on %s", status.value, connection_config.key) # type: ignore
connection_config.update_test_status(test_status=status, db=db) # type: ignore

return TestStatusMessage(
Expand Down Expand Up @@ -324,7 +329,7 @@ async def put_connection_config_secrets(
unvalidated_secrets, connection_config
).dict()
# Save validated secrets, regardless of whether they've been verified.
logger.info(f"Updating connection config secrets for '{connection_key}'")
logger.info("Updating connection config secrets for '%s'", connection_key)
connection_config.save(db=db)

msg = f"Secrets updated for ConnectionConfig with key: {connection_key}."
Expand Down
20 changes: 11 additions & 9 deletions src/fidesops/ops/api/v1/endpoints/dataset_endpoints.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@
def _get_connection_config(
connection_key: FidesOpsKey, db: Session = Depends(deps.get_db)
) -> ConnectionConfig:
logger.info(f"Finding connection config with key '{connection_key}'")
logger.info("Finding connection config with key '%s'", connection_key)
connection_config = ConnectionConfig.get_by(db, field="key", value=connection_key)
if not connection_config:
raise HTTPException(
Expand Down Expand Up @@ -117,7 +117,7 @@ def validate_dataset(
Traversal(complete_graph, {k: None for k in unique_identities})
except (TraversalError, ValidationError) as err:
logger.warning(
f"Traversal validation failed for dataset '{dataset.fides_key}': {err}"
"Traversal validation failed for dataset '%s': %s", dataset.fides_key, err
)
return ValidateDatasetResponse(
dataset=dataset,
Expand All @@ -127,7 +127,7 @@ def validate_dataset(
),
)

logger.info(f"Validation successful for dataset '{dataset.fides_key}'!")
logger.info("Validation successful for dataset '%s'!", dataset.fides_key)
return ValidateDatasetResponse(
dataset=dataset,
traversal_details=DatasetTraversalDetails(
Expand Down Expand Up @@ -160,7 +160,7 @@ def patch_datasets(

created_or_updated: List[FidesopsDataset] = []
failed: List[BulkUpdateFailed] = []
logger.info(f"Starting bulk upsert for {len(datasets)} datasets")
logger.info("Starting bulk upsert for %s datasets", len(datasets))

# warn if there are duplicate fides_keys within the datasets
# valid datasets with the same fides_key will override each other
Expand Down Expand Up @@ -260,7 +260,7 @@ def create_or_update_dataset(
)
)
except Exception:
logger.warning(f"Create/update failed for dataset '{data['fides_key']}'.")
logger.warning("Create/update failed for dataset '%s'.", data["fides_key"])
failed.append(
BulkUpdateFailed(
message="Dataset create/update failed.",
Expand Down Expand Up @@ -309,7 +309,9 @@ def get_datasets(
"""Returns all datasets in the database."""

logger.info(
f"Finding all datasets for connection '{connection_config.key}' with pagination params {params}"
"Finding all datasets for connection '%s' with pagination params %s",
connection_config.key,
params,
)
dataset_configs = DatasetConfig.filter(
db=db, conditions=(DatasetConfig.connection_config_id == connection_config.id)
Expand Down Expand Up @@ -339,7 +341,7 @@ def get_dataset(
"""Returns a single dataset based on the given key."""

logger.info(
f"Finding dataset '{fides_key}' for connection '{connection_config.key}'"
"Finding dataset '%s' for connection '%s'", fides_key, connection_config.key
)
dataset_config = DatasetConfig.filter(
db=db,
Expand Down Expand Up @@ -370,7 +372,7 @@ def delete_dataset(
"""Removes the dataset based on the given key."""

logger.info(
f"Finding dataset '{fides_key}' for connection '{connection_config.key}'"
"Finding dataset '%s' for connection '%s'", fides_key, connection_config.key
)
dataset_config = DatasetConfig.filter(
db=db,
Expand All @@ -386,6 +388,6 @@ def delete_dataset(
)

logger.info(
f"Deleting dataset '{fides_key}' for connection '{connection_config.key}'"
"Deleting dataset '%s' for connection '%s'", fides_key, connection_config.key
)
dataset_config.delete(db)
Loading

0 comments on commit 08ab71e

Please sign in to comment.