[#1241] Restore default DSR policies #1426

Merged 20 commits on Oct 14, 2022
6 changes: 6 additions & 0 deletions .github/workflows/backend_checks.yml
@@ -201,6 +201,7 @@ jobs:
matrix:
python_version: ["3.8.14", "3.9.14", "3.10.6"]
runs-on: ubuntu-latest
+ timeout-minutes: 15
continue-on-error: true
steps:
- name: Download container
@@ -228,6 +229,7 @@ jobs:
matrix:
python_version: ["3.8.14", "3.9.14", "3.10.6"]
runs-on: ubuntu-latest
+ timeout-minutes: 15
continue-on-error: true
steps:
- name: Download container
@@ -254,6 +256,7 @@ jobs:
matrix:
python_version: ["3.8.14", "3.9.14", "3.10.6"]
runs-on: ubuntu-latest
+ timeout-minutes: 15
continue-on-error: true
steps:
- name: Download container
@@ -284,6 +287,7 @@ jobs:
matrix:
python_version: ["3.8.14", "3.9.14", "3.10.6"]
runs-on: ubuntu-latest
+ timeout-minutes: 15
if: contains(github.event.pull_request.labels.*.name, 'run unsafe ci checks')
continue-on-error: true
steps:
@@ -320,6 +324,7 @@ jobs:
matrix:
python_version: ["3.8.14", "3.9.14", "3.10.6"]
runs-on: ubuntu-latest
+ timeout-minutes: 15
if: contains(github.event.pull_request.labels.*.name, 'run unsafe ci checks')
continue-on-error: true
steps:
@@ -350,6 +355,7 @@ jobs:
External-SaaS-Connectors:
needs: Build
runs-on: ubuntu-latest
+ timeout-minutes: 15
continue-on-error: true
if: contains(github.event.pull_request.labels.*.name, 'run unsafe ci checks')
permissions:
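Each job above gains a `timeout-minutes: 15` line so that a hung run is cancelled rather than blocking the CI queue. As a quick sanity check (not part of this PR), a short script like the following could confirm that every job in the workflow declares a timeout; the file path and the PyYAML dependency are assumptions.

```python
# Not part of the PR: verify every job in backend_checks.yml sets a timeout.
# Assumes the script runs from the repository root and PyYAML is installed.
import yaml

with open(".github/workflows/backend_checks.yml") as f:
    workflow = yaml.safe_load(f)

for name, job in workflow.get("jobs", {}).items():
    timeout = job.get("timeout-minutes")
    status = f"timeout-minutes: {timeout}" if timeout else "no timeout set"
    print(f"{name}: {status}")
```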
25 changes: 13 additions & 12 deletions clients/privacy-center/config/config.json
@@ -6,7 +6,7 @@
"logo_path": "/logo.svg",
"actions": [
{
"policy_key": "example_request_policy",
"policy_key": "default_access_policy",
"icon_path": "/download.svg",
"title": "Access your data",
"description": "We will email you a report of the data related to your account.",
@@ -17,7 +17,7 @@
}
},
{
"policy_key": "example_erasure_policy",
"policy_key": "default_erasure_policy",
"icon_path": "/delete.svg",
"title": "Erase your data",
"description": "We will delete all of your account data. This action cannot be undone.",
@@ -32,19 +32,20 @@
"consent": {
"consentOptions": [
{
"fidesDataUseKey": "third_party_sharing",
"name": "Do not sell my personal information",
"description": "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc vulputate libero et velit. Lorem ipsum dolor sit amet, consectetur.",
"highlight": true,
"url": "https://example.com/privacy#data-sales"
"fidesDataUseKey": "advertising",
"name": "Advertising / Data Sharing",
"description": "We may use some of your personal information for advertising performance analysis and audience modeling for ongoing advertising which may be interpreted as 'Data Sharing' under some regulations.",
"url": "https://example.com/privacy#advertising",
"default": true,
"highlight": false
},
{
"fidesDataUseKey": "improve",
"name": "Product Analytics",
"description": "We may use some of your personal information to collect analytics about how you use our products & services, in order to improve our service.",
"url": "https://example.com/privacy#data-sales",
"default": true,
"fidesDataUseKey": "provide.service",
"name": "Provide a service",
"url": "https://example.com/privacy#provide-service",
"highlight": false,
"description": "Manage how we use your data, including Do Not Sell My Personal Information."
"highlight": false
}
]
}
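The privacy center actions now reference the `default_access_policy` and `default_erasure_policy` keys that the backend seeds out of the box (the "default DSR policies" in the PR title). As a rough illustration only, a request against one of these policies might be submitted like this; the endpoint path, port, and payload shape are assumptions about the Fides privacy-request API rather than something shown in this diff.

```python
# Illustrative only: submit a privacy request against one of the default DSR
# policies referenced above. The URL and payload fields are assumptions, not
# part of this diff.
import requests

FIDES_API = "http://localhost:8080/api/v1"  # assumed local deployment

payload = [
    {
        "policy_key": "default_access_policy",  # matches the new config value
        "identity": {"email": "user@example.com"},
    }
]

response = requests.post(f"{FIDES_API}/privacy-request", json=payload)
response.raise_for_status()
print(response.json())
```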
7 changes: 6 additions & 1 deletion noxfiles/docker_nox.py
@@ -24,7 +24,12 @@ def get_platform(posargs: List[str]) -> str:
Calculate the CPU platform or get it from the
positional arguments.
"""
docker_platforms = {"amd64": "linux/amd64", "arm64": "linux/arm64"}
# Support Intel Macs
docker_platforms = {
"amd64": "linux/amd64",
"arm64": "linux/arm64",
"x86_64": "linux/amd64",
}
if "amd64" in posargs:
return docker_platforms["amd64"]
if "arm64" in posargs:
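The extra `"x86_64"` entry matters because `platform.machine()` reports `x86_64` on Intel Macs (and most x86 Linux hosts), not `amd64`. A minimal sketch of the fallback this mapping supports, assuming the function detects the local CPU when no platform is passed; the helper name and the final default are assumptions, since the rest of `get_platform` is cut off in this hunk.

```python
# A minimal sketch of the fallback behaviour this mapping supports; the actual
# get_platform() implementation is only partially shown in the diff above.
import platform
from typing import List

docker_platforms = {
    "amd64": "linux/amd64",
    "arm64": "linux/arm64",
    "x86_64": "linux/amd64",  # what platform.machine() reports on Intel Macs
}


def guess_docker_platform(posargs: List[str]) -> str:
    """Return an explicit platform from posargs, else detect the local CPU."""
    for arg in ("amd64", "arm64"):
        if arg in posargs:
            return docker_platforms[arg]
    # platform.machine() returns e.g. "x86_64" or "arm64", hence the extra key.
    return docker_platforms.get(platform.machine().lower(), "linux/amd64")


print(guess_docker_platform([]))  # e.g. "linux/amd64" on an Intel Mac
```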
86 changes: 17 additions & 69 deletions src/fides/api/ctl/database/database.py
@@ -6,21 +6,18 @@
from alembic import command, script
from alembic.config import Config
from alembic.runtime import migration
- from fideslang import DEFAULT_TAXONOMY
from fideslib.db.base import Base
from loguru import logger as log
from sqlalchemy.orm import Session
from sqlalchemy_utils.functions import create_database, database_exists

from fides.api.ctl.sql_models import sql_model_map
- from fides.api.ctl.utils.errors import (
-     AlreadyExistsError,
-     QueryError,
-     get_full_exception_name,
- )
+ from fides.api.ctl.utils.errors import get_full_exception_name
from fides.ctl.core.config import get_config
from fides.ctl.core.utils import get_db_engine

- from .crud import create_resource, list_resource
+ from .seed import load_default_resources

CONFIG = get_config()


def get_alembic_config(database_url: str) -> Config:
@@ -49,7 +46,7 @@ async def init_db(database_url: str) -> None:
log.info("Initializing database")
alembic_config = get_alembic_config(database_url)
upgrade_db(alembic_config)
-     await load_default_taxonomy()
+     await load_default_resources()


def create_db_if_not_exists(database_url: str) -> None:
@@ -60,71 +57,22 @@ def create_db_if_not_exists(database_url: str) -> None:
create_database(database_url)


- async def load_default_taxonomy() -> None:
-     """
-     Attempts to insert organization resources into the database,
-     to avoid overwriting a user-created organization under the
-     `default_organization` fides_key.
-
-     Upserts the remaining default taxonomy resources.
-     """
-
-     log.info("Loading the default fideslang taxonomy...")
-
-     log.info("Processing organization resources...")
-     organizations = list(map(dict, DEFAULT_TAXONOMY.dict()["organization"]))
-     inserted = 0
-     for org in organizations:
-         try:
-             await create_resource(sql_model_map["organization"], org)
-             inserted += 1
-         except AlreadyExistsError:
-             pass
-
-     log.info(f"INSERTED {inserted} organization resource(s)")
-     log.info(f"SKIPPED {len(organizations)-inserted} organization resource(s)")
-
-     upsert_resource_types = list(DEFAULT_TAXONOMY.__fields_set__)
-     upsert_resource_types.remove("organization")
-
-     log.info("INSERTING new default fideslang taxonomy resources")
-     for resource_type in upsert_resource_types:
-         log.info(f"Processing {resource_type} resources...")
-         default_resources = DEFAULT_TAXONOMY.dict()[resource_type]
-         existing_resources = await list_resource(sql_model_map[resource_type])
-         existing_keys = [item.fides_key for item in existing_resources]
-         resources = [
-             resource
-             for resource in default_resources
-             if resource["fides_key"] not in existing_keys
-         ]
-
-         if len(resources) == 0:
-             log.info(f"No new {resource_type} resources to add from default taxonomy.")
-             continue
-
-         try:
-             for resource in resources:
-                 await create_resource(sql_model_map[resource_type], resource)
-         except QueryError:
-             pass  # The create_resource function will log the error
-         else:
-             log.info(f"INSERTED {len(resources)} {resource_type} resource(s)")


def reset_db(database_url: str) -> None:
"""
Drops all tables/metadata from the database.
"""
log.info("Resetting database")
log.info("Resetting database...")
engine = get_db_engine(database_url)
connection = engine.connect()
Base.metadata.drop_all(connection)

migration_context = migration.MigrationContext.configure(connection)
version = migration_context._version # pylint: disable=protected-access
if version.exists(connection):
version.drop(connection)
with engine.connect() as connection:
log.info("Dropping tables...")
Base.metadata.drop_all(connection)

log.info("Dropping Alembic table...")
migration_context = migration.MigrationContext.configure(connection)
version = migration_context._version # pylint: disable=protected-access
if version.exists(connection):
version.drop(connection)
log.info("Reset complete.")


def get_db_health(database_url: str, db: Session) -> str:
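`init_db` now delegates seeding to `load_default_resources` in the new `seed` module, which is not included in this excerpt; per the PR title it restores the default DSR policies in addition to upserting the default taxonomy previously handled here. A hypothetical sketch of that entry point, reusing the helpers from the removed code above, might look like the following; everything beyond the `load_default_resources` name is an assumption.

```python
# Hypothetical sketch only: the real seed module lives at
# src/fides/api/ctl/database/seed.py and is not part of this diff.
from fideslang import DEFAULT_TAXONOMY
from loguru import logger as log

from fides.api.ctl.sql_models import sql_model_map

from .crud import create_resource, list_resource

TAXONOMY_TYPES = ("data_category", "data_qualifier", "data_subject", "data_use")


async def create_default_dsr_policies() -> None:
    """Placeholder: the PR restores default_access_policy and default_erasure_policy here."""
    log.info("Ensuring default DSR policies exist...")


async def load_default_resources() -> None:
    """Upsert the default taxonomy, then make sure the default DSR policies exist."""
    log.info("Loading default resources...")
    for resource_type in TAXONOMY_TYPES:
        existing_keys = {
            item.fides_key for item in await list_resource(sql_model_map[resource_type])
        }
        new_resources = [
            resource
            for resource in DEFAULT_TAXONOMY.dict()[resource_type]
            if resource["fides_key"] not in existing_keys
        ]
        for resource in new_resources:
            await create_resource(sql_model_map[resource_type], resource)
        log.info(f"INSERTED {len(new_resources)} {resource_type} resource(s)")

    await create_default_dsr_policies()
```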