Skip to content

Commit

Permalink
Make the config_models mandatory in integrations-core (#16311)
Browse files Browse the repository at this point in the history
* Make the config_models mandatory in integrations-core

* address
  • Loading branch information
FlorentClarret authored Nov 30, 2023
1 parent a5ff650 commit 217245d
Show file tree
Hide file tree
Showing 8 changed files with 164 additions and 7 deletions.
1 change: 1 addition & 0 deletions datadog_checks_dev/changelog.d/16311.fixed
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Make the config_models mandatory in integrations-core
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,11 @@

LICENSE_HEADER = "(C) Datadog, Inc."

INTEGRATIONS_WITHOUT_MODELS = {
'snmp', # Deprecated
'tokumx', # Python 2 only
}


def standardize_new_lines(lines):
# If a new line is at the start or end of a line, remove it and add it to the list
Expand Down Expand Up @@ -77,9 +82,11 @@ def models(ctx, check, sync, verbose):
checks, an 'all' or empty `check` value will validate all README files.
"""
root = get_root()
community_check = ctx.obj['repo_choice'] not in ('core', 'internal')
is_community_check = ctx.obj['repo_choice'] not in ('core', 'internal')
is_core_check = ctx.obj['repo_choice'] == 'core'

checks = set(process_checks_option(check, source='valid_checks', extend_changed=True))

checks = process_checks_option(check, source='valid_checks', extend_changed=True)
echo_info(f"Validating data models for {len(checks)} checks ...")

specs_failed = {}
Expand All @@ -91,7 +98,10 @@ def models(ctx, check, sync, verbose):

code_formatter = ModelConsumer.create_code_formatter()

for check in checks:
if is_core_check:
checks = checks.difference(INTEGRATIONS_WITHOUT_MODELS)

for check in sorted(checks):
display_queue = {}
if check == 'datadog_checks_base':
spec_path = path_join(root, 'datadog_checks_base', 'tests', 'models', 'data', 'spec.yaml')
Expand Down Expand Up @@ -126,8 +136,7 @@ def models(ctx, check, sync, verbose):
else:
models_location = get_models_location(check)

# TODO: Remove when all integrations have models
if not sync and not dir_exists(models_location):
if not sync and not dir_exists(models_location) and not is_core_check:
continue

model_consumer = ModelConsumer(spec.data, code_formatter)
Expand Down Expand Up @@ -165,7 +174,7 @@ def models(ctx, check, sync, verbose):
# validators.py and deprecations.py are custom files, they should only be rendered the first time
continue

if not community_check:
if not is_community_check:
expected_model_file_lines.extend(license_header_lines)

if model_file not in CUSTOM_FILES:
Expand All @@ -175,7 +184,7 @@ def models(ctx, check, sync, verbose):

# If we're re-generating a file, we should ensure we do not change the license date
# We also want to handle the case where there is no license header
if not community_check:
if not is_community_check:
if len(current_model_file_lines) > 0 and LICENSE_HEADER in current_model_file_lines[0]:
expected_model_file_lines[0] = current_model_file_lines[0]

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
name: My Check
files:
- name: tokumx.yaml
options:
- template: init_config
options:
- template: init_config/default
- template: instances
options:
- template: instances/default
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
__version__ = "1.0.0"
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
{
"manifest_version": "2.0.0",
"app_uuid": "f84d2d4e-03c5-44d7-aa9e-53efc08346a7",
"app_id": "tokumx",
"display_on_public_website": false,
"tile": {
"overview": "README.md#Overview",
"configuration": "README.md#Setup",
"support": "README.md#Support",
"changelog": "CHANGELOG.md",
"description": "",
"title": "tokumx",
"media": [],
"classifier_tags": [
"Supported OS::Linux",
"Supported OS::Windows",
"Supported OS::macOS"
]
},
"assets": {
"integration": {
"source_type_name": "tokumx",
"configuration": {
"spec": "assets/configuration/spec.yaml"
},
"events": {
"creates_events": false
},
"metrics": {
"prefix": "tokumx.",
"check": "",
"metadata_path": "metadata.csv"
},
"service_checks": {
"metadata_path": "assets/service_checks.json"
}
}
},
"author": {
"support_email": "[email protected]",
"name": "Datadog",
"homepage": "https://www.datadoghq.com",
"sales_email": "[email protected]"
},
"oauth": {}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
[project]
name = "datadog-tokumx"
authors = [{ name = "Agent Integrations", email = "[email protected]" }]

[tool.hatch.version]
path = "datadog_checks/tokumx/__about__.py"
84 changes: 84 additions & 0 deletions datadog_checks_dev/tests/tooling/commands/validate/test_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,3 +53,87 @@ def test_generate_new_files_check_licenses(repo, expect_licenses):
assert validators_file.read() == get_license_header() + "\n\n" + VALIDATORS_DOCUMENTATION
else:
assert validators_file.read() == VALIDATORS_DOCUMENTATION


@pytest.mark.parametrize(
    'repo,expect_failure',
    [
        ("core", True),
        ("extras", False),
        ("marketplace", False),
        ("internal", False),
    ],
)
def test_validate_config_models_not_in_sync(repo, expect_failure):
    """Validate that missing config models fail only for integrations-core.

    Community (extras/marketplace) and internal repos are exempt from the
    mandatory-models rule, so the same out-of-sync setup passes there.
    """
    runner = CliRunner()

    with runner.isolated_filesystem():
        # Build a minimal repo layout with one check, then delete its generated
        # config models to simulate an out-of-sync state.
        working_repo = f'integrations-{repo}'
        shutil.copytree(
            os.path.join(os.path.dirname(os.path.realpath(__file__)), "data", "my_check"),
            f"./{working_repo}/my_check",
        )
        os.chdir(working_repo)
        shutil.rmtree("my_check/datadog_checks/my_check/config_models")

        result = run_command(
            [sys.executable, '-m', 'datadog_checks.dev', '--here', 'validate', 'models', 'my_check'],
            capture=True,
        )

        # Common expectations for every repo flavor.
        assert 'Validating data models for 1 checks ...' in result.stdout
        assert '' == result.stderr
        if expect_failure:
            assert 1 == result.code
            # Every generated model file must be reported as out of sync.
            for model_file in ('__init__.py', 'defaults.py', 'instance.py', 'shared.py', 'validators.py'):
                assert f'File `{model_file}` is not in sync, run "ddev validate models my_check -s"' in result.stdout
        else:
            assert 0 == result.code


@pytest.mark.parametrize(
    'repo,expect_failure',
    [
        ("core", False),
        ("extras", True),
        ("marketplace", True),
        ("internal", True),
    ],
)
def test_validate_no_config_models(repo, expect_failure):
    """Validate handling of checks that legitimately ship no config models.

    Some integrations (e.g. tokumx, which is Python 2 only) are allow-listed
    in integrations-core, so validation succeeds there; other repo flavors
    still require the models and must fail.
    """
    runner = CliRunner()

    with runner.isolated_filesystem():
        # Build a minimal repo layout containing only the tokumx fixture,
        # which intentionally has no config_models directory.
        working_repo = f'integrations-{repo}'
        shutil.copytree(
            os.path.join(os.path.dirname(os.path.realpath(__file__)), "data", "tokumx"),
            f"./{working_repo}/tokumx",
        )
        os.chdir(working_repo)

        result = run_command(
            [sys.executable, '-m', 'datadog_checks.dev', '--here', 'validate', 'models', 'tokumx'],
            capture=True,
        )

        # Common expectations for every repo flavor.
        assert 'Validating data models for 1 checks ...' in result.stdout
        assert '' == result.stderr
        if expect_failure:
            assert 1 == result.code
            # Every generated model file must be reported as out of sync.
            for model_file in ('__init__.py', 'defaults.py', 'instance.py', 'shared.py', 'validators.py'):
                assert f'File `{model_file}` is not in sync, run "ddev validate models tokumx -s"' in result.stdout
        else:
            assert 0 == result.code

0 comments on commit 217245d

Please sign in to comment.