feat: Add capacity settlement (#32)

---------

Co-authored-by: Alexander <[email protected]>
Co-authored-by: Alexander <[email protected]>
3 people authored Dec 20, 2024
1 parent a3969d0 commit fdef39f
Showing 29 changed files with 552 additions and 39 deletions.
8 changes: 5 additions & 3 deletions .devcontainer/check_test_count.sh
@@ -6,12 +6,14 @@
# The script must be invoked with a filter matching the paths NOT included in the matrix

# $@: (Optional) Can be set to specify a filter for running python tests at the specified path.
echo "Filter (paths): '$@'"
echo "Parameters: '$@'"

# Exit immediately with failure status if any command fails
set -e

cd source/electrical_heating/tests/
test_path=$1
filter=$2
cd $test_path
# Enable extended globbing. E.g. see https://stackoverflow.com/questions/8525437/list-files-not-matching-a-pattern
shopt -s extglob

@@ -20,7 +22,7 @@ shopt -s extglob
# 'awk' is used to get the second column of the output which contains the number of tests.
# 'head' is used to get the first line of the output which contains the number of tests.
# Example output line returned by the grep filter: 'collected 10 items'
executed_test_count=$(coverage run --branch -m pytest $@ --collect-only | grep collected | awk '{print $2}' | head -n 1)
executed_test_count=$(coverage run --branch -m pytest $filter --collect-only | grep collected | awk '{print $2}' | head -n 1)

total_test_count=$(coverage run --branch -m pytest --collect-only | grep collected | awk '{print $2}' | head -n 1)

20 changes: 15 additions & 5 deletions .github/workflows/cd.yml
@@ -45,23 +45,32 @@ jobs:
# CD Databricks
#

electrical_heating:
electrical_heating_promote_prerelease:
needs: changes
if: ${{ needs.changes.outputs.electrical_heating == 'true' }}
uses: Energinet-DataHub/.github/.github/workflows/promote-prerelease.yml@v14
with:
release_name_prefix: electrical_heating


capacity_settlement_promote_prerelease:
needs: changes
if: ${{ needs.changes.outputs.capacity_settlement == 'true' }}
uses: Energinet-DataHub/.github/.github/workflows/promote-prerelease.yml@v14
with:
release_name_prefix: capacity_settlement

#
# Dispatch deployment request
#

dispatch_deployment_event:
if: ${{ always() && !cancelled() && !failure() && needs.changes.outputs.electrical_heating == 'true' }}
if: ${{ always() && !cancelled() && !failure() && needs.changes.outputs.electrical_heating == 'true' || needs.changes.outputs.capacity_settlement == 'true' }}
runs-on: ubuntu-latest
needs: [
changes,
electrical_heating
electrical_heating_promote_prerelease,
capacity_settlement_promote_prerelease,
]
steps:
- run: echo "${{ toJSON(needs) }}"
@@ -84,7 +93,7 @@ jobs:
repository: ${{ vars.environment_repository_path }}
event-type: measurements-deployment-request-domain
# yamllint disable-line rule:quoted-strings
client-payload: '{"pr_number": "${{ steps.find_pull_request.outputs.pull_request_number }}", "electrical_heating": "${{ needs.changes.outputs.electrical_heating }}"}'
client-payload: '{"pr_number": "${{ steps.find_pull_request.outputs.pull_request_number }}", "electrical_heating": "${{ needs.changes.outputs.electrical_heating }}", "capacity_settlement": "${{ needs.changes.outputs.capacity_settlement }}"}'

#
# Send notification to teams channel if deployment dispatch failed
@@ -93,7 +102,8 @@
dispatch_failed:
needs:
[
electrical_heating,
electrical_heating_promote_prerelease,
capacity_settlement_promote_prerelease,
dispatch_deployment_event,
cd_docker
]
92 changes: 92 additions & 0 deletions .github/workflows/ci-capacity-settlement.yml
@@ -0,0 +1,92 @@
name: CI Capacity Settlement

on:
workflow_call:
inputs:
image_tag:
type: string
default: latest

jobs:
databricks_ci_build:
uses: Energinet-DataHub/.github/.github/workflows/databricks-build-prerelease.yml@v14
with:
python_version: 3.11.7
architecture: x64
wheel_working_directory: ./source/capacity_settlement
prerelease_prefix: capacity_settlement

unit_tests:
strategy:
fail-fast: false
matrix:
# IMPORTANT: When adding a new folder here it should also be added in the `unit_test_check` job!
tests_filter_expression:
- name: Capacity Settlement
paths: capacity_settlement_tests/
uses: Energinet-DataHub/.github/.github/workflows/python-ci.yml@v14
with:
job_name: ${{ matrix.tests_filter_expression.name }}
operating_system: dh3-ubuntu-20.04-4core
path_static_checks: ./source/capacity_settlement
# documented here: https://github.com/Energinet-DataHub/opengeh-wholesale/tree/main/source/databricks#styling-and-formatting
ignore_errors_and_warning_flake8: E501,F401,E402,E203,W503
tests_folder_path: ./source/capacity_settlement/tests
test_report_path: ./source/capacity_settlement/tests
# See .docker/entrypoint.py on how to use the filter expression
tests_filter_expression: ${{ matrix.tests_filter_expression.paths }}
image_tag: ${{ inputs.image_tag }}

# Check executed unit tests
capacity_settlement_unit_test_check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Log in to the Container registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ github.token }}

- name: Execute python tests
shell: bash
id: test_count
run: |
# Small hack to get the repository name
repository=${{ github.repository }}
repository_owner=${{ github.repository_owner }}
repository_name=${repository/$repository_owner\//}
# IMPORTANT: When adding a new folder here, one must also add the folder
# to one of the test jobs above! This is because this filter contains the sum of all folders
# from test jobs.
test_path="source/capacity_settlement/tests/"
filter="capacity_settlement_tests/"
chmod +x ./.devcontainer/check_test_count.sh
IMAGE_TAG=${{ inputs.image_tag }} docker compose -f .devcontainer/docker-compose.yml run --rm -u root python-unit-test ./.devcontainer/check_test_count.sh $test_path $filter
mypy_check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.x
- name: Run pip install and mypy check of files in package
shell: bash
run: |
pip install --upgrade pip
pip install mypy types-python-dateutil
mypy ./source/capacity_settlement --disallow-untyped-defs --ignore-missing-imports
black_check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: psf/black@stable
with:
options: --check --diff
src: ./source/capacity_settlement
15 changes: 4 additions & 11 deletions .github/workflows/ci-electrical-heating.yml
@@ -3,10 +3,6 @@
on:
workflow_call:
inputs:
has_electrical_heating_changes:
description: Whether there are changes in the electrical heating job folder
required: true
type: boolean
image_tag:
type: string
default: latest
@@ -21,14 +17,13 @@ jobs:
prerelease_prefix: electrical_heating

unit_tests:
if: ${{ inputs.has_electrical_heating_changes }}
strategy:
fail-fast: false
matrix:
# IMPORTANT: When adding a new folder here it should also be added in the `unit_test_check` job!
tests_filter_expression:
- name: Electrical Heating
paths: entry_points/
paths: electrical_heating_tests/
uses: Energinet-DataHub/.github/.github/workflows/python-ci.yml@v14
with:
job_name: ${{ matrix.tests_filter_expression.name }}
@@ -44,7 +39,6 @@

# Check executed unit tests
electrical_heating_unit_test_check:
if: ${{ inputs.has_electrical_heating_changes }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -62,14 +56,14 @@
# IMPORTANT: When adding a new folder here, one must also add the folder
# to one of the test jobs above! This is because this filter contains the sum of all folders
# from test jobs.
filter="entry_points/"
test_path="source/electrical_heating/tests/"
filter="electrical_heating_tests/"
chmod +x ./.devcontainer/check_test_count.sh
IMAGE_TAG=${{ inputs.image_tag }} docker compose -f .devcontainer/docker-compose.yml run --rm -u root python-unit-test ./.devcontainer/check_test_count.sh $filter
IMAGE_TAG=${{ inputs.image_tag }} docker compose -f .devcontainer/docker-compose.yml run --rm -u root python-unit-test ./.devcontainer/check_test_count.sh $test_path $filter
mypy_check:
if: ${{ inputs.has_electrical_heating_changes }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -84,7 +78,6 @@
mypy ./source/electrical_heating --disallow-untyped-defs --ignore-missing-imports
black_check:
if: ${{ inputs.has_electrical_heating_changes }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
11 changes: 9 additions & 2 deletions .github/workflows/ci-orchestrator.yml
@@ -12,7 +12,7 @@ concurrency:
jobs:

#
# License and Markdown Check.
# License and Markdown Check
#
ci_base:
uses: Energinet-DataHub/.github/.github/workflows/ci-base.yml@v14
@@ -36,7 +36,13 @@ jobs:
if: ${{ needs.changes.outputs.electrical_heating == 'true' }}
uses: ./.github/workflows/ci-electrical-heating.yml
with:
has_electrical_heating_changes: ${{ needs.changes.outputs.electrical_heating == 'true' }}
image_tag: ${{ needs.ci_docker.outputs.image_tag }}

ci_capacity_settlement:
needs: [changes, ci_docker]
if: ${{ needs.changes.outputs.capacity_settlement == 'true' }}
uses: ./.github/workflows/ci-capacity-settlement.yml
with:
image_tag: ${{ needs.ci_docker.outputs.image_tag }}

#
@@ -49,6 +55,7 @@
changes,
ci_base,
ci_electrical_heating,
ci_capacity_settlement
]
if: |
always()
14 changes: 11 additions & 3 deletions .github/workflows/detect-changes.yml
@@ -11,6 +11,8 @@ on:
outputs:
electrical_heating:
value: ${{ jobs.changes.outputs.electrical_heating }}
capacity_settlement:
value: ${{ jobs.changes.outputs.capacity_settlement }}
docker:
value: ${{ jobs.changes.outputs.docker }}
docker_in_commit:
@@ -23,10 +25,11 @@ jobs:
# Set job outputs to values from filter step
outputs:
electrical_heating: ${{ steps.filter.outputs.electrical_heating }}
capacity_settlement: ${{ steps.filter.outputs.capacity_settlement }}
docker: ${{ steps.filter.outputs.docker }}
docker_in_commit: ${{ steps.docker_changed.outputs.any_changed }}
steps:
# For pull requests it's not necessary to checkout the code because GitHub REST API is used to determine changes
# For pull requests it's not necessary to check out the code because GitHub REST API is used to determine changes
- name: Checkout repository
uses: actions/checkout@v4
with:
@@ -41,10 +44,15 @@
- 'source/electrical_heating/**'
- '.github/workflows/ci-electrical-heating.yml'
- '.github/workflows/cd.yml'
docker:
- .docker/**
- '.devcontainer/**'
capacity_settlement:
- 'source/capacity_settlement/**'
- '.github/workflows/ci-capacity-settlement.yml'
- '.github/workflows/cd.yml'
- '.devcontainer/**'
docker:
- .docker/**
- name: Package content or build has changed
id: docker_changed
Empty file.
@@ -0,0 +1 @@
from .entry_point import execute
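
Since this package __init__ re-exports execute, callers only need the package-level import. A minimal sketch, not part of the commit, assuming the package path capacity_settlement.entry_points inferred from the imports in the entry point module below:

from capacity_settlement.entry_points import execute

if __name__ == "__main__":
    # execute() reads APPLICATIONINSIGHTS_CONNECTION_STRING from the environment
    # and parses the job arguments from the command line, as defined below.
    execute()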
@@ -0,0 +1,72 @@
import os
import sys
from argparse import Namespace
from collections.abc import Callable

import telemetry_logging.logging_configuration as config
from opentelemetry.trace import SpanKind
from telemetry_logging.span_recording import span_record_exception

from capacity_settlement.entry_points.job_args.capacity_settlement_args import (
CapacitySettlementArgs,
)
from capacity_settlement.entry_points.job_args.capacity_settlement_job_args import (
parse_command_line_arguments,
parse_job_arguments,
)


def execute() -> None:
applicationinsights_connection_string = os.getenv(
"APPLICATIONINSIGHTS_CONNECTION_STRING"
)

start_with_deps(
applicationinsights_connection_string=applicationinsights_connection_string,
)


def start_with_deps(
*,
cloud_role_name: str = "dbr-capacity-settlement",
applicationinsights_connection_string: str | None = None,
parse_command_line_args: Callable[..., Namespace] = parse_command_line_arguments,
parse_job_args: Callable[..., CapacitySettlementArgs] = parse_job_arguments,
) -> None:
"""Start overload with explicit dependencies for easier testing."""
config.configure_logging(
cloud_role_name=cloud_role_name,
tracer_name="capacity-settlement-job",
applicationinsights_connection_string=applicationinsights_connection_string,
extras={"Subsystem": "measurements"},
)

with config.get_tracer().start_as_current_span(
__name__, kind=SpanKind.SERVER
) as span:
# Try/except added to enable adding custom fields to the exception as
# the span attributes do not appear to be included in the exception.
try:

# The command line arguments are parsed to have necessary information for
# coming log messages
command_line_args = parse_command_line_args()

# Add extra to structured logging data to be included in every log message.
config.add_extras(
{
"orchestration-instance-id": command_line_args.orchestration_instance_id,
}
)
span.set_attributes(config.get_extras())
parse_job_args(command_line_args)

# Added as ConfigArgParse uses sys.exit() rather than raising exceptions
except SystemExit as e:
if e.code != 0:
span_record_exception(e, span)
sys.exit(e.code)

except Exception as e:
span_record_exception(e, span)
sys.exit(4)
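
The start_with_deps overload exists so tests can inject the argument parsers instead of reading sys.argv. A minimal pytest-style sketch, not part of the commit; the fake parser functions, the test name, and the module path are assumptions:

import uuid
from argparse import Namespace

from capacity_settlement.entry_points import entry_point
from capacity_settlement.entry_points.job_args.capacity_settlement_args import (
    CapacitySettlementArgs,
)


def test_start_with_deps_accepts_injected_parsers() -> None:
    instance_id = uuid.uuid4()

    def fake_parse_command_line_args() -> Namespace:
        # Mimics the namespace the real command line parser would return.
        return Namespace(orchestration_instance_id=str(instance_id))

    def fake_parse_job_args(args: Namespace) -> CapacitySettlementArgs:
        return CapacitySettlementArgs(orchestration_instance_id=instance_id)

    # Passing None for the connection string keeps telemetry local, mirroring
    # how execute() behaves when the environment variable is unset.
    entry_point.start_with_deps(
        applicationinsights_connection_string=None,
        parse_command_line_args=fake_parse_command_line_args,
        parse_job_args=fake_parse_job_args,
    )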
Empty file.
@@ -0,0 +1,7 @@
from dataclasses import dataclass
from uuid import UUID


@dataclass
class CapacitySettlementArgs:
orchestration_instance_id: UUID
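
For reference, a minimal construction of the new args type; the UUID literal is a placeholder, and in the job itself parse_job_arguments is expected to build this object from the parsed command line:

from uuid import UUID

from capacity_settlement.entry_points.job_args.capacity_settlement_args import (
    CapacitySettlementArgs,
)

# Placeholder id purely for illustration.
args = CapacitySettlementArgs(
    orchestration_instance_id=UUID("00000000-0000-0000-0000-000000000000")
)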