fixup! Support handling proposals from incremental actors in the PowerManager

Signed-off-by: Sahas Subramanian <[email protected]>
shsms committed Jun 4, 2024
1 parent 9a2b67b commit 66f602f
Showing 1 changed file with 22 additions and 67 deletions.
src/frequenz/sdk/actor/_power_managing/_power_managing_actor.py: 89 changes (22 additions, 67 deletions)
@@ -93,9 +93,6 @@ def __init__( # pylint: disable=too-many-arguments
         ] = {}
         self._distribution_results: dict[frozenset[int], power_distributing.Result] = {}
 
-        self._highest_known_priority: dict[frozenset[int], int] = {}
-        self._highest_priority_is_shifting: dict[frozenset[int], bool] = {}
-
         self._non_shifting_group: BaseAlgorithm = Matryoshka(
             max_proposal_age=timedelta(seconds=60.0)
         )
@@ -116,54 +113,29 @@ async def _send_reports(self, component_ids: frozenset[int]) -> None:
         if bounds is None:
             _logger.warning("PowerManagingActor: No bounds for %s", component_ids)
             return
-        if not self._highest_priority_is_shifting.get(component_ids, False):
-            for priority, sender in self._non_shifting_subscriptions.get(
-                component_ids, {}
-            ).items():
-                status = self._non_shifting_group.get_status(
-                    component_ids,
-                    priority,
-                    bounds,
-                    self._distribution_results.get(component_ids),
-                )
-                await sender.send(status)
-            for priority, sender in self._shifting_subscriptions.get(
-                component_ids, {}
-            ).items():
-                status = self._shifting_group.get_status(
-                    component_ids,
-                    priority,
-                    self._calculate_remaining_bounds(
-                        bounds,
-                        self._non_shifting_group.get_target_power(component_ids),
-                    ),
-                    self._distribution_results.get(component_ids),
-                )
-                await sender.send(status)
-        else:
-            for priority, sender in self._shifting_subscriptions.get(
-                component_ids, {}
-            ).items():
-                status = self._shifting_group.get_status(
-                    component_ids,
-                    priority,
-                    bounds,
-                    self._distribution_results.get(component_ids),
-                )
-                await sender.send(status)
-            for priority, sender in self._non_shifting_subscriptions.get(
-                component_ids, {}
-            ).items():
-                status = self._non_shifting_group.get_status(
-                    component_ids,
-                    priority,
-                    self._calculate_remaining_bounds(
-                        bounds,
-                        self._shifting_group.get_target_power(component_ids),
-                    ),
-                    self._distribution_results.get(component_ids),
-                )
-                await sender.send(status)
+        for priority, sender in self._shifting_subscriptions.get(
+            component_ids, {}
+        ).items():
+            status = self._shifting_group.get_status(
+                component_ids,
+                priority,
+                bounds,
+                self._distribution_results.get(component_ids),
+            )
+            await sender.send(status)
+        for priority, sender in self._non_shifting_subscriptions.get(
+            component_ids, {}
+        ).items():
+            status = self._non_shifting_group.get_status(
+                component_ids,
+                priority,
+                self._calculate_remaining_bounds(
+                    bounds,
+                    self._shifting_group.get_target_power(component_ids),
+                ),
+                self._distribution_results.get(component_ids),
+            )
+            await sender.send(status)
 
     async def _bounds_tracker(
         self,
@@ -388,16 +360,6 @@ async def _run(self) -> None:
                 if proposal.component_ids not in self._bound_tracker_tasks:
                     self._add_system_bounds_tracker(proposal.component_ids)
 
-                if proposal.priority > self._highest_known_priority.setdefault(
-                    proposal.component_ids, -sys.maxsize - 1
-                ):
-                    self._highest_known_priority[proposal.component_ids] = (
-                        proposal.priority
-                    )
-                    self._highest_priority_is_shifting[proposal.component_ids] = (
-                        proposal.in_shifting_group
-                    )
-
                 # TODO: must_send=True forces a new request to # pylint: disable=fixme
                 # be sent to the PowerDistributor, even if there's no change in power.
                 #
@@ -439,13 +401,6 @@ async def _run(self) -> None:
 
                 if component_ids not in self._bound_tracker_tasks:
                     self._add_system_bounds_tracker(component_ids)
-                if priority > self._highest_known_priority.setdefault(
-                    component_ids, -sys.maxsize - 1
-                ):
-                    self._highest_known_priority[component_ids] = priority
-                    self._highest_priority_is_shifting[component_ids] = (
-                        in_shifting_group
-                    )
 
             elif selected_from(selected, self._power_distributing_results_receiver):
                 from .. import ( # pylint: disable=import-outside-toplevel
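Net effect of this fixup, for readers who don't want to trace the diff line by line: `_send_reports` no longer tracks which priority is highest or whether that priority belongs to the shifting group; shifting-group subscribers are always reported the full system bounds, and non-shifting subscribers are reported whatever remains once the shifting group's target power is accounted for. The sketch below models that flow with a simplified `Bounds` type and a stand-in `remaining_bounds` helper; these are illustrative assumptions, not the SDK's actual types, and the real `_calculate_remaining_bounds` may compute the remaining range differently.

from dataclasses import dataclass


@dataclass
class Bounds:
    """Simplified inclusive power bounds, in watts (assumed shape)."""

    lower: float
    upper: float


def remaining_bounds(system: Bounds, shifting_target_power: float | None) -> Bounds:
    """Shrink the system bounds by the power the shifting group already claims.

    Stand-in for the idea behind `_calculate_remaining_bounds`; the SDK's
    implementation may differ.
    """
    if shifting_target_power is None:
        return system
    return Bounds(
        lower=system.lower - shifting_target_power,
        upper=system.upper - shifting_target_power,
    )


def report_order_demo() -> None:
    """Illustrate the post-fixup reporting order in `_send_reports`."""
    system = Bounds(lower=-10_000.0, upper=10_000.0)
    shifting_target = 4_000.0  # power the shifting group plans to use

    # 1. Shifting-group subscribers always see the full system bounds.
    print("shifting subscribers see:", system)

    # 2. Non-shifting subscribers see only what remains after the shifting
    #    group's target power is subtracted.
    print("non-shifting subscribers see:", remaining_bounds(system, shifting_target))


if __name__ == "__main__":
    report_order_demo()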
