Skip to content

Commit

Permalink
Refactor tasks to speed up the build process (#57)
Browse files Browse the repository at this point in the history
* Refactor tasks to speed up the build process
* Add build logger name
* Add some jinja inlines to test fabric
* Fix CI env: install ansible.utils
  • Loading branch information
dlobato authored Aug 13, 2024
1 parent 6c124f9 commit 1d6d779
Show file tree
Hide file tree
Showing 5 changed files with 166 additions and 164 deletions.
4 changes: 3 additions & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,9 @@ jobs:
run: |
python -m venv .venv
.venv/bin/python -m pip install --upgrade pip setuptools pyavd==${{ matrix.version }}
.venv/bin/python -m pip install .
.venv/bin/python -m pip install . netaddr
- name: Install ansible.utils
run: .venv/bin/ansible-galaxy collection install ansible.utils
- name: Install referencing for 4.8.0
if: ${{ matrix.version=='4.8.0' }}
run: .venv/bin/python -m pip install "referencing>=0.35.0"
Expand Down
257 changes: 139 additions & 118 deletions pyavd_cli/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,10 @@
import argparse
import logging
import os
import sys
import time
from concurrent.futures import Executor, ProcessPoolExecutor, as_completed
from functools import wraps
from concurrent.futures import Executor, ProcessPoolExecutor
from functools import partial, wraps
from pathlib import Path
from typing import Callable, List, Optional

Expand All @@ -31,7 +32,7 @@

os.environ["PYAVD"] = "1"

logger = logging.getLogger()
logger = logging.getLogger("pyavd-build")


def log_execution_time(logger_fn: Callable = logger.debug, log_prefix: Optional[str] = None) -> Callable:
Expand All @@ -56,7 +57,7 @@ def log_host_validation_result(hostname: str, result: ValidationResult) -> None:
logger.warning("%s: %s", hostname, deprecation_warning)


def validate_hostvars(hostname: str, hostvars: dict, strict: bool):
def validate_hostvars(hostname: str, hostvars: dict, strict: bool = False):
validation_result = validate_inputs(hostvars)

log_host_validation_result(hostname, validation_result)
Expand All @@ -67,149 +68,157 @@ def validate_hostvars(hostname: str, hostvars: dict, strict: bool):
return hostname, hostvars


def build_structured_config(hostname: str, inputs: dict, avd_facts: dict):
def build_and_write_device_config( # pylint: disable=too-many-arguments
hostname: str,
inputs: dict,
avd_facts: dict,
structured_configs_path: Path,
intended_configs_path: Path,
strict: bool = False,
):
try:
structured_config = get_device_structured_config(hostname, inputs, avd_facts=avd_facts)
except Exception as exc: # as of pyavd 4.5.0 AristaAvdDuplicateDataError can't be pickled, wrap exceptions with RuntimeError
raise RuntimeError(f"{exc}") from exc

return hostname, structured_config

templar = Templar(loader=DataLoader(), variables=avd_facts["avd_switch_facts"][hostname] | structured_config)
template_structured_config = templar.template(structured_config)

# Write structured config
with open(structured_configs_path / f"{hostname}.yml", mode="w", encoding="utf8") as fd:
yaml.dump(
template_structured_config,
fd,
Dumper=AnsibleDumper,
indent=2,
sort_keys=False,
width=130,
)

def build_device_config(hostname: str, structured_config: dict, strict: bool):
validation_result = validate_structured_config(structured_config)
validation_result = validate_structured_config(template_structured_config)

log_host_validation_result(hostname, validation_result)

if validation_result.failed and strict:
raise RuntimeError(f"{hostname} validate_structured_config failed")

return hostname, get_device_config(structured_config)
# Write device configs
with open(intended_configs_path / f"{hostname}.cfg", mode="w", encoding="utf8") as fd:
fd.write(get_device_config(template_structured_config))

return hostname

@log_execution_time(log_prefix="Validate inputs time")
def validate_all_inputs(all_hostvars: dict, strict: bool, executor: Executor) -> dict:
validated_inputs = {}
futures = [executor.submit(validate_hostvars, hostname, hostvars, strict) for hostname, hostvars in all_hostvars.items()]
for future in as_completed(futures):
hostname, hostvars = future.result()
validated_inputs[hostname] = hostvars

return validated_inputs
@log_execution_time(log_prefix="Load inputs time")
def get_fabric_hostvars(fabric_name: str, inventory: InventoryManager, loader: DataLoader) -> dict:
variable_manager = VariableManager(loader=loader, inventory=inventory)
templar = Templar(loader=loader)

all_hostvars = {}
for host in inventory.get_hosts(pattern=fabric_name):
hostvars = variable_manager.get_vars(host=inventory.get_host(host.name))
templar.available_variables = hostvars
template_hostvars = templar.template(hostvars, fail_on_undefined=False)
all_hostvars[host.name] = template_hostvars

@log_execution_time(log_prefix="Build structured config time")
def build_and_write_all_structured_configs(
all_hostvars: dict,
avd_facts: dict,
structured_configs_path: Path,
templar: Templar,
executor: Executor,
) -> dict:
structured_configs = {}
futures = [
executor.submit(build_structured_config, hostname, hostvars, avd_facts) for hostname, hostvars in all_hostvars.items()
]
# Write structured configs
structured_configs_path.mkdir(parents=True, exist_ok=True)
for future in as_completed(futures):
hostname, structured_config = future.result()
return all_hostvars

templar.available_variables = avd_facts["avd_switch_facts"][hostname] | structured_config
template_structured_config = templar.template(structured_config)
structured_configs[hostname] = template_structured_config

with open(structured_configs_path / f"{hostname}.yml", mode="w", encoding="utf8") as fd:
@log_execution_time(log_prefix="Validate inputs time")
def validate_all_inputs(all_hostvars: dict, strict: bool, executor: Executor) -> dict:
return dict(
executor.map(
partial(
validate_hostvars,
strict=strict,
),
all_hostvars.keys(),
all_hostvars.values(),
chunksize=16,
)
)


@log_execution_time(log_prefix="Generate facts time")
def generate_avd_facts(all_hostvars: dict, avd_facts_path: Optional[Path] = None):
avd_facts = get_avd_facts(all_hostvars)
if avd_facts_path is not None:
avd_facts_path.parent.mkdir(parents=True, exist_ok=True)
with open(avd_facts_path, mode="w", encoding="utf8") as fd:
yaml.dump(
structured_configs[hostname],
avd_facts,
fd,
Dumper=AnsibleDumper,
indent=2,
sort_keys=False,
width=130,
)
return structured_configs
return avd_facts


@log_execution_time(log_prefix="Build device config time")
def build_and_write_all_device_configs(
@log_execution_time(log_prefix="Build and write device config time")
def build_and_write_all_device_configs( # pylint: disable=too-many-arguments
all_hostvars: dict,
avd_facts: dict,
structured_configs_path: Path,
intended_configs_path: Path,
structured_configs: dict,
strict: bool,
executor: Executor,
):
# Build device config
futures = [
executor.submit(build_device_config, hostname, structured_config, strict)
for hostname, structured_config in structured_configs.items()
]
# Write device configs
) -> list:
structured_configs_path.mkdir(parents=True, exist_ok=True)
intended_configs_path.mkdir(parents=True, exist_ok=True)
for future in as_completed(futures):
hostname, device_config = future.result()

with open(intended_configs_path / f"{hostname}.cfg", mode="w", encoding="utf8") as fd:
fd.write(device_config)
processed_hostnames = list(
executor.map(
partial(
build_and_write_device_config,
avd_facts=avd_facts,
structured_configs_path=structured_configs_path,
intended_configs_path=intended_configs_path,
strict=strict,
),
all_hostvars.keys(),
all_hostvars.values(),
chunksize=8,
)
)

return processed_hostnames


@log_execution_time(log_prefix="Total build time")
def build( # pylint: disable=too-many-arguments,too-many-locals
inventory_path: Path,
fabric_name: str,
limit: str,
def build( # pylint: disable=too-many-arguments
fabric_hostvars: dict,
target_hosts: List[str],
intended_configs_path: Path,
structured_configs_path: Path,
avd_facts_path: Optional[Path] = None,
max_workers: int = 10,
max_workers: Optional[int] = None,
strict: bool = False,
vault_ids: Optional[List[str]] = None,
):
init_plugin_loader()

loader = DataLoader()
if vault_ids:
CLI.setup_vault_secrets(loader, vault_ids=vault_ids)
inventory = InventoryManager(loader=loader, sources=[inventory_path.as_posix()])
variable_manager = VariableManager(loader=loader, inventory=inventory)

templar = Templar(loader=loader)

all_hostvars = {}
for host in inventory.get_hosts(pattern=fabric_name):
hostvars = variable_manager.get_vars(host=inventory.get_host(host.name))
templar.available_variables = hostvars
template_hostvars = templar.template(hostvars, fail_on_undefined=False)
all_hostvars[host.name] = template_hostvars

limit_hostnames = [host.name for host in inventory.get_hosts(pattern=limit)]

with ProcessPoolExecutor(max_workers=max_workers) as executor:
# Validate inputs
all_hostvars = validate_all_inputs(all_hostvars, strict, executor)
validated_fabric_hostvars = validate_all_inputs(all_hostvars=fabric_hostvars, strict=strict, executor=executor)

# Generate facts
avd_facts = log_execution_time(log_prefix="Generate facts time")(get_avd_facts)(all_hostvars)
if avd_facts_path is not None:
avd_facts_path.parent.mkdir(parents=True, exist_ok=True)
with open(avd_facts_path, mode="w", encoding="utf8") as fd:
yaml.dump(
avd_facts,
fd,
Dumper=AnsibleDumper,
indent=2,
sort_keys=False,
width=130,
)

limit_hostvars = {hostname: hostvars for hostname, hostvars in all_hostvars.items() if hostname in limit_hostnames}

# Build structured config
structured_configs = build_and_write_all_structured_configs(
limit_hostvars, avd_facts, structured_configs_path, templar, executor
avd_facts = generate_avd_facts(all_hostvars=validated_fabric_hostvars, avd_facts_path=avd_facts_path)

target_hostvars = {
hostname: hostvars for hostname, hostvars in validated_fabric_hostvars.items() if hostname in target_hosts
}

# Build and write device configs
n_processed_hosts = len(
build_and_write_all_device_configs(
all_hostvars=target_hostvars,
avd_facts=avd_facts,
structured_configs_path=structured_configs_path,
intended_configs_path=intended_configs_path,
strict=strict,
executor=executor,
)
)

# Generate device config
build_and_write_all_device_configs(intended_configs_path, structured_configs, strict, executor)
logger.debug("Processed %d hosts", n_processed_hosts)


def main():
Expand Down Expand Up @@ -250,38 +259,50 @@ def main():

logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)

inventory_path = args.inventory_path
config_output_path = args.config_output_path
inventory_path = args.inventory_path.resolve()
config_output_path = args.config_output_path.resolve()
intended_configs_path = config_output_path / "configs"
structured_configs_path = config_output_path / "structured_configs"
avd_facts_path = args.avd_facts_path
max_workers = args.max_workers
strict = args.strict
fabric_group_name = args.fabric_group_name
avd_facts_path = args.avd_facts_path.resolve() if args.avd_facts_path else None
limit = args.limit or args.fabric_group_name
vault_ids = args.vault_id

logger.debug("pyavd version: %s", pyavd_version)
logger.debug("inventory_path: %s", inventory_path)
logger.debug("intended_configs_path: %s", intended_configs_path)
logger.debug("structured_configs_path: %s", structured_configs_path)
logger.debug("avd_facts_path: %s", avd_facts_path)
logger.debug("max_workers: %s", max_workers)
logger.debug("strict: %s", strict)
logger.debug("fabric_group_name: %s", fabric_group_name)
logger.debug("max_workers: %s", args.max_workers)
logger.debug("strict: %s", args.strict)
logger.debug("fabric_group_name: %s", args.fabric_group_name)
logger.debug("limit: %s", limit)
logger.debug("vault_ids: %s", vault_ids)
logger.debug("vault_ids: %s", args.vault_id)

# load inventory
init_plugin_loader()
loader = DataLoader()
if args.vault_id:
CLI.setup_vault_secrets(loader, vault_ids=args.vault_id)
inventory_manager = InventoryManager(loader=loader, sources=[inventory_path.as_posix()])

fabric_hostvars = get_fabric_hostvars(args.fabric_group_name, inventory_manager, loader)

target_hosts = [host.name for host in inventory_manager.get_hosts(pattern=limit)]
if len(target_hosts) == 0:
logger.error("No hosts matched pattern=%s", limit)
sys.exit(1)

if set(fabric_hostvars.keys()).isdisjoint(target_hosts):
logger.error("No hosts from group %s selected with pattern=%s", args.fabric_group_name, limit)
sys.exit(1)

build(
inventory_path=inventory_path,
fabric_name=fabric_group_name,
limit=limit,
fabric_hostvars=fabric_hostvars,
target_hosts=target_hosts,
intended_configs_path=intended_configs_path,
structured_configs_path=structured_configs_path,
avd_facts_path=avd_facts_path,
max_workers=max_workers,
strict=strict,
vault_ids=vault_ids,
max_workers=args.max_workers,
strict=args.strict,
)


Expand Down
Loading

0 comments on commit 1d6d779

Please sign in to comment.