Skip to content

Commit

Permalink
Cmake cleanup and adding jobs argument (#366)
Browse files Browse the repository at this point in the history
SonarCloud code duplication is spurious. It doesn't seem to understand Python multiple dispatch.
  • Loading branch information
thirtytwobits authored Jan 8, 2025
1 parent c251414 commit 453f667
Show file tree
Hide file tree
Showing 12 changed files with 549 additions and 273 deletions.
27 changes: 22 additions & 5 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ jobs:
-Dsonar.python.coverage.reportPaths=.tox/report/tmp/coverage.xml
-Dsonar.python.xunit.reportPath=.tox/py311-test/tmp/xunit-result.xml
compat-test-python3-mac:
compat-test-python3-windows-and-mac:
strategy:
matrix:
python3-version: ['11', '12', '13']
Expand Down Expand Up @@ -179,7 +179,7 @@ jobs:
cmake --build --preset build-${{ matrix.compiler }}-${{ matrix.architecture }}-${{ matrix.language }}-debugasan
cmake --build --preset build-${{ matrix.compiler }}-${{ matrix.architecture }}-${{ matrix.language }}-release
language-verification-c-cpp-clang-native-extra:
language-verification-c-clang-native-extra:
runs-on: ubuntu-latest
needs: test
container: ghcr.io/opencyphal/toolshed:ts22.4.10
Expand All @@ -190,14 +190,31 @@ jobs:
- name: verify
working-directory: verification
run: |
cmake -DNUNAVUT_EXTRA_GENERATOR_ARGS="--enable-override-variable-array-capacity;--embed-auditing-info" --preset config-clang-native-cpp-20
cmake --build --preset build-clang-native-cpp-20-debugcov --target cov_all
cmake -DNUNAVUT_EXTRA_GENERATOR_ARGS="--enable-override-variable-array-capacity;--embed-auditing-info" --preset config-clang-native-c-11
cmake --build --preset build-clang-native-c-11-debugcov --target cov_all
- name: upload-verification-coverage-reports
uses: actions/upload-artifact@v4
with:
name: verification-coverage-reports
name: verification-c-coverage-reports
path: verification/build/DebugCov/coverage/*

language-verification-cpp-clang-native-extra:
runs-on: ubuntu-latest
needs: test
container: ghcr.io/opencyphal/toolshed:ts22.4.10
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: verify
working-directory: verification
run: |
cmake -DNUNAVUT_EXTRA_GENERATOR_ARGS="--enable-override-variable-array-capacity;--embed-auditing-info" --preset config-clang-native-cpp-20
cmake --build --preset build-clang-native-cpp-20-debugcov --target cov_all
- name: upload-verification-coverage-reports
uses: actions/upload-artifact@v4
with:
name: verification-cpp-coverage-reports
path: verification/build/DebugCov/coverage/*

language-verification-python:
Expand Down
3 changes: 3 additions & 0 deletions CONTRIBUTING.rst
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,9 @@ recommend the following environment for vscode::
tox devenv -e local
source venv/bin/activate

On Windows that last line is instead::

./venv/Scripts/activate

cmake
================================================
Expand Down
12 changes: 10 additions & 2 deletions src/nunavut/_generators.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,6 @@
"""

import abc
import multiprocessing
import multiprocessing.pool
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Dict, Iterable, List, Mapping, Optional, Type, Union
Expand Down Expand Up @@ -297,6 +295,7 @@ def generate_all(
resource_types: int = ResourceType.ANY.value,
embed_auditing_info: bool = False,
dry_run: bool = False,
jobs: int = 0,
no_overwrite: bool = False,
allow_unregulated_fixed_port_id: bool = False,
omit_dependencies: bool = False,
Expand Down Expand Up @@ -389,6 +388,9 @@ def generate_all(
:param bool dry_run:
If True then no files will be generated/written but all logic will be exercised with commensurate logging and
errors.
:param int jobs:
The number of parallel jobs to use when generating code. If 1 then no parallelism is used. If 0 then the
number of jobs is determined by the number of CPUs available.
:param bool no_overwrite:
If True then generated files will not be allowed to overwrite existing files under the `outdir` path causing
errors.
Expand Down Expand Up @@ -424,6 +426,7 @@ def generate_all(
resource_types,
embed_auditing_info,
dry_run,
jobs,
no_overwrite,
allow_unregulated_fixed_port_id,
omit_dependencies,
Expand All @@ -441,6 +444,7 @@ def generate_all_for_language(
resource_types: int = ResourceType.ANY.value,
embed_auditing_info: bool = False,
dry_run: bool = False,
jobs: int = 0,
no_overwrite: bool = False,
allow_unregulated_fixed_port_id: bool = False,
omit_dependencies: bool = False,
Expand All @@ -465,6 +469,9 @@ def generate_all_for_language(
source will be embedded in the generated files at the cost of build reproducibility.
:param dry_run: If True then no files will be generated/written but all logic will be exercised with commensurate
logging and errors.
:param int jobs:
The number of parallel jobs to use when generating code. If 1 then no parallelism is used. If 0 then the
number of jobs is determined by the number of CPUs available.
:param no_overwrite: If True then generated files will not be allowed to overwrite existing files under the `outdir`
path causing errors.
:param allow_unregulated_fixed_port_id: If True then errors will become warning when using fixed port identifiers
Expand All @@ -485,6 +492,7 @@ def generate_all_for_language(
language_context,
target_files,
root_namespace_directories_or_names,
jobs,
allow_unregulated_fixed_port_id=allow_unregulated_fixed_port_id,
omit_dependencies=omit_dependencies,
)
Expand Down
87 changes: 65 additions & 22 deletions src/nunavut/_namespace.py
Original file line number Diff line number Diff line change
Expand Up @@ -455,6 +455,8 @@ def read_files(
index: "Namespace",
dsdl_files: Union[Path, str, Iterable[Union[Path, str]]],
root_namespace_directories_or_names: Optional[Union[Path, str, Iterable[Union[Path, str]]]],
jobs: int = 0,
job_timeout_seconds: float = 0,
lookup_directories: Optional[Union[Path, str, Iterable[Union[Path, str]]]] = None,
print_output_handler: Optional[Callable[[Path, int, str], None]] = None,
allow_unregulated_fixed_port_id: bool = False,
Expand All @@ -466,6 +468,10 @@ def read_files(
:param Namespace index: The index namespace to add the new namespaces and types to.
:param Path | str | Iterable[Path | str] dsdl_files: The dsdl files to read.
:param Path | str | Iterable[Path | str] root_namespace_directories_or_names: See :meth:`pydsdl.read_files`.
:param int jobs: The number of parallel jobs to allow when reading multiple files. 0 indicates no limit and 1
disallows all parallelism.
:param float job_timeout_seconds: Maximum time in fractional seconds any one read file job is allowed to take
before timing out. 0 disables timeouts.
:param Path | str | Iterable[Path | str] lookup_directories: See :meth:`pydsdl.read_files`.
:param Callable[[Path, int, str], None] print_output_handler: A callback to handle print output.
:param bool allow_unregulated_fixed_port_id: Allow unregulated fixed port ids.
Expand All @@ -481,32 +487,53 @@ def read_files(

already_read: set[Path] = set()

running_lookups: list[multiprocessing.pool.AsyncResult] = []
with multiprocessing.pool.Pool() as pool:
if jobs == 1:
# Don't use multiprocessing when jobs is 1.
while fileset:
next_file = fileset.pop()
running_lookups.append(
pool.apply_async(
pydsdl.read_files,
args=(
next_file,
root_namespace_directories_or_names,
lookup_directories,
print_output_handler,
allow_unregulated_fixed_port_id,
),
)
target_type, dependent_types = pydsdl.read_files(
next_file,
root_namespace_directories_or_names,
lookup_directories,
print_output_handler,
allow_unregulated_fixed_port_id,
)
already_read.add(next_file) # TODO: canonical paths for keying here?
if not fileset:
for lookup in running_lookups:
target_type, dependent_types = lookup.get()
Namespace.add_types(index, (target_type[0], dependent_types))
if not omit_dependencies:
for dependent_type in dependent_types:
if dependent_type.source_file_path not in already_read:
fileset.add(dependent_type.source_file_path)
running_lookups.clear()
Namespace.add_types(index, (target_type[0], dependent_types))
if not omit_dependencies:
for dependent_type in dependent_types:
if dependent_type.source_file_path not in already_read:
fileset.add(dependent_type.source_file_path)
else:
running_lookups: list[multiprocessing.pool.AsyncResult] = []
with multiprocessing.pool.Pool(processes=None if jobs == 0 else jobs) as pool:
while fileset:
next_file = fileset.pop()
running_lookups.append(
pool.apply_async(
pydsdl.read_files,
args=(
next_file,
root_namespace_directories_or_names,
lookup_directories,
print_output_handler,
allow_unregulated_fixed_port_id,
),
)
)
already_read.add(next_file) # TODO: canonical paths for keying here?
if not fileset:
for lookup in running_lookups:
if job_timeout_seconds <= 0:
target_type, dependent_types = lookup.get()
else:
target_type, dependent_types = lookup.get(timeout=job_timeout_seconds)
Namespace.add_types(index, (target_type[0], dependent_types))
if not omit_dependencies:
for dependent_type in dependent_types:
if dependent_type.source_file_path not in already_read:
fileset.add(dependent_type.source_file_path)
running_lookups.clear()

return index

Expand All @@ -518,6 +545,8 @@ def _(
lctx: LanguageContext,
dsdl_files: Optional[Union[Path, str, Iterable[Union[Path, str]]]],
root_namespace_directories_or_names: Optional[Union[Path, str, Iterable[Union[Path, str]]]],
jobs: int = 0,
job_timeout_seconds: float = 0,
lookup_directories: Optional[Union[Path, str, Iterable[Union[Path, str]]]] = None,
print_output_handler: Optional[Callable[[Path, int, str], None]] = None,
allow_unregulated_fixed_port_id: bool = False,
Expand All @@ -530,6 +559,10 @@ def _(
:param LanguageContext lctx: The language context to use when building the namespace.
:param Path | str | Iterable[Path | str] dsdl_files: The dsdl files to read.
:param Path | str | Iterable[Path | str] root_namespace_directories_or_names: See :meth:`pydsdl.read_files`.
:param int jobs: The number of parallel jobs to allow when reading multiple files. 0 indicates no limit and 1
disallows all parallelism.
:param float job_timeout_seconds: Maximum time in fractional seconds any one read file job is allowed to take
before timing out. 0 disables timeouts.
:param Path | str | Iterable[Path | str] lookup_directories: See :meth:`pydsdl.read_files`.
:param Callable[[Path, int, str], None] print_output_handler: A callback to handle print output.
:param bool allow_unregulated_fixed_port_id: Allow unregulated fixed port ids.
Expand All @@ -539,6 +572,8 @@ def _(
Namespace.Identity(output_path, lctx),
dsdl_files,
root_namespace_directories_or_names,
jobs,
job_timeout_seconds,
lookup_directories,
print_output_handler,
allow_unregulated_fixed_port_id,
Expand All @@ -553,6 +588,8 @@ def _(
lctx: LanguageContext,
dsdl_files: Optional[Union[Path, str, Iterable[Union[Path, str]]]],
root_namespace_directories_or_names: Optional[Union[Path, str, Iterable[Union[Path, str]]]],
jobs: int = 0,
job_timeout_seconds: float = 0,
lookup_directories: Optional[Union[Path, str, Iterable[Union[Path, str]]]] = None,
print_output_handler: Optional[Callable[[Path, int, str], None]] = None,
allow_unregulated_fixed_port_id: bool = False,
Expand All @@ -565,6 +602,10 @@ def _(
:param LanguageContext lctx: The language context to use when building the namespace.
:param Path | str | Iterable[Path | str] dsdl_files: The dsdl files to read.
:param Path | str | Iterable[Path | str] root_namespace_directories_or_names: See :meth:`pydsdl.read_files`.
:param int jobs: The number of parallel jobs to allow when reading multiple files. 0 indicates no limit and 1
disallows all parallelism.
:param float job_timeout_seconds: Maximum time in fractional seconds any one read file job is allowed to take
before timing out. 0 disables timeouts.
:param Path | str | Iterable[Path | str] lookup_directories: See :meth:`pydsdl.read_files`.
:param Callable[[Path, int, str], None] print_output_handler: A callback to handle print output.
:param bool allow_unregulated_fixed_port_id: Allow unregulated fixed port ids.
Expand All @@ -574,6 +615,8 @@ def _(
Namespace.Identity(Path(output_path), lctx),
dsdl_files,
root_namespace_directories_or_names,
jobs,
job_timeout_seconds,
lookup_directories,
print_output_handler,
allow_unregulated_fixed_port_id,
Expand Down
21 changes: 21 additions & 0 deletions src/nunavut/cli/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -522,6 +522,27 @@ def extension_type(raw_arg: str) -> str:

run_mode_group.add_argument("--dry-run", "-d", action="store_true", help="If True then no files will be generated.")

# --jobs / -j: caps the subprocess pool used for parallel type discovery and
# code generation. 0 = one job per available CPU; 1 = fully serial (no
# subprocesses). Fix: help text typo "th" -> "the".
run_mode_group.add_argument(
    "--jobs",
    "-j",
    type=int,
    default=0,
    help=textwrap.dedent(
        """
        Limits the number of subprocesses nnvg can use to parallelize type discovery
        and code generation.
        If set to 0 then the number of jobs will be set to the number of CPUs available
        on the system.
        If set to 1 then no subprocesses will be used and all work will be done in the
        main process.
        """
    ).lstrip(),
)

run_mode_group.add_argument(
"--list-outputs",
action="store_true",
Expand Down
25 changes: 25 additions & 0 deletions test/gentest_nnvg/test_nnvg.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,31 @@ def test_realgen_using_nnvg(gen_paths: Any, run_nnvg: Callable) -> None:
assert note.exists()


@pytest.mark.parametrize("jobs", [0, 1, 2])
def test_realgen_using_nnvg_jobs(gen_paths: Any, run_nnvg_main: Callable, jobs: int) -> None:
    """
    Verify nnvg generates a C header from a known type at several ``--jobs`` settings.

    Exercises the generator with -j 0 (auto-size to CPU count), -j 1 (serial, no
    subprocesses), and -j 2, asserting the expected header appears in each case.
    """
    regulated_types = gen_paths.root_dir / Path("submodules") / Path("public_regulated_data_types")
    lookup_dir = regulated_types / Path("uavcan")
    target_dsdl = regulated_types / Path("uavcan", "node", "430.GetInfo.1.0.dsdl")

    argv = [
        "--outdir",
        gen_paths.out_dir.as_posix(),
        "-j",
        str(jobs),
        "-l",
        "c",
        "--lookup-dir",
        lookup_dir.as_posix(),
        target_dsdl.as_posix(),
    ]

    run_nnvg_main(gen_paths, argv)

    expected_header = gen_paths.out_dir / Path("uavcan") / Path("node") / Path("GetInfo_1_0").with_suffix(".h")
    assert expected_header.exists()


def test_DSDL_INCLUDE_PATH(gen_paths: Any, run_nnvg_main: Callable) -> None:
"""
Verify that the DSDL_INCLUDE_PATH environment variable is used by nnvg.
Expand Down
Loading

0 comments on commit 453f667

Please sign in to comment.