More robust linting & formatting has been added to the code base (#511)
* fix RUF rules

* fix cannot be set on an instance

* code satisfies TRY rules (#512)

* fix TRY rules

* allow pytest as function in tox

* fixes an issue where p is not initialized

* fix tests

* code satisfies UP rules (#513)

* fix UP rules

* fix typing union on py39

* fix

* fix round

* fix union typing

* fix round

* add eval-type-backport = {version = "^0.2.0", python = "3.9"}

* Code satisfies E rules (#514)

* Code satisfies simplify rules (#516)

* fix TRY rules

* allow pytest as function in tox

* fixes an issue where p is not initialized

* fix tests

* fix UP rules

* fix typing union on py39

* fix E rules

* fix

* fix SIM rules

* fix test

don't show plot in pytest

fix union error

mock test

rm test

path

* code satisfies C4 rules (#517)

* fix TRY rules

* allow pytest as function in tox

* fixes an issue where p is not initialized

* fix tests

* fix UP rules

* fix typing union on py39

* fix E rules

* fix

* fix SIM rules

* fix C4 rules

* fix types

* fix typing

* code satisfies A rules (#518)

* fix TRY rules

* allow pytest as function in tox

* fixes an issue where p is not initialized

* fix tests

* fix UP rules

* fix typing union on py39

* fix E rules

* fix

* fix SIM rules

* fix C4 rules

* fix A rules

* fix typing

* fix type to format change

* code satisfies B rules (#519)

* fix B rules

* fix type

* idf.simulate return self

* fix test

* zip for py39 no strict field

* config

* robust simulate
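
Several of the bullets above ("fix typing union on py39", "fix union typing", "add eval-type-backport") relate to the pyupgrade (UP) rules rewriting `Optional[X]` as `X | None`, a syntax that only evaluates at runtime on Python 3.10+. A minimal, hypothetical sketch of the pattern (not code from this repository):

```python
# Hypothetical sketch (not archetypal code): why PEP 604 unions need
# `from __future__ import annotations` on Python 3.9.
from __future__ import annotations  # annotations stay as strings, so `str | None` parses on 3.9

from typing import Optional, get_type_hints


def legacy(path: Optional[str] = None) -> str:
    """Pre-UP007 style with explicit Optional[...]."""
    return path or "."


def modern(path: str | None = None) -> str:
    """Post-UP007 style; the future import keeps this importable on 3.9."""
    return path or "."


if __name__ == "__main__":
    print(modern.__annotations__["path"])  # stored as the string "str | None"
    # Evaluating that string on 3.9 (as pydantic does for model fields) is what
    # requires a backport such as eval-type-backport.
    print(get_type_hints(legacy))
```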
samuelduchesne authored Oct 25, 2024
1 parent 2c895fe commit 0fa77d2
Showing 60 changed files with 1,334 additions and 1,420 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -236,3 +236,5 @@ fabric.properties
docs/reference/

.idea/

tests/tests/.temp/
2 changes: 1 addition & 1 deletion Makefile
@@ -20,7 +20,7 @@ check: ## Run code quality tools.
.PHONY: test
test: ## Test the code with pytest
@echo "🚀 Testing code: Running pytest"
@poetry run pytest tests --cov --cov-config=pyproject.toml --cov-report=xml
@poetry run pytest -n auto tests --cov --cov-config=pyproject.toml --cov-report=xml

.PHONY: build
build: clean-build ## Build wheel file using poetry
114 changes: 59 additions & 55 deletions archetypal/__init__.py
@@ -4,9 +4,11 @@
# License: MIT, see full license in LICENSE.txt
# Web: https://github.com/samuelduchesne/archetypal
################################################################################
from __future__ import annotations

import logging as lg
from pathlib import Path
from typing import Any, List, Literal, Optional
from typing import Any, ClassVar, Literal

from energy_pandas.units import unit_registry

@@ -28,7 +30,7 @@
class ZoneWeight:
"""Zone weights for Umi Templates"""

weight_attr = {0: "area", 1: "volume"}
weight_attr: ClassVar[dict] = {0: "area", 1: "volume"}

def __init__(self, n=0):
self._weight_attr = self.weight_attr[n]
@@ -67,96 +69,98 @@ class Settings(BaseSettings, arbitrary_types_allowed=True, validate_assignment=T
log_filename: str = Field("archetypal")

# usual idfobjects
useful_idf_objects: List[str] = [
"WINDOWMATERIAL:GAS",
"WINDOWMATERIAL:GLAZING",
"WINDOWMATERIAL:SIMPLEGLAZINGSYSTEM",
"MATERIAL",
"MATERIAL:NOMASS",
"CONSTRUCTION",
"BUILDINGSURFACE:DETAILED",
"FENESTRATIONSURFACE:DETAILED",
"SCHEDULE:DAY:INTERVAL",
"SCHEDULE:WEEK:DAILY",
"SCHEDULE:YEAR",
]
useful_idf_objects: list[str] = Field(
[
"WINDOWMATERIAL:GAS",
"WINDOWMATERIAL:GLAZING",
"WINDOWMATERIAL:SIMPLEGLAZINGSYSTEM",
"MATERIAL",
"MATERIAL:NOMASS",
"CONSTRUCTION",
"BUILDINGSURFACE:DETAILED",
"FENESTRATIONSURFACE:DETAILED",
"SCHEDULE:DAY:INTERVAL",
"SCHEDULE:WEEK:DAILY",
"SCHEDULE:YEAR",
]
)

# List of Available SQLite Tables
# Ref: https://bigladdersoftware.com/epx/docs/8-3/output-details-and-examples
# /eplusout.sql.html#schedules-table

available_sqlite_tables: dict = dict(
ComponentSizes={"PrimaryKey": ["ComponentSizesIndex"], "ParseDates": []},
ConstructionLayers={"PrimaryKey": ["ConstructionIndex"], "ParseDates": []},
Constructions={"PrimaryKey": ["ConstructionIndex"], "ParseDates": []},
Materials={"PrimaryKey": ["MaterialIndex"], "ParseDates": []},
NominalBaseboardHeaters={
available_sqlite_tables: ClassVar[dict] = {
"ComponentSizes": {"PrimaryKey": ["ComponentSizesIndex"], "ParseDates": []},
"ConstructionLayers": {"PrimaryKey": ["ConstructionIndex"], "ParseDates": []},
"Constructions": {"PrimaryKey": ["ConstructionIndex"], "ParseDates": []},
"Materials": {"PrimaryKey": ["MaterialIndex"], "ParseDates": []},
"NominalBaseboardHeaters": {
"PrimaryKey": ["NominalBaseboardHeaterIndex"],
"ParseDates": [],
},
NominalElectricEquipment={
"NominalElectricEquipment": {
"PrimaryKey": ["NominalElectricEquipmentIndex"],
"ParseDates": [],
},
NominalGasEquipment={
"NominalGasEquipment": {
"PrimaryKey": ["NominalGasEquipmentIndex"],
"ParseDates": [],
},
NominalHotWaterEquipment={
"NominalHotWaterEquipment": {
"PrimaryKey": ["NominalHotWaterEquipmentIndex"],
"ParseDates": [],
},
NominalInfiltration={
"NominalInfiltration": {
"PrimaryKey": ["NominalInfiltrationIndex"],
"ParseDates": [],
},
NominalLighting={"PrimaryKey": ["NominalLightingIndex"], "ParseDates": []},
NominalOtherEquipment={
"NominalLighting": {"PrimaryKey": ["NominalLightingIndex"], "ParseDates": []},
"NominalOtherEquipment": {
"PrimaryKey": ["NominalOtherEquipmentIndex"],
"ParseDates": [],
},
NominalPeople={"PrimaryKey": ["NominalPeopleIndex"], "ParseDates": []},
NominalSteamEquipment={
"NominalPeople": {"PrimaryKey": ["NominalPeopleIndex"], "ParseDates": []},
"NominalSteamEquipment": {
"PrimaryKey": ["NominalSteamEquipmentIndex"],
"ParseDates": [],
},
NominalVentilation={
"NominalVentilation": {
"PrimaryKey": ["NominalVentilationIndex"],
"ParseDates": [],
},
ReportData={"PrimaryKey": ["ReportDataIndex"], "ParseDates": []},
ReportDataDictionary={
"ReportData": {"PrimaryKey": ["ReportDataIndex"], "ParseDates": []},
"ReportDataDictionary": {
"PrimaryKey": ["ReportDataDictionaryIndex"],
"ParseDates": [],
},
ReportExtendedData={
"ReportExtendedData": {
"PrimaryKey": ["ReportExtendedDataIndex"],
"ParseDates": [],
},
RoomAirModels={"PrimaryKey": ["ZoneIndex"], "ParseDates": []},
Schedules={"PrimaryKey": ["ScheduleIndex"], "ParseDates": []},
Surfaces={"PrimaryKey": ["SurfaceIndex"], "ParseDates": []},
SystemSizes={
"RoomAirModels": {"PrimaryKey": ["ZoneIndex"], "ParseDates": []},
"Schedules": {"PrimaryKey": ["ScheduleIndex"], "ParseDates": []},
"Surfaces": {"PrimaryKey": ["SurfaceIndex"], "ParseDates": []},
"SystemSizes": {
"PrimaryKey": ["SystemSizesIndex"],
"ParseDates": {"PeakHrMin": "%m/%d %H:%M:%S"},
},
Time={"PrimaryKey": ["TimeIndex"], "ParseDates": []},
ZoneGroups={"PrimaryKey": ["ZoneGroupIndex"], "ParseDates": []},
Zones={"PrimaryKey": ["ZoneIndex"], "ParseDates": []},
ZoneLists={"PrimaryKey": ["ZoneListIndex"], "ParseDates": []},
ZoneSizes={"PrimaryKey": ["ZoneSizesIndex"], "ParseDates": []},
ZoneInfoZoneLists={"PrimaryKey": ["ZoneListIndex"], "ParseDates": []},
Simulations={
"Time": {"PrimaryKey": ["TimeIndex"], "ParseDates": []},
"ZoneGroups": {"PrimaryKey": ["ZoneGroupIndex"], "ParseDates": []},
"Zones": {"PrimaryKey": ["ZoneIndex"], "ParseDates": []},
"ZoneLists": {"PrimaryKey": ["ZoneListIndex"], "ParseDates": []},
"ZoneSizes": {"PrimaryKey": ["ZoneSizesIndex"], "ParseDates": []},
"ZoneInfoZoneLists": {"PrimaryKey": ["ZoneListIndex"], "ParseDates": []},
"Simulations": {
"PrimaryKey": ["SimulationIndex"],
"ParseDates": {"TimeStamp": {"format": "YMD=%Y.%m.%d %H:%M"}},
},
EnvironmentPeriods={"PrimaryKey": ["EnvironmentPeriodIndex"], "ParseDates": []},
TabularData={"PrimaryKey": ["TabularDataIndex"], "ParseDates": []},
Strings={"PrimaryKey": ["StringIndex"], "ParseDates": []},
StringTypes={"PrimaryKey": ["StringTypeIndex"], "ParseDates": []},
TabularDataWithStrings={"PrimaryKey": ["TabularDataIndex"], "ParseDates": []},
Errors={"PrimaryKey": ["ErrorIndex"], "ParseDates": []},
)
"EnvironmentPeriods": {"PrimaryKey": ["EnvironmentPeriodIndex"], "ParseDates": []},
"TabularData": {"PrimaryKey": ["TabularDataIndex"], "ParseDates": []},
"Strings": {"PrimaryKey": ["StringIndex"], "ParseDates": []},
"StringTypes": {"PrimaryKey": ["StringTypeIndex"], "ParseDates": []},
"TabularDataWithStrings": {"PrimaryKey": ["TabularDataIndex"], "ParseDates": []},
"Errors": {"PrimaryKey": ["ErrorIndex"], "ParseDates": []},
}

zone_weight: ZoneWeight = ZoneWeight(n=0)

@@ -167,7 +171,7 @@ class Settings(BaseSettings, arbitrary_types_allowed=True, validate_assignment=T
"for ENERGYPLUS_VERSION in os.environ",
)

energyplus_location: Optional[DirectoryPath] = Field(
energyplus_location: DirectoryPath | None = Field(
None,
validation_alias="ENERGYPLUS_LOCATION",
description="Root directory of the EnergyPlus install.",
@@ -194,9 +198,9 @@ def initialize_units(cls, v):
# After settings are loaded, import other modules
from .eplus_interface.version import EnergyPlusVersion # noqa: E402
from .idfclass import IDF # noqa: E402
from .umi_template import (
BuildingTemplate, # noqa: E402
UmiTemplateLibrary, # noqa: E402
from .umi_template import ( # noqa: E402
BuildingTemplate,
UmiTemplateLibrary,
)
from .utils import clear_cache, config, parallel_process # noqa: E402

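
The `ClassVar` annotations introduced in this file address both RUF012 (mutable class attributes should be annotated with `typing.ClassVar`) and the "cannot be set on an instance" error mentioned in the commit message: pydantic would otherwise try to treat the class-level dictionaries as model fields. A minimal sketch of the idea, assuming pydantic v2 with the pydantic-settings package and using invented names:

```python
# Illustrative sketch only; names are made up, not taken from archetypal.
from typing import ClassVar

from pydantic_settings import BaseSettings


class ExampleSettings(BaseSettings):
    # ClassVar tells both pydantic and ruff (RUF012) that this mutable dict is a
    # shared class-level constant, not a per-instance field.
    lookup_tables: ClassVar[dict[str, list[str]]] = {
        "Zones": ["ZoneIndex"],
        "Surfaces": ["SurfaceIndex"],
    }

    # An ordinary field keeps its per-instance default and can be overridden.
    log_name: str = "archetypal"


if __name__ == "__main__":
    settings = ExampleSettings()
    print(ExampleSettings.lookup_tables["Zones"])  # accessed on the class
    print(settings.log_name)
```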
40 changes: 19 additions & 21 deletions archetypal/cli.py
@@ -18,7 +18,7 @@
from .eplus_interface.exceptions import EnergyPlusVersionError
from .eplus_interface.version import EnergyPlusVersion

CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
CONTEXT_SETTINGS = {"help_option_names": ["-h", "--help"]}


class CliConfig:
@@ -234,17 +234,18 @@ def reduce(ctx, idf, output, weather, cores, all_zones, as_version):
def validate_energyplusversion(ctx, param, value):
try:
return EnergyPlusVersion(value)
except EnergyPlusVersionError:
raise click.BadParameter("invalid energyplus version")
except EnergyPlusVersionError as e:
raise click.BadParameter("invalid energyplus version") from e


def validate_paths(ctx, param, value):
try:
file_paths = set_filepaths(value)
file_list = "\n".join([f"{i}. " + str(file.name) for i, file in enumerate(file_paths)])
except FileNotFoundError as e:
raise click.BadParameter("no files were found.") from e
else:
return file_paths, file_list
except FileNotFoundError:
raise click.BadParameter("no files were found.")


@cli.command()
@@ -291,22 +292,19 @@ def transition(idf, to_version, cores, yes):
log(
f"executing {len(file_paths)} file(s):\n{file_list}",
)
if not yes:
overwrite = click.confirm("Would you like to overwrite the file(s)?")
else:
overwrite = False
overwrite = click.confirm("Would you like to overwrite the file(s)?") if not yes else False
start_time = time.time()

to_version = to_version.dash
rundict = {
file: dict(
idfname=file,
as_version=to_version,
check_required=False,
check_length=False,
overwrite=overwrite,
prep_outputs=False,
)
file: {
"idfname": file,
"as_version": to_version,
"check_required": False,
"check_length": False,
"overwrite": overwrite,
"prep_outputs": False,
}
for i, file in enumerate(file_paths)
}
results = parallel_process(
@@ -349,12 +347,12 @@ def set_filepaths(idf):
set of Path: The set of a list of paths
"""
if not isinstance(idf, (list, tuple)):
raise ValueError("A list must be passed")
raise TypeError("A list must be passed")
idf = tuple(Path(file_or_path).expand() for file_or_path in idf) # make Paths
file_paths = () # Placeholder for tuple of paths
for file_or_path in idf:
if file_or_path.isfile(): # if a file, concatenate into file_paths
file_paths += tuple([file_or_path])
file_paths += (file_or_path,)
elif file_or_path.isdir(): # if a directory, walkdir (recursive) and get *.idf
file_paths += tuple(file_or_path.walkfiles("*.idf"))
else:
@@ -366,11 +364,11 @@ def set_filepaths(idf):
settings.logs_folder,
]
top = file_or_path.abspath().dirname()
for root, dirs, files in walkdirs(top, excluded_dirs):
for root, _, _ in walkdirs(top, excluded_dirs):
pattern = file_or_path.basename()
file_paths += tuple(Path(root).files(pattern))

file_paths = set([f.relpath().expand() for f in file_paths]) # Only keep unique
file_paths = {f.relpath().expand() for f in file_paths} # Only keep unique
# values
if file_paths:
return file_paths
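
The `cli.py` hunks above follow two of the patterns the TRY/B rule sets enforce: re-raising with `raise ... from e` so the original traceback is preserved, and moving the success path out of `try:` into an `else:` block. A small self-contained sketch of the same shape, with made-up names standing in for the click-specific ones:

```python
# Sketch of the try/except/else + exception-chaining pattern; names are illustrative.
from pathlib import Path


class BadParameterError(ValueError):
    """Stand-in for click.BadParameter in this sketch."""


def collect_idf_files(folder: str) -> list[Path]:
    try:
        files = sorted(Path(folder).glob("*.idf"))
        if not files:
            raise FileNotFoundError(folder)
    except FileNotFoundError as e:
        # `from e` keeps the original exception in the __cause__ chain.
        raise BadParameterError("no files were found.") from e
    else:
        # The success path lives in `else`, not at the bottom of `try`.
        return files


if __name__ == "__main__":
    try:
        collect_idf_files("does-not-exist")
    except BadParameterError as err:
        print(f"{err} (caused by {err.__cause__!r})")
```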
32 changes: 16 additions & 16 deletions archetypal/dataportal.py
@@ -168,23 +168,25 @@ def tabula_building_details_sheet(
code_num,
code_variantnumber,
) = code_building.split(".")
except ValueError:
except ValueError as e:
msg = (
f'the query "{code_building}" is missing a parameter. Make sure the '
'"code_building" has the form: '
"AT.MT.AB.02.Gen.ReEx.001.001"
)
log(msg, lg.ERROR)
raise ValueError(msg)
raise ValueError(msg) from e

# Check code country
code_country = _resolve_codecountry(code_country)

# Check code_buildingsizeclass
if code_buildingsizeclass.upper() not in ["SFH", "TH", "MFH", "AB"]:
raise ValueError(
'specified code_buildingsizeclass "{}" not supported. Available ' 'values are "SFH", "TH", ' '"MFH" or "AB"'
if code_buildingsizeclass.upper() not in {"SFH", "TH", "MFH", "AB"}:
msg = (
f'specified code_buildingsizeclass "{code_buildingsizeclass}" not supported. '
'Available values are "SFH", "TH", "MFH" or "AB"'
)
raise ValueError(msg)
# Check numericals
if not isinstance(code_construcionyearclass, str):
code_construcionyearclass = str(code_construcionyearclass).zfill(2)
@@ -528,12 +530,11 @@ def nrel_bcl_api_request(data):
return response_json


def stat_can_request(type, lang="E", dguid="2016A000011124", topic=0, notes=0, stat=0):
"""Send a request to the StatCan API via HTTP GET and return the JSON
response.
def stat_can_request(response_format, lang="E", dguid="2016A000011124", topic=0, notes=0, stat=0):
"""Send a request to the StatCan API via HTTP GET and return the JSON response.
Args:
type (str): "json" or "xml". json = json response format and xml = xml
response_format (str): "json" or "xml". json = json response format and xml = xml
response format.
lang (str): "E" or "F". E = English and F = French.
dguid (str): Dissemination Geography Unique Identifier - DGUID. It is an
Expand All @@ -558,7 +559,7 @@ def stat_can_request(type, lang="E", dguid="2016A000011124", topic=0, notes=0, s
"""
prepared_url = (
"https://www12.statcan.gc.ca/rest/census-recensement"
f"/CPR2016.{type}?lang={lang}&dguid={dguid}&topic="
f"/CPR2016.{response_format}?lang={lang}&dguid={dguid}&topic="
f"{topic}&notes={notes}&stat={stat}"
)

Expand Down Expand Up @@ -610,10 +611,11 @@ def stat_can_request(type, lang="E", dguid="2016A000011124", topic=0, notes=0, s
return response_json


def stat_can_geo_request(type="json", lang="E", geos="PR", cpt="00"):
"""
def stat_can_geo_request(response_format="json", lang="E", geos="PR", cpt="00"):
"""Send a request to the StatCan API via HTTP GET and return the JSON response.
Args:
type (str): "json" or "xml". json = json response format and xml = xml
response_format (str): "json" or "xml". json = json response format and xml = xml
response format.
lang (str): "E" or "F". where: E = English F = French.
geos (str): one geographic level code (default = PR). where: CD = Census
@@ -630,9 +632,7 @@ def stat_can_geo_request(type="json", lang="E", geos="PR", cpt="00"):
35 = Ontario 46 = Manitoba 47 = Saskatchewan 48 = Alberta 59 =
British Columbia 60 = Yukon 61 = Northwest Territories 62 = Nunavut.
"""
prepared_url = (
f"https://www12.statcan.gc.ca/rest/census-recensement/CR2016Geo.{type}?lang={lang}&geos={geos}&cpt={cpt}"
)
prepared_url = f"https://www12.statcan.gc.ca/rest/census-recensement/CR2016Geo.{response_format}?lang={lang}&geos={geos}&cpt={cpt}"

cached_response_json = get_from_cache(prepared_url)

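
The `dataportal.py` changes rename the `type` argument to `response_format`, which is what the flake8-builtins (A) rules flag: a parameter named `type` shadows the built-in inside the function body. A hedged before/after sketch with an invented URL and function names:

```python
# Illustrative only: the cost of shadowing the `type` builtin (ruff A002).


def build_url_shadowed(type="json", lang="E"):  # noqa: A002  (shadows the builtin)
    # Inside this body, `type(...)` no longer refers to the builtin.
    return f"https://example.invalid/CPR2016.{type}?lang={lang}"


def build_url(response_format: str = "json", lang: str = "E") -> str:
    # Renamed parameter: the builtin stays usable and the intent is clearer.
    assert type(response_format) is str
    return f"https://example.invalid/CPR2016.{response_format}?lang={lang}"


if __name__ == "__main__":
    print(build_url("xml", lang="F"))
```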
