Skip to content

Commit

Permalink
Add pylint checks
Browse files — browse the repository at this point in the history
  • Loading branch information
facetoe committed Apr 27, 2020
1 parent 4984de5 commit b79cbbc
Show file tree
Hide file tree
Showing 13 changed files with 1,030 additions and 23 deletions.
52 changes: 52 additions & 0 deletions .pylintrc
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
# Pylint configuration: enforces a double-quoted string style via the
# pylint_quotes plugin and silences checks this project does not want.
[MASTER]
# Third-party plugin that checks string quote consistency.
load-plugins=pylint_quotes

# pylint_quotes settings: prefer double quotes everywhere; "avoid-escape"
# permits single quotes only when they prevent escaping an embedded quote.
string-quote=double-avoid-escape
triple-quote=double
docstring-quote=double

[MESSAGES CONTROL]
# Checks deliberately disabled project-wide (mostly subjective style and
# complexity thresholds).
disable=
abstract-method,
bad-continuation,
chained-comparison, # R1716: Simplify chained comparison between the operands
duplicate-code,
fixme,
invalid-name,
len-as-condition,
missing-docstring,
no-else-return,
no-else-raise,
no-self-use,
superfluous-parens,
too-few-public-methods,
too-many-ancestors,
too-many-arguments,
too-many-boolean-expressions,
too-many-branches,
too-many-instance-attributes,
too-many-lines,
too-many-locals,
too-many-nested-blocks,
too-many-public-methods,
too-many-statements,
ungrouped-imports,
unused-argument,
wrong-import-order,
line-too-long,
no-else-continue,
no-else-break,
import-outside-toplevel

[FORMAT]
# line-too-long is disabled above, so this limit is informational only.
max-line-length=125

[REPORTS]
# Plain-text output with no summary report or numeric score.
output-format=text
reports=no
score=no

[TYPECHECK]
# Classes whose members pylint cannot introspect reliably.
ignored-classes=PurePath,responses
# C-extension packages pylint is allowed to import for member checking.
extension-pkg-whitelist=cassandra.cluster,cassandra.metadata,cassandra.query

4 changes: 2 additions & 2 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
short_ver = 0.0.1
long_ver = $(shell git describe --long 2>/dev/null || echo $(short_ver)-0-unknown-g`git describe --always`)

all:
all:

PYTHON ?= python3
PYTHON_SOURCE_DIRS = rpm_s3_mirror/ tests/
Expand All @@ -28,7 +28,7 @@ build-dep-fed:
python3-boto3 \
python3-lxml

test: copyright unittest
test: copyright pylint unittest

.PHONY: copyright
copyright:
Expand Down
6 changes: 3 additions & 3 deletions rpm_s3_mirror/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,9 @@
from rpm_s3_mirror.config import JSONConfig, ENVConfig
from rpm_s3_mirror.mirror import Mirror

logging.getLogger('boto').setLevel(logging.WARNING)
logging.getLogger('botocore').setLevel(logging.WARNING)
logging.getLogger('urllib3').setLevel(logging.WARNING)
logging.getLogger("boto").setLevel(logging.WARNING)
logging.getLogger("botocore").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.basicConfig(level=logging.DEBUG)


Expand Down
2 changes: 1 addition & 1 deletion rpm_s3_mirror/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def _populate_required(self):
elif key == "max_workers":
value = int(value)
elif key == "bootstrap":
value = True if value.lower() == "true" else False
value = value.lower() == "true"
self._config[key] = value


Expand Down
2 changes: 1 addition & 1 deletion rpm_s3_mirror/mirror.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def sync(self):
mirror_repository = RPMRepository(base_url=self._build_s3_url(upstream_repository))
last_check_time = self.s3.repomd_update_time(base_url=mirror_repository.base_url)
if not upstream_repository.has_updates(since=last_check_time):
self.log.info(f"Skipping repository with no updates since: {last_check_time}")
self.log.info("Skipping repository with no updates since: %s", last_check_time)
continue

# Extract our metadata and detect any new/updated packages.
Expand Down
14 changes: 7 additions & 7 deletions rpm_s3_mirror/repository.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
from typing import Iterator, Dict
from urllib.parse import urlparse

from lxml.etree import fromstring, Element
from lxml.etree import XMLParser
from lxml.etree import fromstring, Element # pylint: disable=no-name-in-module
from lxml.etree import XMLParser # pylint: disable=no-name-in-module
from dateutil.parser import parse
from tempfile import TemporaryDirectory
import os
Expand All @@ -33,7 +33,7 @@ def safe_parse_xml(xml_string: bytes) -> Element:

def download_repodata_section(section, request, destination_dir) -> str:
    """Stream a repodata section from an HTTP response to disk and verify it.

    Args:
        section: repodata section metadata providing ``location``,
            ``checksum_type`` and ``checksum``.
        request: streaming HTTP response whose ``raw`` file-like body is copied.
        destination_dir: directory the file is written into.

    Returns:
        The local path the section was written to.

    Raises:
        Whatever ``validate_checksum`` raises when the downloaded bytes do not
        match the expected checksum.
    """
    # NOTE(review): the scraped diff showed both the pre- and post-change
    # open() lines; this is the post-change (double-quoted) version.
    local_path = join(destination_dir, os.path.basename(section.location))
    with open(local_path, "wb") as out:
        shutil.copyfileobj(request.raw, out)
    validate_checksum(path=local_path, checksum_type=section.checksum_type, checksum=section.checksum)
    return local_path
Expand Down Expand Up @@ -101,7 +101,7 @@ def __init__(self, base_url: str, packages_xml: bytes):
self.root = safe_parse_xml(packages_xml)

def __len__(self) -> int:
    """Package count as declared by the root element's ``packages`` attribute."""
    # The diff rendered both quote styles of this line; keep the post-change form.
    return int(self.root.get("packages"))

def __iter__(self) -> Iterator[Package]:
for package_element in self.root:
Expand Down Expand Up @@ -145,12 +145,12 @@ def _extract_package_list(self, primary: RepodataSection) -> PackageList:

def parse_repomd(self, xml: Element) -> Dict[str, RepodataSection]:
sections = {}
for data_element in xml.findall(f'repo:data', namespaces=namespaces):
for data_element in xml.findall(f"repo:data", namespaces=namespaces):
section_type = data_element.attrib["type"]
section = {}
for element in xml.findall(f'repo:data[@type="{section_type}"]/repo:*', namespaces=namespaces):
for element in xml.findall(f"repo:data[@type='{section_type}']/repo:*", namespaces=namespaces):
# Strip the namespace from the tag as it is annoying
_, _, key = element.tag.partition('}')
_, _, key = element.tag.partition("}")
value = element.text
if key == "location":
value = element.get("href")
Expand Down
10 changes: 5 additions & 5 deletions rpm_s3_mirror/s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,13 +113,13 @@ def repomd_update_time(self, base_url: str) -> datetime:
def _sync_objects(self, temp_dir: str, repo_objects: Iterable[Package], skip_existing: bool):
    """Upload every repo object to the mirror in parallel, logging timing.

    Args:
        temp_dir: scratch directory handed to each per-object sync call.
        repo_objects: objects to mirror; must support ``len()``.
        skip_existing: forwarded to ``_sync_object`` to skip objects already
            present at the destination.
    """
    sync = functools.partial(self._sync_object, temp_dir, skip_existing)
    start = time.time()
    self.log.info("Beginning sync of %s objects.", len(repo_objects))
    with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
        # Iterate the map() generator so any exception raised in a worker
        # thread propagates here instead of being silently dropped.
        for _ in executor.map(sync, repo_objects):
            pass
    elapsed = int(time.time() - start)
    self.log.info("Completed syncing %s objects in %s seconds", len(repo_objects), elapsed)

def _sync_object(self, temp_dir: str, skip_existing: bool, repo_object: Union[Package, RepodataSection]):
if skip_existing and self._object_exists(repo_object.destination):
Expand All @@ -131,15 +131,15 @@ def _sync_object(self, temp_dir: str, skip_existing: bool, repo_object: Union[Pa
self._put_object(package_path, repo_object.destination)
try:
os.unlink(package_path)
except Exception as e:
except Exception as e: # pylint: disable=broad-except
self.log.debug("Failed to unlink %s: %s", package_path, e)

def _download_file(self, temp_dir: str, url: str) -> str:
    """Download ``url`` into ``temp_dir`` and return the local file path.

    Streams the body to disk, raising for any non-2xx HTTP status.
    """
    self.log.debug("GET: %s", url)
    with self.session.get(url, stream=True) as request:
        request.raise_for_status()
        out_path = join(temp_dir, os.path.basename(url))
        # Post-change form of the diff: double-quoted mode string.
        with open(out_path, "wb") as f:
            shutil.copyfileobj(request.raw, f)
        return out_path

Expand Down Expand Up @@ -224,4 +224,4 @@ def _build_md5_header(self, fp: BinaryIO) -> str:
while data:
h.update(data)
data = fp.read(1000000)
return base64.b64encode(h.digest()).decode('utf-8')
return base64.b64encode(h.digest()).decode("utf-8")
4 changes: 2 additions & 2 deletions rpm_s3_mirror/statsd.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,12 @@ def __init__(
host="127.0.0.1",
port=8125,
tags=None,
format="telegraf",
metric_format="telegraf",
):
self._dest_addr = (host, port)
self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self._tags = tags or {}
self._message_format = format
self._message_format = metric_format

def gauge(self, metric, value, tags=None):
self._send(metric, b"g", value, tags)
Expand Down
4 changes: 2 additions & 2 deletions rpm_s3_mirror/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ def validate_checksum(path, checksum_type, checksum) -> None:
def get_requests_session() -> Session:
    """Build a requests ``Session`` that retries transient server errors.

    Retries up to 5 times with backoff factor 0.1 on HTTP 500/502/503/504,
    for both http:// and https:// URLs.
    """
    session = requests.session()
    retries = Retry(total=5, backoff_factor=0.1, status_forcelist=[500, 502, 503, 504])
    # The diff rendered both quote styles of these mounts; keep the
    # post-change (double-quoted) lines only.
    session.mount("http://", HTTPAdapter(max_retries=retries))
    session.mount("https://", HTTPAdapter(max_retries=retries))
    return session


Expand Down
1 change: 1 addition & 0 deletions tests/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Copyright (c) 2020 Aiven, Helsinki, Finland. https://aiven.io/
12 changes: 12 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Copyright (c) 2020 Aiven, Helsinki, Finland. https://aiven.io/

import os
import pytest


@pytest.fixture(name="test_package_list_xml")
def test_package_list_xml():
    """Return the raw bytes of ``tests/resources/primary.xml`` for tests."""
    resources_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "resources")
    with open(os.path.join(resources_dir, "primary.xml"), "rb") as fixture_file:
        return fixture_file.read()
Loading

0 comments on commit b79cbbc

Please sign in to comment.