Skip to content

Commit

Permalink
[WIP] Adding a TimeTravel improver for nginx
Browse files Browse the repository at this point in the history
---------------------NOT READY FOR REVIEW YET--------------------

This is not the best way. Maybe we want a generic time-travel improver
which could run on multiple data sources. This is highly experimental and
is meant to serve as a proof of concept for the current model.
Currently, this is incomplete and doesn't work. The classes and
functions used in time travel have not been ported properly.

This commit might need a full rewrite

Signed-off-by: Hritik Vijay <[email protected]>
  • Loading branch information
Hritik14 committed Oct 12, 2021
1 parent 58fcd48 commit 56866f7
Show file tree
Hide file tree
Showing 3 changed files with 79 additions and 24 deletions.
13 changes: 7 additions & 6 deletions vulnerabilities/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ def requests_with_5xx_retry(max_retries=5, backoff_factor=0.5):

def nearest_patched_package(
vulnerable_packages: List[PackageURL], resolved_packages: List[PackageURL]
) -> List[AffectedPackage]:
) -> List[Tuple]:
class PackageURLWithVersionComparator:
"""
This class is used to get around bisect module's lack of supplying custom
Expand Down Expand Up @@ -158,11 +158,12 @@ def __lt__(self, other):
if patched_package_index < resolved_package_count:
patched_package = resolved_packages[patched_package_index].package

affected_package_with_patched_package_objects.append(
AffectedPackage(
vulnerable_package=vulnerable_package.package, patched_package=patched_package
)
)
# affected_package_with_patched_package_objects.append(
# AffectedPackage(
# vulnerable_package=vulnerable_package.package, patched_package=patched_package
# )
# )
affected_package_with_patched_package_objects.append((vulnerable_package.package, patched_package))

return affected_package_with_patched_package_objects

Expand Down
84 changes: 67 additions & 17 deletions vulnerabilities/importers/nginx.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,12 @@
from vulnerabilities.data_source import DataSource
from vulnerabilities.data_source import DataSourceConfiguration
from vulnerabilities.data_source import Reference
from vulnerabilities.data_inference import Inference
from vulnerabilities.data_inference import Improver
from vulnerabilities.package_managers import GitHubTagsAPI
from vulnerabilities.package_managers import Version
from vulnerabilities.helpers import nearest_patched_package

from vulnerabilities.models import Advisory

@dataclasses.dataclass
class NginxDataSourceConfiguration(DataSourceConfiguration):
Expand All @@ -51,21 +53,6 @@ class NginxDataSource(DataSource):

url = "http://nginx.org/en/security_advisories.html"

def set_api(self):
self.version_api = GitHubTagsAPI()
asyncio.run(self.version_api.load_api(["nginx/nginx"]))

# For some reason nginx tags it's releases are in the form of `release-1.2.3`
# Chop off the `release-` part here.
normalized_versions = set()
while self.version_api.cache["nginx/nginx"]:
version = self.version_api.cache["nginx/nginx"].pop()
normalized_version = Version(
version.value.replace("release-", ""), version.release_date
)
normalized_versions.add(normalized_version)
self.version_api.cache["nginx/nginx"] = normalized_versions

def advisory_data(self) -> List[AdvisoryData]:
adv_data = []
# self.set_api()
Expand Down Expand Up @@ -175,7 +162,9 @@ def extract_vuln_pkgs(self, vuln_info):
if "-" not in version_info:
# These are discrete versions
version_ranges.append(
VersionSpecifier.from_scheme_version_spec_string("semver", version_info[0])
VersionSpecifier.from_scheme_version_spec_string(
"semver", version_info[0]
)
)
continue

Expand All @@ -200,6 +189,67 @@ def extract_vuln_pkgs(self, vuln_info):
]


class NginxTimeTravel(Improver):
    """
    Improver that enriches nginx advisories with concrete affected/fixed
    package pairs by "time travelling" over the nginx GitHub tag history.
    """

    def infer(self):
        """
        Return a list of Inference objects, one per (vulnerable, patched)
        package pair found for each stored nginx advisory.
        """
        self.set_api()
        advisories = Advisory.objects.filter(
            source="vulnerabilities.importers.nginx.NginxDataSource"
        )
        inferences = []
        for advisory in advisories:
            advisory_data = AdvisoryData.from_json(advisory.data)

            # Expand the advisory's version ranges into the concrete
            # released versions known from the nginx/nginx git tags.
            known_versions = self.version_api.get("nginx/nginx").valid_versions

            affected_package_ranges = [
                pkg.version_specifier for pkg in advisory_data.affected_packages
            ]
            affected_package_versions = find_valid_versions(
                known_versions,
                affected_package_ranges,
            )

            # NOTE(review): this reads the ranges from affected_packages,
            # same as above — presumably it should read the advisory's
            # *fixed* package ranges instead; confirm against AdvisoryData.
            fixed_package_ranges = [
                pkg.version_specifier for pkg in advisory_data.affected_packages
            ]
            fixed_package_versions = find_valid_versions(
                known_versions, fixed_package_ranges
            )

            # Pair every vulnerable version with its nearest fix.  Pass the
            # expanded *versions*, not the raw ranges (the original WIP code
            # computed fixed_package_versions and then discarded it).
            pkgs = nearest_patched_package(
                affected_package_versions, fixed_package_versions
            )
            for vulnerable_package, patched_package in pkgs:
                inferences.append(
                    Inference(
                        confidence=90,  # TODO: Decide properly
                        vulnerability_id=advisory_data.vulnerability_id,
                        affected_packages=vulnerable_package,
                        fixed_packages=patched_package,
                    )
                )

        return inferences

    def set_api(self):
        """
        Populate ``self.version_api`` with the nginx release tags from
        GitHub, normalized to plain version strings.
        """
        self.version_api = GitHubTagsAPI()
        asyncio.run(self.version_api.load_api(["nginx/nginx"]))

        # nginx tags its releases in the form `release-1.2.3`.
        # Chop off the `release-` prefix here.
        normalized_versions = set()
        while self.version_api.cache["nginx/nginx"]:
            version = self.version_api.cache["nginx/nginx"].pop()
            normalized_version = Version(
                version.value.replace("release-", ""), version.release_date
            )
            normalized_versions.add(normalized_version)
        self.version_api.cache["nginx/nginx"] = normalized_versions


def find_valid_versions(versions, version_ranges):
valid_versions = set()
for version in versions:
Expand Down
6 changes: 5 additions & 1 deletion vulnerabilities/improvers/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
# NOTE(review): importing from vulnerabilities.importers here risks a
# circular import (importers.nginx imports vulnerabilities.models) —
# confirm the import graph before merging.
from vulnerabilities.importers.nginx import NginxTimeTravel
from . import default

# All improvers that the improve pipeline should run, in order.
IMPROVER_REGISTRY = [
    default.DefaultImprover,
    NginxTimeTravel,
]

# Map the fully qualified class name ("module.ClassName") of each improver
# to its class, so improvers can be looked up by name.
improver_mapping = {f"{x.__module__}.{x.__name__}": x for x in IMPROVER_REGISTRY}

0 comments on commit 56866f7

Please sign in to comment.