
Commit

Merge branch 'contrib/tarraschk_cyberwatch' into cyberwatch
tarraschk authored Jul 11, 2024
2 parents 643bcf1 + 2d87a6c commit a748245
Showing 200 changed files with 1,858 additions and 627 deletions.
2 changes: 1 addition & 1 deletion .github/content_roles.json
@@ -1,6 +1,6 @@
{
"CONTRIBUTION_REVIEWERS": [
"MosheEichler",
"barryyosi-panw",
"samuelFain",
"israelpoli"
],
119 changes: 92 additions & 27 deletions .github/github_workflow_scripts/handle_external_pr.py
@@ -1,6 +1,5 @@
#!/usr/bin/env python3
import json
import os
from pathlib import Path
import sys
import urllib3
@@ -9,7 +8,7 @@
from git import Repo
from github.PullRequest import PullRequest
from github.Repository import Repository
from demisto_sdk.commands.common.tools import get_pack_metadata
from demisto_sdk.commands.common.tools import get_pack_metadata, get_yaml
from demisto_sdk.commands.content_graph.objects.base_content import BaseContent
from demisto_sdk.commands.content_graph.objects.integration import Integration
from demisto_sdk.commands.common.content_constant_paths import CONTENT_PATH
@@ -21,8 +20,9 @@
timestamped_print,
Checkout,
get_content_reviewers,
get_support_level,
get_content_roles,
get_support_level
get_metadata
)
from demisto_sdk.commands.common.tools import get_pack_name
from urllib3.exceptions import InsecureRequestWarning
@@ -167,7 +167,8 @@ def packs_to_check_in_pr(file_paths: list[str]) -> set:
return pack_dirs_to_check


def get_packs_support_level_label(file_paths: list[str], external_pr_branch: str, repo_name: str = 'content') -> str:
def get_packs_support_level_label(file_paths: list[str], external_pr_branch: str, remote_fork_owner: str,
repo_name: str = 'content') -> str:
"""
    Get the contributions' support level label.
@@ -183,6 +184,7 @@ def get_packs_support_level_label(file_paths: list[str], external_pr_branch: str
Args:
file_paths(str): file paths
external_pr_branch (str): the branch of the external PR.
        remote_fork_owner (str): the owner of the remote fork
repo_name(str): the name of the forked repo (without the owner)
Returns:
@@ -198,12 +200,11 @@ def get_packs_support_level_label(file_paths: list[str], external_pr_branch: str
f'to retrieve support level of {pack_dirs_to_check_support_levels_labels}'
)
try:
fork_owner = os.getenv('GITHUB_ACTOR')
with Checkout(
repo=Repo(Path().cwd(), search_parent_directories=True),
branch_to_checkout=external_pr_branch,
# in marketplace contributions the name of the owner should be xsoar-contrib
fork_owner=fork_owner if fork_owner != 'xsoar-bot' else 'xsoar-contrib',
fork_owner=remote_fork_owner if remote_fork_owner != 'xsoar-bot' else 'xsoar-contrib',
repo_name=repo_name
):
packs_support_levels = get_support_level(pack_dirs_to_check_support_levels_labels)
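
For context, get_support_level (imported above) resolves each changed pack's support level from its pack metadata while the fork branch is checked out. A minimal sketch of the idea, assuming the helper simply collects the "support" values from each pack_metadata.json (an approximation, not the repository's exact implementation):

# Rough sketch only: collect support levels from each changed pack's metadata.
def sketch_collect_support_levels(pack_dirs: set[str]) -> set[str]:
    support_levels: set[str] = set()
    for pack_dir in pack_dirs:
        if metadata := get_pack_metadata(pack_dir):  # reads the pack's pack_metadata.json
            if support := metadata.get("support"):
                support_levels.add(support.lower())
    return support_levels  # e.g. {"partner", "community"}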
@@ -253,35 +254,96 @@ def is_requires_security_reviewer(pr_files: list[str]) -> bool:
return False


def is_tim_content(pr_files: list[str]) -> bool:
def check_if_item_is_tim(content_object: BaseContent | None) -> bool:
"""
This is where the actual search for feed:True or relevant tags or categories are being searched
    according to the logic in is_tim_reviewer_needed
Checks whether a given object (graph object) is a feed or related to TIM
Arguments:
- pr_files: List[str] The list of files changed in the Pull Request.
- `content_object`: ``BaseContent``: Content object taken from the graph
Returns: returns True or False if tim reviewer needed
Returns: `bool` whether the content object is a feed or has the relevant tags/categories
"""
integrations_checked = []
for file in pr_files:
if 'CONTRIBUTORS.json' in file:
continue
integration = BaseContent.from_path(CONTENT_PATH / file)
if not isinstance(integration, Integration) or integration.path in integrations_checked:
continue
integrations_checked.append(integration.path)
if integration.is_feed:
return True
pack = integration.in_pack
if isinstance(content_object, Integration) and content_object.is_feed:
return True
try:
pack = content_object.in_pack # type: ignore
tags = pack.tags
categories = pack.categories
if TIM_TAGS in tags or TIM_CATEGORIES in categories:
return True
    except Exception as er:
        print(f"The pack is not TIM: {er}")
    return False
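
A hedged usage sketch of this helper; the pack path below is invented for illustration and is not part of the change:

# Illustrative usage only - the pack path is hypothetical.
obj = BaseContent.from_path(CONTENT_PATH / 'Packs/ExampleFeed/Integrations/ExampleFeed/ExampleFeed.yml')
print(check_if_item_is_tim(obj))  # True for a feed integration or an item in a TIM-tagged pack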


def check_files_of_pr_manually(pr_files: list[str]) -> bool:
"""
    If the checkout of the branch has failed, this function goes over the changed files manually and checks whether
    the contribution needs to be reviewed by a TIM owner.
    Arguments:
    - `pr_files`: ``List[str]``: The list of files changed in the Pull Request. Used to determine
        whether a TIM reviewer is required for the review.
    Returns: `bool` whether a TIM reviewer should be assigned
"""
pack_dirs_to_check = packs_to_check_in_pr(pr_files)
pack_metadata_list = get_metadata(pack_dirs_to_check)
for file in pr_files:
if "yml" in file and "Integrations" in file:
content_yml = get_yaml(file_path=file)
            is_feed = content_yml.get("script", {}).get("feed", False)
print(f'Is it a feed: {is_feed}')
if is_feed:
return True
for pack_metadata in pack_metadata_list:
print(f'the metadata is: {pack_metadata}')
tags = pack_metadata.get("tags")
categories = pack_metadata.get("categories")
if TIM_TAGS in tags or TIM_CATEGORIES in categories: # type: ignore
return True
return False
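
As a rough illustration of what this manual fallback inspects, a TIM pack's pack_metadata.json carries tags and categories along these lines (the values are invented; the real check compares them against the module's TIM_TAGS and TIM_CATEGORIES constants):

# Invented example of the metadata fields the fallback checks.
example_pack_metadata = {
    "name": "Example Threat Intel Pack",
    "tags": ["Threat Intelligence Management"],
    "categories": ["Data Enrichment & Threat Intelligence"],
}
# If TIM_TAGS / TIM_CATEGORIES match any of these values, the membership
# tests above mark the contribution for a TIM review.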


def is_tim_reviewer_needed(pr_files: list[str], support_label: str) -> bool:
def is_tim_content(pr_files: list[str], external_pr_branch: str, remote_fork_owner: str, repo_name: str) -> bool:
"""
    Checks whether the contributed content is TIM related, including when the pack is new and not yet part of master.
    The remote branch is checked out first, and the changed content objects are then inspected.
    Arguments:
    - `pr_files`: ``List[str]``: The list of files changed in the Pull Request. Used to determine
        whether a TIM reviewer is required for the review.
    - `external_pr_branch` (str): name of the branch to checkout
    - `remote_fork_owner` (str): name of the remote fork owner for the checkout
    - `repo_name` (str): name of the repository
    Returns: `bool` whether a TIM reviewer should be assigned
"""
try:
with Checkout(
repo=Repo(Path().cwd(), search_parent_directories=True),
branch_to_checkout=external_pr_branch,
# in marketplace contributions the name of the owner should be xsoar-contrib
fork_owner=remote_fork_owner if remote_fork_owner != 'xsoar-bot' else 'xsoar-contrib',
repo_name=repo_name
):
for file in pr_files:
if 'CONTRIBUTORS.json' in file or 'Author_image' in file or 'README.md' in file or ".pack-ignore" in file:
continue
content_object = BaseContent.from_path(CONTENT_PATH / file)
is_tim_needed = check_if_item_is_tim(content_object)
if is_tim_needed:
return True
except Exception as er:
print(f"couldn't checkout branch to get metadata, error is {er}")
# if the checkout didn't work for any reason, will try to go over files manually
return check_files_of_pr_manually(pr_files)
return False


def is_tim_reviewer_needed(pr_files: list[str], support_label: str, external_pr_branch: str,
remote_fork_owner: str, repo_name: str) -> bool:
"""
    Checks whether the PR needs to be reviewed by a TIM reviewer.
    It checks the yml file of the integration - whether it has feed: True
@@ -291,11 +353,14 @@ def is_tim_reviewer_needed(pr_files: list[str], support_label: str) -> bool:
Arguments:
    - pr_files: The list of files changed in the Pull Request
    - support_label: the support label of the PR - the highest one.
    - `external_pr_branch` (str): name of the external branch to checkout
    - `remote_fork_owner` (str): name of the remote fork owner for the checkout
    - `repo_name` (str): name of the external repository
    Returns: True or False if a TIM reviewer is needed
"""
if support_label in (XSOAR_SUPPORT_LEVEL_LABEL, PARTNER_SUPPORT_LEVEL_LABEL):
return is_tim_content(pr_files)
return is_tim_content(pr_files, external_pr_branch, remote_fork_owner, repo_name)
return False
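
An illustrative call of this gate, mirroring how main() invokes it (all argument values below are placeholders):

# Placeholder values for illustration only.
needs_tim = is_tim_reviewer_needed(
    pr_files=['Packs/ExampleFeed/Integrations/ExampleFeed/ExampleFeed.yml'],
    support_label=XSOAR_SUPPORT_LEVEL_LABEL,
    external_pr_branch='add-example-feed',
    remote_fork_owner='example-user',
    repo_name='content',
)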


@@ -427,9 +492,9 @@ def main():

pr_files = [file.filename for file in pr.get_files()]
print(f'{pr_files=} for {pr_number=}')

remote_fork_owner = pr.head.repo.full_name.split('/')[0]
labels_to_add = [CONTRIBUTION_LABEL, EXTERNAL_LABEL]
if support_label := get_packs_support_level_label(pr_files, pr.head.ref, repo_name):
if support_label := get_packs_support_level_label(pr_files, pr.head.ref, remote_fork_owner, repo_name):
labels_to_add.append(support_label)

# Add the initial labels to PR:
Expand Down Expand Up @@ -489,7 +554,7 @@ def main():
pr.add_to_labels(SECURITY_LABEL)

# adding TIM reviewer
if is_tim_reviewer_needed(pr_files, support_label):
if is_tim_reviewer_needed(pr_files, support_label, pr.head.ref, remote_fork_owner, repo_name):
reviewers.append(tim_reviewer)
pr.add_to_labels(TIM_LABEL)

50 changes: 37 additions & 13 deletions .github/github_workflow_scripts/utils.py
@@ -124,19 +124,21 @@ def __init__(self, repo: git.Repo, branch_to_checkout: str, fork_owner: str | No
self.repo.create_remote(name=forked_remote_name, url=url)
print(f'Successfully created remote {forked_remote_name} for repo {url}') # noqa: T201
except Exception as error:
print(f'could not create remote from {url}, {error=}') # noqa: T201
# handle the case where the name of the forked repo is not content
if github_event_path := os.getenv("GITHUB_EVENT_PATH"):
try:
payload = json.loads(github_event_path)
except ValueError:
print('failed to load GITHUB_EVENT_PATH') # noqa: T201
raise ValueError(f'cannot checkout to the forked branch {branch_to_checkout} of the owner {fork_owner}')
# forked repo name includes fork_owner + repo name, for example foo/content.
forked_repo_name = payload.get("pull_request", {}).get("head", {}).get("repo", {}).get("full_name")
self.repo.create_remote(name=forked_remote_name, url=f"https://github.com/{forked_repo_name}")
else:
raise
if f'{forked_remote_name} already exists' not in str(error):
print(f'could not create remote from {url}, {error=}') # noqa: T201
# handle the case where the name of the forked repo is not content
if github_event_path := os.getenv("GITHUB_EVENT_PATH"):
                        try:
                            payload = json.loads(Path(github_event_path).read_text())
except ValueError:
print('failed to load GITHUB_EVENT_PATH') # noqa: T201
raise ValueError(f'cannot checkout to the forked branch {branch_to_checkout} of the '
f'owner {fork_owner}')
# forked repo name includes fork_owner + repo name, for example foo/content.
forked_repo_name = payload.get("pull_request", {}).get("head", {}).get("repo", {}).get("full_name")
self.repo.create_remote(name=forked_remote_name, url=f"https://github.com/{forked_repo_name}")
else:
raise

forked_remote = self.repo.remote(forked_remote_name)
forked_remote.fetch(branch_to_checkout)
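
For context, the fields read from the event payload above follow the shape of GitHub's pull_request webhook event; a trimmed, illustrative payload (not a complete event) looks like this:

# Trimmed, illustrative GITHUB_EVENT_PATH payload - not a complete event.
example_event = {
    "pull_request": {
        "head": {
            "repo": {"full_name": "example-user/content"}
        }
    }
}
# "full_name" supplies the fallback remote URL, e.g. https://github.com/example-user/content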
@@ -327,3 +329,25 @@ def get_repo_path(path: str = ".") -> Path:
except (git.exc.InvalidGitRepositoryError, ValueError):
print("Unable to get repo root path. Terminating...")
sys.exit(1)


def get_metadata(pack_dirs: set[str]) -> list[dict]:
"""
Get the pack metadata.
Args:
pack_dirs (set): paths to the packs that were changed
Return:
        - a list of pack metadata dictionaries
"""
pack_metadata_list = []

for pack_dir in pack_dirs:
if pack_metadata := get_pack_metadata(pack_dir):
print(f"pack metadata was retrieved for pack {pack_dir}") # noqa: T201
pack_metadata_list.append(pack_metadata)
else:
            print(f'Could not find pack metadata for pack {pack_dir}')  # noqa: T201

return pack_metadata_list
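
A quick, hypothetical usage of this helper (the pack directories are placeholders):

# Illustrative only - the pack directories are placeholders.
metadata_list = get_metadata({'Packs/ExampleFeed', 'Packs/AnotherPack'})
for metadata in metadata_list:
    print(metadata.get('support'), metadata.get('tags'), metadata.get('categories'))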
@@ -74,7 +74,7 @@ configuration:
type: 8
required: false
- additionalinfo: How far back in time to go when performing the first fetch
defaultvalue: '14 days'
defaultvalue: 14 days
display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days)
name: fetch_time
type: 0
@@ -150,7 +150,7 @@ script:
name: limit
description: Gets the feed indicators.
name: acti-get-indicators
dockerimage: demisto/py3-tools:1.0.0.96102
dockerimage: demisto/py3-tools:1.0.0.102774
feed: true
runonce: false
script: '-'
6 changes: 6 additions & 0 deletions Packs/AccentureCTI_Feed/ReleaseNotes/1_1_37.md
@@ -0,0 +1,6 @@

#### Integrations

##### ACTI Indicator Feed

- Updated the Docker image to: *demisto/py3-tools:1.0.0.102774*.
2 changes: 1 addition & 1 deletion Packs/AccentureCTI_Feed/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Accenture CTI Feed",
"description": "Accenture Cyber Threat Intelligence Feed",
"support": "partner",
"currentVersion": "1.1.36",
"currentVersion": "1.1.37",
"author": "Accenture",
"url": "https://www.accenture.com/us-en/services/security/cyber-defense",
"email": "[email protected]",
2 changes: 1 addition & 1 deletion Packs/Armorblox/Playbooks/Armorblox_Needs_Review_README.md
@@ -32,4 +32,4 @@ This playbook does not use any sub-playbooks.

## Playbook Image
---
![Armorblox Needs Review](doc_files/Armorblox_Needs_Review.png)
![Armorblox Needs Review](../doc_files/Armorblox_Needs_Review.png)
@@ -32,7 +32,7 @@ script:
- name: asana-get-all-projects
arguments: []
description: gets the list of projects on asana
dockerimage: demisto/py3-tools:0.0.1.25751
dockerimage: demisto/py3-tools:1.0.0.102774
subtype: python3
fromversion: 6.0.0
tests:
6 changes: 6 additions & 0 deletions Packs/AsanaConnect/ReleaseNotes/1_0_3.md
@@ -0,0 +1,6 @@

#### Integrations

##### AsanaConnect

- Updated the Docker image to: *demisto/py3-tools:1.0.0.102774*.
2 changes: 1 addition & 1 deletion Packs/AsanaConnect/pack_metadata.json
@@ -2,7 +2,7 @@
"name": "Asana Connect",
"description": "Use the Asana Connect integration to connect to projects related to your Asana account.",
"support": "community",
"currentVersion": "1.0.2",
"currentVersion": "1.0.3",
"author": "zh-lim",
"url": "",
"email": "",
2 changes: 1 addition & 1 deletion Packs/Base/ReleaseNotes/1_34_26.md
@@ -3,4 +3,4 @@

##### CommonServerPython

- Removed support for the *DemistoWrapper* class in python2 integrations and scripts.
- Removed support for the *DemistoWrapper* class in python2 integrations and scripts. This fixes an issue where the following error was encountered for python2 integrations and scripts: `TypeError: super() argument 1 must be type, not classobj`
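
For background, this Python 2 error arises when super() receives an old-style class; a minimal reproduction under Python 2 (illustrative only, not taken from CommonServerPython):

# Python 2 sketch: Base is an old-style class (it does not inherit from object),
# so super() rejects it with "TypeError: super() argument 1 must be type, not classobj".
class Base:
    def run(self):
        pass

class Wrapper(Base):
    def run(self):
        super(Wrapper, self).run()  # raises the TypeError under Python 2

Wrapper().run()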