-
Notifications
You must be signed in to change notification settings - Fork 1
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
New script requested_changes #65
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,371 @@ | ||
import json | ||
import logging | ||
import os | ||
import re | ||
import time | ||
from datetime import datetime | ||
|
||
import psycopg2 | ||
import requests | ||
|
||
# Root-logger setup: timestamped INFO-level messages for the whole script.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Wall-clock start; used at the bottom of the script to report total runtime.
start_time = time.time()

logging.info("-------------------------REQUEST CHANGES SCRIPT IS RUNNING-------------------------")

# Gitea API root and one shared HTTP session (reuses connections across calls).
gitea_api_endpoint = "https://gitea.eco.tsi-dev.otc-service.com/api/v1"
session = requests.Session()
gitea_token = os.getenv("GITEA_TOKEN")

# Postgres connection settings, all sourced from the environment
# (presence is validated later by check_env_variables()).
db_host = os.getenv("DB_HOST")
db_port = os.getenv("DB_PORT")
db_csv = os.getenv("DB_CSV")  # main postgres db, where open PRs tables for both public and hybrid clouds are stored
db_user = os.getenv("DB_USER")
db_password = os.getenv("DB_PASSWORD")
|
||
|
||
def check_env_variables():
    """Fail fast if any required configuration variable is unset.

    Raises:
        Exception: naming the first missing environment variable.
    """
    required_env_vars = [
        "DB_HOST", "DB_PORT",
        "DB_CSV", "DB_USER", "DB_PASSWORD", "GITEA_TOKEN"
    ]
    for var in required_env_vars:
        if os.getenv(var) is None:
            # Bug fix: Exception("...%s", var) stores two separate args and
            # never interpolates the name; format the message explicitly.
            raise Exception(f"Missing environment variable: {var}")
|
||
|
||
def connect_to_db(db_name):
    """Open a psycopg2 connection to *db_name* on the configured host.

    Returns the connection object, or None when the connection attempt
    fails (the error is logged, not raised).
    """
    logging.info("Connecting to Postgres (%s)...", db_name)
    conn = None
    try:
        conn = psycopg2.connect(
            host=db_host,
            port=db_port,
            user=db_user,
            password=db_password,
            dbname=db_name,
        )
    except psycopg2.Error as e:
        logging.error("Connecting to Postgres: an error occurred while trying to connect to the database: %s", e)
    return conn
|
||
|
||
def create_prs_table(conn, cur, table_name):
    """Create the stalled-PR tracking table *table_name* if it is absent.

    The error path only logs; callers proceed regardless (matching the
    script's best-effort style).
    """
    # Table name is a script-internal constant, so direct interpolation
    # into the DDL is safe here.
    ddl = f'''CREATE TABLE IF NOT EXISTS {table_name} (
            id SERIAL PRIMARY KEY,
            "PR Number" VARCHAR(255),
            "Service Name" VARCHAR(255),
            "Squad" VARCHAR(255),
            "PR URL" VARCHAR(255),
            "Days passed" INT,
            "Reviewer" VARCHAR(255),
            "Parent PR Status" VARCHAR(255)
            );'''
    try:
        cur.execute(ddl)
    except psycopg2.Error as e:
        logging.error("Tables creating: an error occurred while trying to create a table %s in the database: %s",
                      table_name, e)
    else:
        conn.commit()
        logging.info("Table %s has been created successfully", table_name)
|
||
|
||
def get_repos(cur, rtc):
    """Return distinct public-cloud repository names from the *rtc* table.

    Returns an empty list when nothing is found or the query fails
    (failures are logged, not raised).
    """
    found = []
    try:
        cur.execute(f"SELECT DISTINCT \"Repository\" FROM {rtc} WHERE \"Env\" = 'public';")
        for row in cur.fetchall():
            found.append(row[0])
        if not found:
            logging.info("No repositories found.")
    except Exception as e:
        logging.error("Fetching repos: %s", e)
    return found
|
||
|
||
def get_pr_number(org, repo):
    """Collect the numbers of all open pull requests in *org*/*repo*.

    Pages through the Gitea API by following the Link response header.
    Returns a list of {'pr_number': int} dicts; an empty list when the
    repository is missing or a request fails.
    """
    page = 1
    pr_details = []
    while True:
        try:
            repo_resp = session.get(f"{gitea_api_endpoint}/repos/{org}/{repo}/pulls?state=open&page={page}"
                                    f"&limit=1000&token={gitea_token}")
            repo_resp.raise_for_status()
            pull_requests = json.loads(repo_resp.content.decode())
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 404:
                logging.info("No repository or pull requests found in %s (404 error). Skipping.", repo)
                return []
            else:
                logging.error("Error checking pull requests in %s: %s", repo, e)
                return []
        except requests.exceptions.RequestException as e:
            # Bug fix: connection/timeout errors previously propagated out of
            # this function and aborted the whole script run; handle them like
            # the sibling functions do.
            logging.error("Error checking pull requests in %s: %s", repo, e)
            return []
        except json.JSONDecodeError as e:
            logging.error("Error occurred while trying to decode JSON: %s", e)
            break

        if not pull_requests:
            break

        for pr in pull_requests:
            pr_details.append({'pr_number': pr['number']})

        # Gitea paginates; a rel="next" link means more pages remain.
        link_header = repo_resp.headers.get("Link")
        if link_header is None or "rel=\"next\"" not in link_header:
            break
        else:
            page += 1

    return pr_details
|
||
|
||
def convert_iso_to_datetime(iso_str):
    """Parse an ISO-8601 timestamp, mapping a trailing 'Z' to a UTC offset.

    Needed because datetime.fromisoformat() does not accept the 'Z'
    suffix on older Python versions.
    """
    normalized = iso_str.replace('Z', '+00:00')
    return datetime.fromisoformat(normalized)
|
||
|
||
def process_pr_reviews(org, repo, pr_number, changes_tab):
    """Inspect the latest review of a PR and, if it requests changes,
    delegate to get_last_commit() to decide which table the PR belongs in.

    Returns None in all paths; errors are logged and swallowed.
    """
    try:
        reviews_resp = session.get(f"{gitea_api_endpoint}/repos/{org}/{repo}/pulls/{pr_number}/reviews?token="
                                   f"{gitea_token}")
        reviews_resp.raise_for_status()
        reviews = json.loads(reviews_resp.content.decode())
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 404:
            logging.info("No reviews found for PR %s in %s (404 error). Skipping.", pr_number, repo)
        else:
            # Bug fix: non-404 HTTP errors previously fell through this branch
            # silently (no log, no return) and the function kept running with
            # an empty review list.
            logging.error("Error occurred while trying to get PR %s reviews: %s", pr_number, e)
        return
    except requests.exceptions.RequestException as e:
        logging.error("Error occurred while trying to get PR %s reviews: %s", pr_number, e)
        return
    except json.JSONDecodeError as e:
        logging.error("Error occurred while trying to decode JSON: %s", e)
        return

    # Only the most recent review decides the current state of the PR.
    final_review = reviews[-1] if reviews else None
    if final_review and final_review['state'] == "REQUEST_CHANGES":
        last_review_date = convert_iso_to_datetime(final_review["updated_at"])
        reviewer_login = final_review['user']['login']
        get_last_commit(org, repo, pr_number, reviewer_login, last_review_date, changes_tab)
|
||
|
||
def get_last_commit(org, repo, pr_number, reviewer_login, last_review_date, changes_tab):
    """Compare the PR's first listed commit with the last review date and
    record the PR in the appropriate table.

    Writes via the module-level conn_csv/cur_csv connection: commit older
    than the review goes into *changes_tab* (author stalled), commit newer
    goes into "our_side_problem" (review stalled). Commits authored by the
    reviewer are ignored either way. Always returns None.
    """
    try:
        commits_resp = session.get(f"{gitea_api_endpoint}/repos/{org}/{repo}/pulls/{pr_number}/commits?token="
                                   f"{gitea_token}")
        commits_resp.raise_for_status()
        commits = json.loads(commits_resp.content.decode())
    except requests.exceptions.RequestException as e:
        logging.error("Error occurred while trying to get commits for PR %s: %s", pr_number, e)
        return None
    except json.JSONDecodeError as e:
        logging.error("Error occurred while trying to decode JSON: %s", e)
        return None

    if not commits:
        return None

    # NOTE(review): assumes the first element is the relevant commit —
    # confirm against the Gitea API's commit ordering.
    head = commits[0]
    commit_date = convert_iso_to_datetime(head["commit"]["committer"]["date"])
    author_info = head.get("author")
    author_login = author_info.get("login") if author_info else None

    if author_login != reviewer_login:
        if commit_date < last_review_date:
            insert_data_postgres(org, repo, pr_number, conn_csv, cur_csv, last_review_date, changes_tab)
        elif commit_date > last_review_date:
            insert_data_postgres(org, repo, pr_number, conn_csv, cur_csv, commit_date, "our_side_problem")
    return None
|
||
|
||
def insert_data_postgres(org, repo, pr_number, conn, cur, activity_date, changes_tab):
    """Insert one stalled-PR row into *changes_tab*.

    *activity_date* is the event that makes the PR stalled (last review or
    last commit); "Days passed" is the age of that event in days. The
    "Parent PR Status" column starts as 'No changes requested' and may be
    revised later by parent_pr_changes_check().
    """
    from datetime import timezone  # file-level import only binds `datetime`

    try:
        filtered_reviews_resp = session.get(f"{gitea_api_endpoint}/repos/{org}/{repo}/pulls/{pr_number}/"
                                            f"reviews?token={gitea_token}")
        filtered_reviews_resp.raise_for_status()
        filtered_reviews = json.loads(filtered_reviews_resp.content.decode())
    except requests.exceptions.RequestException as e:
        logging.error("Error occurred while trying to get PR %s reviews: %s", pr_number, e)
        return
    except json.JSONDecodeError as e:
        logging.error("Error occurred while trying to decode JSON: %s", e)
        return

    if not filtered_reviews:
        logging.info("No reviews found for PR %s in %s.", pr_number, repo)
        return
    final_review = filtered_reviews[-1]
    pr_url = final_review["pull_request_url"]
    last_activity_date = activity_date.date() if isinstance(activity_date, datetime) else activity_date
    # Bug fix: datetime.utcnow() is deprecated (Python 3.12) and returns a
    # naive datetime; take today's date from an explicitly UTC-aware "now".
    current_date = datetime.now(timezone.utc).date()
    days_since_last_activity = (current_date - last_activity_date).days
    reviewer_name = final_review['user']['full_name']

    # Values are parameterized; only the table name (a script-internal
    # constant) is interpolated into the statement.
    cur.execute(f"""
        INSERT INTO {changes_tab} ("PR Number", "Service Name", "Squad", "PR URL", "Days passed", "Reviewer",
        "Parent PR Status")
        VALUES (%s, %s, %s, %s, %s, %s, %s);
    """, (pr_number, repo, '', pr_url, days_since_last_activity, reviewer_name, 'No changes requested'))
    conn.commit()
|
||
|
||
def parent_pr_changes_check(cur, conn, org, changes_tab):
    """For every tracked PR, inspect its auto-created parent PR and mark
    rows whose parent currently has changes requested.

    Parent PRs are detected by the auto-generated body text, which embeds
    "owner/repo#number" of the parent. Errors on individual PRs are logged
    and skipped so one failure cannot abort the whole scan.
    """
    try:
        cur.execute(f"SELECT \"PR Number\", \"Service Name\" FROM {changes_tab}")
        records = cur.fetchall()
    except Exception as e:
        logging.error("Fetching PR numbers: %s", e)
        return

    # Bug fix: the old {repo: pr_number} dict collapsed multiple PRs in the
    # same repository so only the last one was checked; walk the rows directly.
    for pr_number, repo in records:
        try:
            pr_resp = session.get(f"{gitea_api_endpoint}/repos/{org}/{repo}/pulls/{pr_number}?token={gitea_token}")
            pr_resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 404:
                logging.info("No repository or pull requests found in %s (404 error). Skipping.", repo)
            else:
                logging.error("Error checking pull requests in %s: %s", repo, e)
            # Bug fix: a failure on one PR used to `return False` and abort
            # the remaining PRs; skip just this one instead.
            continue
        except requests.exceptions.RequestException as e:
            logging.error("Error occurred while trying to get PR %s in repo %s: %s", pr_number, repo, e)
            continue

        try:
            parent_pr = json.loads(pr_resp.content.decode())
        except json.JSONDecodeError as e:
            logging.error("Error occurred while trying to decode JSON: %s", e)
            continue

        body = parent_pr["body"]
        if not body.startswith("This is an automatically created Pull Request"):
            continue

        match_repo = re.search(r"(?<=/).+(?=#)", str(body))
        if match_repo is None:
            # Bug fix: .group(0) on a None match raised AttributeError when
            # the body did not embed an "owner/repo#N" reference.
            logging.error("Could not extract parent repo from PR %s body in %s. Skipping.", pr_number, repo)
            continue
        repo_name = match_repo.group(0)
        parent_pr_number = extract_number_from_body(body)

        try:
            parent_reviews_resp = session.get(f"{gitea_api_endpoint}/repos/{org}/{repo_name}/pulls/"
                                              f"{parent_pr_number}/reviews?token={gitea_token}")
            parent_reviews_resp.raise_for_status()
            parent_reviews = json.loads(parent_reviews_resp.content.decode())
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 404:
                logging.info("No reviews found for PR %s in %s (404 error). Skipping.", parent_pr_number, repo_name)
            else:
                logging.error("Error occurred while trying to get PR %s reviews: %s", parent_pr_number, e)
            continue
        except requests.exceptions.RequestException as e:
            logging.error("Error occurred while trying to get PR %s reviews: %s", parent_pr_number, e)
            continue
        except json.JSONDecodeError as e:
            logging.error("Error occurred while trying to decode JSON: %s", e)
            continue

        # Only the most recent review reflects the parent's current state.
        final_review = parent_reviews[-1] if parent_reviews else None
        if final_review and final_review['state'] == "REQUEST_CHANGES":
            try:
                cur.execute(f"""
                    UPDATE {changes_tab} SET "Parent PR Status" = 'CHANGES REQUESTED'
                    WHERE "PR Number" = %s AND "Service Name" = %s;
                """, (pr_number, repo))
                conn.commit()
                logging.info("Updated PR %s in %s to CHANGES REQUESTED.", pr_number, repo)
            except psycopg2.Error as e:
                logging.error("Error updating database: %s", e)
|
||
|
||
def extract_number_from_body(text):
    """Return the first '#NNN' issue/PR number found in *text*, or None."""
    try:
        found = re.search(r"#\d+", str(text))
    except re.error as e:
        logging.error("An error occurred while searching text: %s", e)
        return None
    if found is None:
        return None
    try:
        # Drop the leading '#' and parse the digits.
        return int(found.group().lstrip("#"))
    except ValueError as e:
        logging.error("An error occurred while converting match group to int: %s", e)
        return None
|
||
|
||
def update_squad_and_title(cur, conn, rtc, changes_tab):
    """Fill "Service Name" and "Squad" in *changes_tab* from the *rtc*
    mapping table, then bucket the documentation meta-repos under 'Other'.

    Errors are logged and swallowed, matching the script's best-effort style.
    """
    logging.info("Updating squads and titles...")
    try:
        # Performance fix: the old code SELECTed every row and ran both
        # UPDATEs once per row (filtered by id). Two set-based UPDATEs touch
        # the same rows with a constant number of round-trips.
        cur.execute(
            f"""UPDATE {changes_tab}
                SET "Service Name" = rtc."Title", "Squad" = rtc."Squad"
                FROM {rtc} AS rtc
                WHERE {changes_tab}."Service Name" = rtc."Repository";"""
        )
        cur.execute(
            f"""UPDATE {changes_tab}
                SET "Squad" = 'Other'
                WHERE {changes_tab}."Service Name" IN ('doc-exports', 'docs_on_docs', 'docsportal');"""
        )
        conn.commit()
    except Exception as e:
        logging.error("Error updating squad and title: %s", e)
|
||
|
||
def main(org, rtc, changes_tab):
    """Rebuild *changes_tab* for one cloud environment.

    Drops and recreates the table, scans every public repo of *org* for
    open PRs with requested changes, then resolves parent-PR statuses and
    squad/title metadata. Uses the module-level conn_csv/cur_csv.
    """
    check_env_variables()

    # Start from a clean table on every run.
    cur_csv.execute(f"DROP TABLE IF EXISTS {changes_tab}")
    conn_csv.commit()
    create_prs_table(conn_csv, cur_csv, changes_tab)

    repos = get_repos(cur_csv, rtc)

    logging.info("Gathering PRs where changes has been requested...")

    for repo in repos:
        for pr_info in get_pr_number(org, repo):
            process_pr_reviews(org, repo, pr_info['pr_number'], changes_tab)

    parent_pr_changes_check(cur_csv, conn_csv, org, changes_tab)
    parent_pr_changes_check(cur_csv, conn_csv, org, "our_side_problem")
    update_squad_and_title(cur_csv, conn_csv, rtc, changes_tab)
|
||
|
||
if __name__ == "__main__":
    # Base names; the Swiss environment derives its own via suffixes below.
    org_string = "docs"
    rtc_table = "repo_title_category"
    changes_table = "requested_changes"

    # NOTE(review): connect_to_db() returns None on failure, so .cursor()
    # would raise AttributeError here — acceptable fail-fast for a script.
    conn_csv = connect_to_db(db_csv)
    cur_csv = conn_csv.cursor()

    done = False

    # "our_side_problem" is shared by both environments: reset it once here.
    cur_csv.execute("DROP TABLE IF EXISTS our_side_problem")
    create_prs_table(conn_csv, cur_csv, "our_side_problem")

    conn_csv.commit()

    main(org_string, rtc_table, changes_table)
    main(f"{org_string}-swiss", f"{rtc_table}_swiss", f"{changes_table}_swiss")
    # Bug fix: `done` was never set to True, so the success message below
    # was unreachable dead code.
    done = True

    update_squad_and_title(cur_csv, conn_csv, rtc_table, "our_side_problem")

    if done:
        logging.info("Search successfully finish!")

    end_time = time.time()
    execution_time = end_time - start_time
    minutes, seconds = divmod(execution_time, 60)
    logging.info("Script executed in %s minutes %s seconds! Let's go drink some beer :)", int(minutes), int(seconds))
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
i don't understand why are you using same code in all files, please introduce db class and reuse it
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
same for env variables