Build bot now builds new SVGs in folders that were already built (#666)
* Refactor the pull request fetching code

* Refactor build script to use past PRs

* Added function to update icomoon json

* new icon: matlab (line) (#640)

* Add matlab-line

* Fixed issues reported by check svg bot

* optimisation for svg (#643)

Co-authored-by: Clemens Bastian <[email protected]>
Co-authored-by: David Leal <[email protected]>

* Add better logging to icomoon_build

Co-authored-by: Clemens Bastian <[email protected]>
Co-authored-by: David Leal <[email protected]>
3 people authored Jun 13, 2021
1 parent 8d617d7 commit d60b334
Showing 8 changed files with 253 additions and 121 deletions.
94 changes: 94 additions & 0 deletions .github/scripts/build_assets/api_handler.py
@@ -0,0 +1,94 @@
import requests
import sys
import re

def get_merged_pull_reqs(token, page):
"""
Get the merged pull requests based on page. There are
100 results per page. See https://docs.github.com/en/rest/reference/pulls
for more details on the parameters.
:param token, a GitHub API token.
:param page, the page number.
"""
queryPath = "https://api.github.com/repos/devicons/devicon/pulls"
headers = {
"Authorization": f"token {token}"
}
params = {
"accept": "application/vnd.github.v3+json",
"state": "closed",
"per_page": 100,
"page": page
}

print(f"Querying the GitHub API for requests page #{page}")
response = requests.get(queryPath, headers=headers, params=params)
if not response:
print(f"Can't query the GitHub API. Status code is {response.status_code}. Message is {response.text}")
sys.exit(1)

closed_pull_reqs = response.json()
return [merged_pull_req
for merged_pull_req in closed_pull_reqs
if merged_pull_req["merged_at"] is not None]


def is_feature_icon(pull_req_data):
"""
Check whether the pull_req_data belongs to a feature:icon PR.
:param pull_req_data - the data on a specific pull request from GitHub.
:return true if the pull_req_data has a label named "feature:icon"
"""
for label in pull_req_data["labels"]:
if label["name"] == "feature:icon":
return True
return False


def find_all_authors(pull_req_data, token):
"""
Find all the authors of a PR based on its commits.
:param pull_req_data - the data on a specific pull request from GitHub.
:param token - a GitHub API token.
"""
headers = {
"Authorization": f"token {token}"
}
response = requests.get(pull_req_data["commits_url"], headers=headers)
if not response:
print(f"Can't query the GitHub API. Status code is {response.status_code}")
print("Response is: ", response.text)
return

commits = response.json()
authors = set() # want unique authors only
for commit in commits:
authors.add(commit["commit"]["author"]["name"])
return ", ".join(["@" + author for author in list(authors)])


def get_merged_pull_reqs_since_last_release(token):
"""
Get all the merged pull requests since the last release.
"""
stopPattern = r"^(r|R)elease v"
pull_reqs = []
found_last_release = False
page = 1

print("Getting PRs since last release.")
while not found_last_release:
data = get_merged_pull_reqs(token, page)
# assume we don't encounter it during the loop
last_release_index = 101

for i in range(len(data)):
if re.search(stopPattern, data[i]["title"]):
found_last_release = True
last_release_index = i
break
pull_reqs.extend(data[:last_release_index])
page += 1

# should contain all the PRs since last release
return pull_reqs
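
For context, a minimal usage sketch of these helpers, assuming the snippet is run from the .github/scripts directory like the other build scripts; the GITHUB_TOKEN environment variable and the printout are illustrative assumptions, not part of the commit:

# Hypothetical usage of the api_handler helpers added in this commit.
import os

from build_assets import api_handler

token = os.environ["GITHUB_TOKEN"]  # assumed to hold a GitHub API token
merged_prs = api_handler.get_merged_pull_reqs_since_last_release(token)
icon_prs = [pr for pr in merged_prs if api_handler.is_feature_icon(pr)]
print(f"{len(icon_prs)} icon PRs merged since the last release")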
4 changes: 4 additions & 0 deletions .github/scripts/build_assets/arg_getters.py
@@ -33,6 +33,10 @@ def get_selenium_runner_args(peek_mode=False):
help="The download destination of the Icomoon files",
action=PathResolverAction)

parser.add_argument("token",
help="The GitHub token to access the GitHub REST API.",
type=str)

if peek_mode:
parser.add_argument("--pr_title",
help="The title of the PR that we are peeking at")
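
As a standalone illustration of how this new positional argument parses (this snippet is not the project's actual parser; the token value is a placeholder):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("token",
                    help="The GitHub token to access the GitHub REST API.",
                    type=str)
args = parser.parse_args(["ghp_placeholder123"])  # placeholder token value
print(args.token)  # prints "ghp_placeholder123"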
22 changes: 21 additions & 1 deletion .github/scripts/build_assets/util.py
@@ -1,4 +1,6 @@
import os
import re
from typing import List
import platform
import sys
import traceback
@@ -41,4 +43,22 @@ def set_env_var(key: str, value: str, delimiter: str='~'):
else:
os.system(f'echo "{key}={value}" >> $GITHUB_ENV')
else:
raise Exception("This function doesn't support this platform: " + platform.system())


def find_object_added_in_this_pr(icons: List[dict], pr_title: str):
"""
Find the icon name from the PR title.
:param icons, a list of the font objects found in the devicon.json.
:param pr_title, the title of the PR that this workflow was called on.
:return a dictionary with the "name"
entry's value matching the name in the pr_title.
:raise If no object can be found, raise an Exception.
"""
try:
pattern = re.compile(r"(?<=^new icon: )\w+ (?=\(.+\))", re.I)
icon_name = pattern.findall(pr_title)[0].lower().strip() # should only have one match
icon = [icon for icon in icons if icon["name"] == icon_name][0]
return icon
except IndexError: # there is no match from findall()
raise Exception("Couldn't find an icon matching the name in the PR title.")
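
As a quick, illustrative check of the pattern above, using a PR title of the same form as the one in this commit's history:

import re

pattern = re.compile(r"(?<=^new icon: )\w+ (?=\(.+\))", re.I)
matches = pattern.findall("new icon: matlab (line) (#640)")
print(matches[0].lower().strip())  # prints "matlab"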
100 changes: 32 additions & 68 deletions .github/scripts/get_release_message.py
@@ -1,75 +1,39 @@
import requests
from build_assets import arg_getters
from build_assets import arg_getters, api_handler, util
import re

def main():
print("Please wait a few seconds...")
args = arg_getters.get_release_message_args()
queryPath = "https://api.github.com/repos/devicons/devicon/pulls?accept=application/vnd.github.v3+json&state=closed&per_page=100"
stopPattern = r"^(r|R)elease v"
headers = {
"Authorization": f"token {args.token}"
}

response = requests.get(queryPath, headers=headers)
if not response:
print(f"Can't query the GitHub API. Status code is {response.status_code}. Message is {response.text}")
return

data = response.json()
newIcons = []
features = []

for pullData in data:
if re.search(stopPattern, pullData["title"]):
break

authors = findAllAuthors(pullData, headers)
markdown = f"- [{pullData['title']}]({pullData['html_url']}) by {authors}."

if isFeatureIcon(pullData):
newIcons.append(markdown)
else:
features.append(markdown)

thankYou = "A huge thanks to all our maintainers and contributors for making this release possible!"
iconTitle = "**{} New Icons**\n".format(len(newIcons))
featureTitle = "**{} New Features**\n".format(len(features))
finalString = "{0}\n\n {1}{2}\n\n {3}{4}".format(thankYou,
iconTitle, "\n".join(newIcons), featureTitle, "\n".join(features))

print("--------------Here is the build message--------------\n", finalString)


"""
Check whether the pullData is a feature:icon PR.
:param pullData
:return true if the pullData has a label named "feature:icon"
"""
def isFeatureIcon(pullData):
for label in pullData["labels"]:
if label["name"] == "feature:icon":
return True
return False


"""
Find all the authors of a PR based on its commits.
:param pullData - the data of a pull request.
"""
def findAllAuthors(pullData, authHeader):
response = requests.get(pullData["commits_url"], headers=authHeader)
if not response:
print(f"Can't query the GitHub API. Status code is {response.status_code}")
print("Response is: ", response.text)
return

commits = response.json()
authors = set() # want unique authors only
for commit in commits:
authors.add("@" + commit["author"]["login"])
return ", ".join(list(authors))

try:
print("Please wait a few seconds...")
args = arg_getters.get_release_message_args()

# fetch first page by default
data = api_handler.get_merged_pull_reqs_since_last_release(args.token)
newIcons = []
features = []

print("Parsing through the pull requests")
for pullData in data:
authors = api_handler.find_all_authors(pullData, args.token)
markdown = f"- [{pullData['title']}]({pullData['html_url']}) by {authors}."

if api_handler.is_feature_icon(pullData):
newIcons.append(markdown)
else:
features.append(markdown)

print("Constructing message")
thankYou = "A huge thanks to all our maintainers and contributors for making this release possible!"
iconTitle = f"**{len(newIcons)} New Icons**"
featureTitle = f"**{len(features)} New Features**"
finalString = "{0}\n\n {1}\n{2}\n\n {3}\n{4}".format(thankYou,
iconTitle, "\n".join(newIcons), featureTitle, "\n".join(features))

print("--------------Here is the build message--------------\n", finalString)
print("Script finished")
except Exception as e:
util.exit_with_err(e)


if __name__ == "__main__":
main()
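
For reference, a self-contained sketch of the message layout produced by the format string above; the PR titles, URLs, and author below are invented placeholders:

thankYou = "A huge thanks to all our maintainers and contributors for making this release possible!"
newIcons = ["- [new icon: example (original)](https://github.com/devicons/devicon/pull/0) by @someone."]
features = ["- [Add better logging to icomoon_build](https://github.com/devicons/devicon/pull/0) by @someone."]
iconTitle = f"**{len(newIcons)} New Icons**"
featureTitle = f"**{len(features)} New Features**"
finalString = "{0}\n\n {1}\n{2}\n\n {3}\n{4}".format(
    thankYou, iconTitle, "\n".join(newIcons), featureTitle, "\n".join(features))
print(finalString)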
112 changes: 92 additions & 20 deletions .github/scripts/icomoon_build.py
@@ -1,36 +1,36 @@
from pathlib import Path
import sys
from selenium.common.exceptions import TimeoutException
import re
import subprocess
import json
from typing import List, Dict


# pycharm complains that build_assets is an unresolved ref
# don't worry about it, the script still runs
from build_assets.SeleniumRunner import SeleniumRunner
from build_assets import filehandler, arg_getters
from build_assets import util
from build_assets import filehandler, arg_getters, util, api_handler


def main():
args = arg_getters.get_selenium_runner_args()
new_icons = filehandler.find_new_icons(args.devicon_json_path, args.icomoon_json_path)
if len(new_icons) == 0:
sys.exit("No files need to be uploaded. Ending script...")

# print list of new icons
print("List of new icons:", *new_icons, sep = "\n")

"""
Build the icons using Icomoon. Also optimize the svgs.
"""
runner = None
try:
svgs = filehandler.get_svgs_paths(new_icons, args.icons_folder_path, icon_versions_only=False)
# optimizes the files
# do in each batch in case the command
# line complains there's too many characters
start = 0
step = 10
for i in range(start, len(svgs), step):
batch = svgs[i:i + step]
subprocess.run(["npm", "run", "optimize-svg", "--", f"--svgFiles={json.dumps(batch)}"], shell=True)
args = arg_getters.get_selenium_runner_args()
new_icons = get_icons_for_building(args.devicon_json_path, args.token)
if len(new_icons) == 0:
sys.exit("No files need to be uploaded. Ending script...")

print(f"There are {len(new_icons)} icons to be build. Here are they:", *new_icons, sep = "\n")

print("Begin optimizing files")
optimize_svgs(new_icons, args.icons_folder_path)

print("Updating the icomoon json")
update_icomoon_json(new_icons, args.icomoon_json_path)

icon_svgs = filehandler.get_svgs_paths(
new_icons, args.icons_folder_path, icon_versions_only=True)
@@ -50,7 +50,79 @@ def main():
except Exception as e:
util.exit_with_err(e)
finally:
runner.close()
if runner is not None:
runner.close()


def get_icons_for_building(devicon_json_path: str, token: str):
"""
Get the icons for building.
:param devicon_json_path - the path to the `devicon.json`.
:param token - the token to access the GitHub API.
"""
all_icons = filehandler.get_json_file_content(devicon_json_path)
pull_reqs = api_handler.get_merged_pull_reqs_since_last_release(token)
new_icons = []

for pull_req in pull_reqs:
if api_handler.is_feature_icon(pull_req):
filtered_icon = util.find_object_added_in_this_pr(all_icons, pull_req["title"])
new_icons.append(filtered_icon)
return new_icons


def optimize_svgs(new_icons: List[str], icons_folder_path: str):
"""
Optimize the newly added svgs. This is done in batches
since the command line has a limit on characters allowed.
:param new_icons - the new icons that need to be optimized.
:param icons_folder_path - the path to the /icons folder.
"""
svgs = filehandler.get_svgs_paths(new_icons, icons_folder_path, icon_versions_only=False)
start = 0
step = 10
for i in range(start, len(svgs), step):
batch = svgs[i:i + step]
subprocess.run(["npm", "run", "optimize-svg", "--", f"--svgFiles={json.dumps(batch)}"], shell=True)
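
A tiny illustration of the batching used above; the file names are placeholders:

svgs = [f"icon_{i}.svg" for i in range(23)]  # placeholder paths
step = 10
batches = [svgs[i:i + step] for i in range(0, len(svgs), step)]
print([len(batch) for batch in batches])  # prints [10, 10, 3]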


def update_icomoon_json(new_icons: List[str], icomoon_json_path: str):
"""
Update the `icomoon.json` if it contains any icons
that need to be updated. This will remove those icons
from the `icomoon.json` so the build script can reupload
them later.
"""
icomoon_json = filehandler.get_json_file_content(icomoon_json_path)
cur_len = len(icomoon_json["icons"])
messages = []

wrapper_function = lambda icomoon_icon : find_icomoon_icon_not_in_new_icons(
icomoon_icon, new_icons, messages)
icons_to_keep = filter(wrapper_function, icomoon_json["icons"])
icomoon_json["icons"] = list(icons_to_keep)

new_len = len(icomoon_json["icons"])
print(f"Update completed. Removed {cur_len - new_len} icons:", *messages, sep='\n')
filehandler.write_to_file(icomoon_json_path, json.dumps(icomoon_json))


def find_icomoon_icon_not_in_new_icons(icomoon_icon: Dict, new_icons: List, messages: List):
"""
Find all the icomoon icons that are not listed in the new icons.
This also adds logging for which icons were removed.
:param icomoon_icon - a dict object from the icomoon.json's `icons` attribute.
:param new_icons - a list of new icons. Each element is an object from the `devicon.json`.
:param messages - an empty list where the function can attach logging on which
icons were removed.
"""
for new_icon in new_icons:
pattern = re.compile(f"^{new_icon['name']}-")
if pattern.search(icomoon_icon["properties"]["name"]):
message = f"-'{icomoon_icon['properties']['name']}' cause it matches '{new_icon['name']}'"
messages.append(message)
return False
return True


if __name__ == "__main__":
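
Assuming the functions above are importable, a quick illustration of the filtering behaviour with made-up icon names:

# Made-up data; not taken from the real icomoon.json or devicon.json.
new_icons = [{"name": "matlab"}]
icomoon_icons = [
    {"properties": {"name": "matlab-line"}},
    {"properties": {"name": "python-plain"}},
]
messages = []
kept = [icon for icon in icomoon_icons
        if find_icomoon_icon_not_in_new_icons(icon, new_icons, messages)]
print([icon["properties"]["name"] for icon in kept])  # prints ['python-plain']
print(messages)  # one entry noting that 'matlab-line' was removed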