diff --git a/backend/algorithms/cache/cache.py b/backend/algorithms/cache/cache.py
index 330e9d9e0..e0c026f8a 100644
--- a/backend/algorithms/cache/cache.py
+++ b/backend/algorithms/cache/cache.py
@@ -91,11 +91,11 @@ def cache_mappings():
def tokeniseFaculty(Faculty):
faculty_token = "F "
if re.search("Faculty of.+", Faculty):
- match_object = re.search("(?<=Faculty\sof\s)[^\s\n\,]+", Faculty)
+ match_object = re.search(r"(?<=Faculty\sof\s)[^\s\n\,]+", Faculty)
elif re.search("UNSW", Faculty):
match_object = re.search(r"(?<=UNSW\s)[^\s\n\,]+", Faculty)
else:
- match_object = re.search("^([\w]+)", Faculty)
+ match_object = re.search(r"^([\w]+)", Faculty)
if match_object is None:
raise KeyError(f'no match found for faculty: {Faculty}')
match = match_object.group()
@@ -105,20 +105,20 @@ def tokeniseFaculty(Faculty):
-# Tokenise faculty using regex, e.g 'School of Psychology' -> 'S Psychology'
+# Tokenise school using regex, e.g. 'School of Psychology' -> 'S Psychology'
def tokeniseSchool(School):
school_token = "S "
- if re.search("School\sof\sthe.+", School):
+ if re.search(r"School\sof\sthe.+", School):
match_object = re.search("(?<=School\sof\sthe\s)[^\s\n\,]+", School)
- elif re.search("School\sof\s.+", School):
- match_object = re.search("(?<=School\sof\s)[^\s\n\,]+", School)
+ elif re.search(r"School\sof\s.+", School):
+ match_object = re.search(r"(?<=School\sof\s)[^\s\n\,]+", School)
elif re.search("^(UC)", School):
- match_object = re.search("(?<=UC\s)[^\s\n\,]+", School)
+ match_object = re.search(r"(?<=UC\s)[^\s\n\,]+", School)
if match_object is None:
raise KeyError(f'no match found for school: {School}')
match = school_token + "UC-" + match_object.group()
return match
elif re.search("UNSW", School):
- match_object = re.search("(?<=UNSW\s)[^\s\n\,]+", School)
+ match_object = re.search(r"(?<=UNSW\s)[^\s\n\,]+", School)
else:
- match_object = re.search("^([\w]+)", School)
+ match_object = re.search(r"^([\w]+)", School)
if match_object is None:
raise KeyError(f'no match found for school: {School}')
match = match_object.group()
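
# Context for the r-prefix changes in this file: "\s" is not a valid Python
# string escape, so a plain literal like "(?<=Faculty\sof\s)" emits a
# DeprecationWarning (a SyntaxWarning from Python 3.12). A minimal sketch of
# the behaviour the fixed pattern preserves, reusing the tokeniseFaculty
# lookbehind and its "F " token prefix:
import re

faculty = "Faculty of Engineering"
match_object = re.search(r"(?<=Faculty\sof\s)[^\s\n\,]+", faculty)
assert match_object is not None
print("F " + match_object.group())  # -> "F Engineering"
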
diff --git a/backend/algorithms/create_program.py b/backend/algorithms/create_program.py
index e320e2de2..5fad4e4e8 100644
--- a/backend/algorithms/create_program.py
+++ b/backend/algorithms/create_program.py
@@ -85,7 +85,7 @@ def create_maturity_restriction(tokens: Dict[str, List[str]]) -> ProgramRestrict
)
def create_dependency_condition(tokens: List[str]) -> Condition:
- """
+ r"""
Creates a dependent condition from the tokens.
Dependency Types:
@@ -108,7 +108,7 @@ def create_dependency_condition(tokens: List[str]) -> Condition:
# Maybe assert that this is the end?
# Matching LevelCategory
- if re.match("L\d", base_token):
+ if re.match(r"L\d", base_token):
index += 1
level = int(base_token[1:])
level_category = LevelCategory(level)
@@ -136,7 +136,7 @@ def create_dependency_condition(tokens: List[str]) -> Condition:
def create_dependent_condition(tokens: List[str]) -> Category:
- """
+ r"""
Creates a dependency condition from the tokens.
Need to worry about:
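
# The docstrings in this file are made raw because they contain sequences
# like "\d"; without the r-prefix the literal itself trips the invalid-escape
# warning at compile time. A minimal demonstration, assuming CPython's
# behaviour (DeprecationWarning up to 3.11, SyntaxWarning from 3.12):
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    compile(r'"""Must complete all level \d courses"""', "<demo>", "exec")
print(caught[0].category)  # warns: invalid escape sequence '\d'
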
diff --git a/backend/data/processors/courses_processing.py b/backend/data/processors/courses_processing.py
index 745d0a779..b583ae1b0 100644
--- a/backend/data/processors/courses_processing.py
+++ b/backend/data/processors/courses_processing.py
@@ -140,7 +140,7 @@ def process_exclusions(processed: dict, formatted: dict) -> None:
)
# Clean and add any remaining plaintext to 'exclusions' field
-    patterns = ["\n", " ,", "[.,]\s*$",
+    patterns = ["\n", " ,", r"[.,]\s*$",
"^[.,]", "^and$", "enrolment in program"]
exclusion_str = exclusion_str.strip()
for pattern in patterns:
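
# The loop body that consumes these patterns falls outside this hunk; a
# hedged sketch of the likely cleanup, assuming each pattern is stripped out
# with re.sub (the exact call is an assumption, not shown in the diff):
import re

patterns = ["\n", " ,", r"[.,]\s*$", "^[.,]", "^and$", "enrolment in program"]
exclusion_str = "enrolment in program 3778."
for pattern in patterns:
    exclusion_str = re.sub(pattern, "", exclusion_str).strip()
print(exclusion_str)  # -> "3778"
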
diff --git a/backend/data/processors/program_conditions_tokenising.py b/backend/data/processors/program_conditions_tokenising.py
index bdb61a058..002059ea1 100644
--- a/backend/data/processors/program_conditions_tokenising.py
+++ b/backend/data/processors/program_conditions_tokenising.py
@@ -40,7 +40,7 @@ def tokenise_maturity_requirement(condition: Dict[str, str]):
}
def tokenise_dependency(condition: str) -> List[str]:
- """
+ r"""
-    This is literally just a condition. At time of writing, it follows one of the following form
+    This is literally just a condition. At time of writing, it follows one of the following forms:
- Must have completed \d UOC
- Must complete all level \d [CATEGORY] courses
@@ -64,12 +64,12 @@ def tokenise_uoc_dependency(condition: str) -> List[str]:
Example input: "Must have completed 24 UOC"
"""
- num_uoc: Optional[re.Match[str]] = re.search("(\d+)", condition)
+ num_uoc: Optional[re.Match[str]] = re.search(r"(\d+)", condition)
return ["UOC", num_uoc.group()] if num_uoc else ["UOC", "0"]
def tokenise_core_dependency(condition: str):
- """
+ r"""
Tokenise the core dependency.
Assumes that the caller has already verified that the given condition is core only.
@@ -87,7 +87,7 @@ def tokenise_core_dependency(condition: str):
# Keep only tokens with meaning
tokens_filtered: List[str] = [
token for token in tokens_raw
- if re.search("([lL]evel)|(\d+)|(prescribed)|([A-Z]{4})", token)
+ if re.search(r"([lL]evel)|(\d+)|(prescribed)|([A-Z]{4})", token)
]
# Clean tokens into a regular form readable by processors
@@ -113,7 +113,7 @@ def compress_cores_tokens(tokens: List[str]) -> List[str]:
return list(tokens_out)
def compress_level_tokens(tokens: List[str]) -> List[str]:
- """
+ r"""
Take in a list of tokens [..., "Level", "\d", ...]
and simplify to [..., "L\d", ...]
"""
@@ -160,7 +160,7 @@ def tokenise_dependent(condition: str):
# Keep only tokens with meaning
tokens = list(filter(
# Groups (left -> right): level, FacultyCode, Number
- lambda tok: re.search("([Ll]evel)|(^[A-Za-z]{4}$)|(\d+)", tok),
+ lambda tok: re.search(r"([Ll]evel)|(^[A-Za-z]{4}$)|(\d+)", tok),
tokens
))
# Clean tokens into a regular form readable by processors
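
# A quick check of the token shape the docstrings above describe, reusing
# tokenise_uoc_dependency exactly as it appears in this file's hunk:
import re
from typing import List, Optional

def tokenise_uoc_dependency(condition: str) -> List[str]:
    num_uoc: Optional[re.Match[str]] = re.search(r"(\d+)", condition)
    return ["UOC", num_uoc.group()] if num_uoc else ["UOC", "0"]

print(tokenise_uoc_dependency("Must have completed 24 UOC"))  # ['UOC', '24']
print(tokenise_uoc_dependency("Must have completed UOC"))     # ['UOC', '0']
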
diff --git a/backend/requirements.txt b/backend/requirements.txt
index 17ea3809e..208fd99b5 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -5,11 +5,11 @@ google-auth==2.10.0
hypothesis==6.61.0
mypy==1.2.0
mypy-extensions==1.0.0
-ortools==9.5.2237
+ortools==9.8.3296
pymongo==4.3.3
pytest==7.2.2
python-dotenv==1.0.0
-python-Levenshtein==0.20.9
+python-Levenshtein==0.25.0
requests==2.31.0
types-paramiko==3.3.0.0
types-requests==2.28.11.15
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 15dd5af00..f65ec61a0 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -1,5 +1,3 @@
-version: '3.8'
-
services:
mongodb:
image: mongo:6.0.14
@@ -33,7 +31,7 @@ services:
build:
context: ./backend
dockerfile: production.dockerfile
- container_name: backend
+ container_name: backend-prod
ports:
- '8000:8000'
env_file:
@@ -64,7 +62,7 @@ services:
build:
context: ./frontend
dockerfile: production.dockerfile
- container_name: frontend
+ container_name: frontend-prod
ports:
- '3000:80'
env_file:
@@ -94,7 +92,7 @@ services:
build:
context: ./backend
dockerfile: init-mongo.dockerfile
- container_name: init-mongo
+ container_name: init-mongo-ci
env_file:
- ./env/backend.env
depends_on: