Skip to content

Commit

Permalink
Merge pull request #16 from ruchernchong/15-reduce-complexity-of-updater-function
Browse files Browse the repository at this point in the history

Refactor function complexity
  • Loading branch information
ruchernchong authored Jul 6, 2024
2 parents 248ebe9 + 37abb21 commit 63eec19
Showing 1 changed file with 24 additions and 16 deletions.
40 changes: 24 additions & 16 deletions updater.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,29 @@
load_dotenv()


def read_csv_data(file_path: str) -> List[Dict[str, Any]]:
    """Read a CSV file and return its rows as normalised dictionaries.

    Each row is passed through ``process_csv_row`` for cleanup (e.g. numeric
    conversion).  ``process_csv_row`` mutates the row in place, so if it
    returns ``None`` (as an earlier revision did) we still collect the
    mutated row rather than a list of ``None``s.

    :param file_path: path to the CSV file to read.
    :return: list of row dicts as produced by ``csv.DictReader``.
    """
    # newline="" is the csv-module-recommended way to open CSV files so the
    # reader handles embedded newlines correctly.
    with open(file_path, "r", encoding="utf-8", newline="") as csv_file:
        reader = csv.DictReader(csv_file)
        # `or row` guards against process_csv_row returning None: the row is
        # mutated in place, so it is always the correct value to keep.
        return [process_csv_row(row) or row for row in reader]


def process_csv_row(row: Dict[str, Any]) -> Dict[str, Any]:
    """Normalise a single CSV row in place and return it.

    - Strips ``.`` from the ``make`` field (e.g. ``"B.M.W"`` -> ``"BMW"``).
    - Converts numeric-looking strings to numbers: thousands separators are
      removed (``"1,234"`` -> ``1234``) and values containing a decimal
      point become floats (``"3.5"`` -> ``3.5``).

    Bug fixes versus the previous revision: the function now *returns* the
    row (it previously returned ``None``, so callers collected ``None``s),
    and the float branch is reachable (it previously re-bound the value via
    ``str(int(...))``, which can never contain a ``.``).

    :param row: one row dict from ``csv.DictReader`` (str values).
    :return: the same dict, mutated in place.
    """
    if "make" in row:
        row["make"] = row["make"].replace(".", "")
    # Convert string values to numbers where possible.  Reassigning existing
    # keys while iterating items() is safe (the dict's size never changes).
    for key, value in row.items():
        # Matches digits optionally followed by a comma-separated group,
        # e.g. "1234" or "1,234".  An empty string never matches, so blank
        # fields are left untouched.
        if re.match(r"\b\d+(?:,\d+)?\b", value):
            cleaned = value.replace(",", "")
            try:
                row[key] = float(cleaned) if "." in cleaned else int(cleaned)
            except ValueError:
                # Not actually numeric (e.g. "12ab") — keep the original string.
                pass
    return row


async def updater(
collection_name: str, zip_file_name: str, zip_url: str, key_fields: List[str]
) -> str:
Expand All @@ -31,22 +54,7 @@ async def updater(
destination_path = os.path.join(temp_dir, extracted_file_name)
print(f"Destination path: {destination_path}")

csv_data: List[Dict[str, Any]] = []
with open(destination_path, "r", encoding="utf-8") as csv_file:
csv_reader = csv.DictReader(csv_file)
for row in csv_reader:
if "make" in row:
row["make"] = row["make"].replace(".", "")
# Convert string values to numbers if possible
for key, value in row.items():
if re.match(r"\b\d+(?:,\d+)?\b", value):
row[key] = 0 if value == "" else value
try:
value = str(int(value.replace(",", "")))
row[key] = float(value) if "." in value else int(value)
except ValueError:
pass
csv_data.append(row)
csv_data: List[Dict[str, Any]] = await read_csv_data(destination_path)

existing_data_map = {
create_unique_key(item, key_fields): item
Expand Down

0 comments on commit 63eec19

Please sign in to comment.