logs csv_export use DefaultStorage
nucleogenesis committed Dec 20, 2024
1 parent 63722d7 commit 793b83f
Showing 1 changed file with 23 additions and 7 deletions.
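For context, the change swaps direct filesystem checks and local file handles for Django's storage API, so the export can also work when the default storage backend is not the local filesystem. Below is a minimal sketch of the storage calls the new code leans on, assuming a configured Django project; the filename is illustrative:

from django.core.files.storage import DefaultStorage

storage = DefaultStorage()  # lazy proxy for the project's configured default storage backend

# Backends normalize/sanitize file names before use.
name = storage.generate_filename("summary_logs.csv")  # illustrative filename

# Existence checks go through the backend instead of os.path.exists().
already_there = storage.exists(name)

# A filesystem backend exposes a local path; remote backends raise
# NotImplementedError from path() and only expose a URL.
try:
    location = storage.path(name)
except NotImplementedError:
    location = storage.url(name)
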
30 changes: 23 additions & 7 deletions kolibri/core/logger/csv_export.py
@@ -2,14 +2,15 @@

import csv
import datetime
import io
import logging
import math
import os
from collections import OrderedDict
from functools import partial

from dateutil import parser
from django.core.cache import cache
from django.core.files.storage import DefaultStorage
from django.utils.translation import gettext_lazy as _
from django.utils.translation import pgettext_lazy
from le_utils.constants import content_kinds
@@ -18,7 +19,6 @@
from .models import ContentSummaryLog
from kolibri.core.content.models import ChannelMetadata
from kolibri.core.content.models import ContentNode
from kolibri.core.utils.csv import open_csv_for_writing
from kolibri.core.utils.csv import output_mapper


@@ -146,6 +146,7 @@ def cache_content_title(obj):
def csv_file_generator(
facility, log_type, filepath, start_date, end_date, overwrite=False
):
file_storage = DefaultStorage()

if log_type not in ("summary", "session"):
raise ValueError(
@@ -160,8 +161,9 @@
else parser.parse(end_date) + datetime.timedelta(days=1)
)

if not overwrite and os.path.exists(filepath):
raise ValueError("{} already exists".format(filepath))
filename = file_storage.generate_filename(filepath.split("/")[-1])
if not overwrite and file_storage.exists(filename):
raise ValueError("{} already exists".format(filename))
queryset = log_info["queryset"].filter(
dataset_id=facility.dataset_id,
)
@@ -179,14 +181,28 @@
if log_type == "summary" or label != labels["completion_timestamp"]
)

csv_file = open_csv_for_writing(filepath)
csv_file = io.BytesIO()

with csv_file as f:
writer = csv.DictWriter(f, header_labels)
logger.info("Creating csv file {filename}".format(filename=filepath))
writer = csv.DictWriter(io.TextIOWrapper(f, encoding="utf-8"), header_labels)
logger.info(
"Creating {logtype} csv file {filename}".format(
logtype=log_type, filename=filename
)
)
writer.writeheader()
for item in queryset.select_related("user", "user__facility").values(
*log_info["db_columns"]
):
writer.writerow(map_object(item))
yield

f.seek(0)
file = file_storage.save(filename, f)

try:
# If the file is local, we can get the path
logger.info("File saved - Path: {}".format(file_storage.path(file)))
except NotImplementedError:
# But if path is not implemented, we assume we can get the URL
logger.info("File saved - Path: {}".format(file_storage.url(file)))
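
The in-memory buffer is the other half of the pattern: csv.DictWriter needs a text stream, while the storage backend's save() takes a file-like object of bytes, so the writer goes through io.TextIOWrapper over an io.BytesIO that is flushed and rewound before being handed to the backend. A standalone sketch of that pattern, again assuming a configured Django project, with illustrative field names and target filename:

import csv
import io

from django.core.files.storage import DefaultStorage

buffer = io.BytesIO()
text_stream = io.TextIOWrapper(buffer, encoding="utf-8", newline="")

writer = csv.DictWriter(text_stream, fieldnames=["user", "kind"])  # illustrative columns
writer.writeheader()
writer.writerow({"user": "learner1", "kind": "exercise"})

text_stream.flush()  # push buffered text down into the underlying BytesIO
buffer.seek(0)       # rewind so the storage backend reads the whole payload
saved_name = DefaultStorage().save("example_logs.csv", buffer)  # illustrative filename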
