Skip to content

Commit

Permalink
Add time&mem test for snake oil plotting
Browse files Browse the repository at this point in the history
  • Loading branch information
yngve-sk committed Nov 28, 2024
1 parent 403a55e commit 49dd7a7
Showing 1 changed file with 87 additions and 0 deletions.
87 changes: 87 additions & 0 deletions tests/ert/unit_tests/gui/tools/plot/test_plot_api.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
import contextlib
import gc
import os
import time
from datetime import datetime, timedelta
from textwrap import dedent
from typing import Dict
Expand All @@ -11,6 +13,7 @@
import polars
import pytest
import xarray as xr
from httpx import RequestError
from pandas.testing import assert_frame_equal
from starlette.testclient import TestClient

Expand Down Expand Up @@ -198,6 +201,21 @@ def api_and_storage(monkeypatch, tmp_path):
gc.collect()


@pytest.fixture
def api_and_snake_oil_storage(snake_oil_case_storage, monkeypatch):
    """Yield a ``(PlotApi, storage)`` pair backed by the snake-oil test case.

    Opens the snake-oil case storage read-only and points the in-process
    storage server at it via environment variables before constructing
    the ``PlotApi``.  On teardown, the module-level storage handle cached
    in ``enkf`` is closed so subsequent tests start from a clean slate.
    """
    with open_storage(snake_oil_case_storage.ens_path, mode="r") as storage:
        # "yup" is only a truthy marker: it disables token authentication.
        monkeypatch.setenv("ERT_STORAGE_NO_TOKEN", "yup")
        monkeypatch.setenv("ERT_STORAGE_ENS_PATH", storage.path)

        api = PlotApi()
        yield api, storage

    # NOTE(review): enkf._storage is a private module-level cache that is
    # presumably populated lazily while the test body uses the API — close
    # and clear it so later tests do not reuse a stale handle.
    if enkf._storage is not None:
        enkf._storage.close()
        enkf._storage = None
    gc.collect()


@pytest.mark.parametrize(
"num_reals, num_dates, num_keys, max_memory_mb",
[ # Tested 24.11.22 on macbook pro M1 max
Expand Down Expand Up @@ -275,6 +293,75 @@ def test_plot_api_big_summary_memory_usage(
assert total_memory_usage < max_memory_mb


def test_plotter_on_all_snake_oil_responses_time(api_and_snake_oil_storage):
    """Smoke-check that plot metadata and all responses are fetched quickly.

    Measures two phases separately: (1) fetching key metadata plus the
    ensemble list, and (2) cycling through every key/ensemble combination
    for response data, observations and history data.
    """
    api, _ = api_and_snake_oil_storage

    start = time.time()
    key_infos = api.all_data_type_keys()
    ensembles = api.get_all_ensembles()
    metadata_done = time.time()

    ensemble_ids = [ensemble.id for ensemble in ensembles]

    # Cycle through all ensembles and get all responses
    for info in key_infos:
        key = info.key
        for ensemble_id in ensemble_ids:
            api.data_for_key(ensemble_id=ensemble_id, key=key)

        if info.observations:
            with contextlib.suppress(RequestError, TimeoutError):
                api.observations_for_key(ensemble_ids, key)

        # Note: Does not test for fields
        key_str = str(key)
        if not (key_str.endswith("H") or "H:" in key_str):
            with contextlib.suppress(RequestError, TimeoutError):
                api.history_data(key, ensemble_ids)

    responses_done = time.time()

    # Thresholds leave generous headroom over locally observed timings.
    assert metadata_done - start < 1  # 0.09 on py312 macbook pro m1 max
    assert responses_done - metadata_done < 14  # 4.48 on py312 macbook pro m1 max

    gc.collect()


def test_plotter_on_all_snake_oil_responses_memory(
    api_and_snake_oil_storage, tmp_path
):
    """Check memory allocated while fetching every snake-oil plot response.

    Runs the same access pattern as the timing test under a memray tracker
    and asserts total/peak allocation stays below generous thresholds.
    """
    api, _ = api_and_snake_oil_storage

    # Write the memray capture into the per-test tmp dir instead of the CWD:
    # this avoids leaking "memray.bin" when the tracked body raises and
    # avoids filename collisions when tests run in parallel.  tmp_path is
    # cleaned up by pytest, so no explicit os.remove is needed.
    memray_file = tmp_path / "memray.bin"

    with memray.Tracker(str(memray_file), follow_fork=True, native_traces=True):
        key_infos = api.all_data_type_keys()
        all_ensembles = api.get_all_ensembles()
        # Cycle through all ensembles and get all responses
        for key_info in key_infos:
            for ensemble in all_ensembles:
                api.data_for_key(ensemble_id=ensemble.id, key=key_info.key)

            if key_info.observations:
                with contextlib.suppress(RequestError, TimeoutError):
                    api.observations_for_key(
                        [ens.id for ens in all_ensembles], key_info.key
                    )

            # Note: Does not test for fields
            if not (str(key_info.key).endswith("H") or "H:" in str(key_info.key)):
                with contextlib.suppress(RequestError, TimeoutError):
                    api.history_data(
                        key_info.key,
                        [e.id for e in all_ensembles],
                    )

    stats = memray._memray.compute_statistics(str(memray_file))
    total_memory_mb = stats.total_memory_allocated / (1024**2)
    peak_memory_mb = stats.peak_memory_allocated / (1024**2)

    assert total_memory_mb < 5000  # Tested locally to 3579mb on macbook pro m1 max
    assert peak_memory_mb < 1500  # Tested locally to 840mb on macbook pro m1 max


def test_plot_api_handles_urlescape(api_and_storage):
api, storage = api_and_storage
key = "WBHP:46/3-7S"
Expand Down

0 comments on commit 49dd7a7

Please sign in to comment.