Skip to content

Commit

Permalink
Merge branch 'main' into neuroconv_aws
Browse files Browse the repository at this point in the history
  • Loading branch information
h-mayorquin authored Dec 6, 2024
2 parents 253855b + 54e6ea9 commit cfe9ade
Show file tree
Hide file tree
Showing 2 changed files with 47 additions and 66 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,10 @@
* Added the `rclone_transfer_batch_job` helper function for executing Rclone data transfers in AWS Batch jobs. [PR #1085](https://github.com/catalystneuro/neuroconv/pull/1085)
* Added the `deploy_neuroconv_batch_job` helper function for deploying NeuroConv AWS Batch jobs. [PR #1086](https://github.com/catalystneuro/neuroconv/pull/1086)


## Improvements
* Use mixin tests for ecephys mocks [PR #1136](https://github.com/catalystneuro/neuroconv/pull/1136)
* Use pytest format for dandi tests to avoid window permission error on teardown [PR #1151](https://github.com/catalystneuro/neuroconv/pull/1151)

# v0.6.5 (November 1, 2024)

Expand Down
111 changes: 45 additions & 66 deletions tests/test_minimal/test_tools/dandi_transfer_tools.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,9 @@
import os
import sys
from datetime import datetime
from pathlib import Path
from platform import python_version as get_python_version
from shutil import rmtree
from tempfile import mkdtemp

import pytest
from hdmf.testing import TestCase
from pynwb import NWBHDF5IO

from neuroconv.tools.data_transfers import automatic_dandi_upload
Expand All @@ -24,80 +20,63 @@
@pytest.mark.skipif(
    not HAVE_DANDI_KEY,
    reason="You must set your DANDI_API_KEY to run this test!",
)
def test_automatic_dandi_upload(tmp_path):
    """Write one minimal NWB file and upload it to the DANDI staging server with default job settings."""
    upload_folder = tmp_path / "test_nwb"
    upload_folder.mkdir()

    # Encode platform + Python version in the session id so concurrent CI runs don't collide.
    session_id = f"test-automatic-upload-{sys.platform}-{get_python_version().replace('.', '-')}"
    metadata = get_default_nwbfile_metadata()
    metadata["NWBFile"].update(session_start_time=datetime.now().astimezone(), session_id=session_id)
    metadata.update(Subject=dict(subject_id="foo", species="Mus musculus", age="P1D", sex="U"))

    with NWBHDF5IO(path=upload_folder / "test_nwb_1.nwb", mode="w") as io:
        io.write(make_nwbfile_from_metadata(metadata=metadata))

    automatic_dandi_upload(dandiset_id="200560", nwb_folder_path=upload_folder, staging=True)


@pytest.mark.skipif(
    not HAVE_DANDI_KEY,
    reason="You must set your DANDI_API_KEY to run this test!",
)
def test_automatic_dandi_upload_non_parallel(tmp_path):
    """Upload a minimal NWB file to the DANDI staging server using a single job (number_of_jobs=1)."""
    upload_folder = tmp_path / "test_nwb"
    upload_folder.mkdir()

    # Encode platform + Python version in the session id so concurrent CI runs don't collide.
    session_id = f"test-automatic-upload-{sys.platform}-{get_python_version().replace('.', '-')}-non-parallel"
    metadata = get_default_nwbfile_metadata()
    metadata["NWBFile"].update(session_start_time=datetime.now().astimezone(), session_id=session_id)
    metadata.update(Subject=dict(subject_id="foo", species="Mus musculus", age="P1D", sex="U"))

    with NWBHDF5IO(path=upload_folder / "test_nwb_2.nwb", mode="w") as io:
        io.write(make_nwbfile_from_metadata(metadata=metadata))

    automatic_dandi_upload(dandiset_id="200560", nwb_folder_path=upload_folder, staging=True, number_of_jobs=1)


@pytest.mark.skipif(
    not HAVE_DANDI_KEY,
    reason="You must set your DANDI_API_KEY to run this test!",
)
def test_automatic_dandi_upload_non_parallel_non_threaded(tmp_path):
    """Upload a minimal NWB file to the DANDI staging server with one job and one thread."""
    upload_folder = tmp_path / "test_nwb"
    upload_folder.mkdir()

    # Encode platform + Python version in the session id so concurrent CI runs don't collide.
    session_id = (
        f"test-automatic-upload-{sys.platform}-"
        f"{get_python_version().replace('.', '-')}-non-parallel-non-threaded"
    )
    metadata = get_default_nwbfile_metadata()
    metadata["NWBFile"].update(session_start_time=datetime.now().astimezone(), session_id=session_id)
    metadata.update(Subject=dict(subject_id="foo", species="Mus musculus", age="P1D", sex="U"))

    with NWBHDF5IO(path=upload_folder / "test_nwb_3.nwb", mode="w") as io:
        io.write(make_nwbfile_from_metadata(metadata=metadata))

    automatic_dandi_upload(
        dandiset_id="200560",
        nwb_folder_path=upload_folder,
        staging=True,
        number_of_jobs=1,
        number_of_threads=1,
    )

0 comments on commit cfe9ade

Please sign in to comment.