v1.0.1: Improve external usability
pgarrison committed Jul 10, 2024
1 parent 0bbbd54 commit 9f21bab
Showing 24 changed files with 291 additions and 613 deletions.
4 changes: 2 additions & 2 deletions .github/actions/dependencies/action.yml
@@ -7,5 +7,5 @@ runs:

- name: Install dependencies
shell: bash
# --clean flag in case the environment is restored from a stale cache.
run: pdm sync -d --clean
# --only-keep flag in case the environment is restored from a stale cache.
run: pdm sync -d --only-keep
2 changes: 1 addition & 1 deletion .github/workflows/cache-dependencies.yml
@@ -12,7 +12,7 @@ on:

jobs:
dependencies:
runs-on: self-hosted
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: ./.github/actions/dependencies
15 changes: 12 additions & 3 deletions .github/workflows/run-workflows.yml
@@ -4,12 +4,21 @@ on:
workflow_dispatch:

jobs:
run-workflows:
runs-on: self-hosted
colony-area:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: ./.github/actions/dependencies

- name: Run cloud-capable workflows
- name: Run colony area workflow
shell: bash
run: pdm run python run_all_manuscript_workflows.py --only figure1_main_text

error-analysis:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: ./.github/actions/dependencies
- name: Run error workflow
shell: bash
run: pdm run python run_all_manuscript_workflows.py --only error
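
Each job above passes a different value to the `--only` flag of `run_all_manuscript_workflows.py`, so the colony-area and error analyses run in parallel on separate hosted runners. As a hypothetical sketch only (the runner script's internals are not part of this diff), such an `--only` selector could be wired up with `argparse` roughly like this:

```python
# Hypothetical sketch of an --only workflow selector; the real
# run_all_manuscript_workflows.py may be structured differently.
import argparse

# Assumed registry mapping workflow names to callables that run them.
WORKFLOWS = {
    "figure1_main_text": lambda: print("running colony area workflow"),
    "error": lambda: print("running error analysis workflow"),
}


def main() -> None:
    parser = argparse.ArgumentParser(description="Run manuscript workflows")
    parser.add_argument(
        "--only",
        choices=sorted(WORKFLOWS),
        help="run a single named workflow instead of all of them",
    )
    args = parser.parse_args()
    selected = [args.only] if args.only else sorted(WORKFLOWS)
    for name in selected:
        WORKFLOWS[name]()


if __name__ == "__main__":
    main()
```
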
3 changes: 2 additions & 1 deletion .github/workflows/test.yml
@@ -12,8 +12,9 @@ concurrency:
cancel-in-progress: true

jobs:

test:
runs-on: self-hosted
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: ./.github/actions/dependencies
1 change: 1 addition & 0 deletions .gitignore
@@ -11,6 +11,7 @@ config.yaml
*.log
__pypackages__
.pdm-python
pdm.toml
.tool-versions
requirements/local
test_data
9 changes: 8 additions & 1 deletion docs/INSTALL.md
@@ -47,8 +47,15 @@ PDM, version 2.10.4
## Install the project dependencies
From the `nuc-morph-analysis` directory, use `pdm` to install the dependencies.
```bash
pdm sync
pdm sync -d
```
> [!NOTE]
> If you are on the Allen Institute for Cell Science local network, you can speed up data loading by installing `aicsfiles`.
> Use the following steps.
> 1. `pdm config --local pypi.artifactory.url https://artifactory.corp.alleninstitute.org/artifactory/api/pypi/pypi-virtual/simple`
> 2. `pdm config --local pypi.artifactory-snapshot.url https://artifactory.corp.alleninstitute.org/artifactory/api/pypi/pypi-snapshot-local/simple`
> 3. `pdm sync -d -G internal`
This will create a virtual environment at `nuc-morph-analysis/.venv`.
It is recommended to use `pdm` to run Python in this environment without activating it, via `pdm run python {options}`, though you can activate it with `eval $(pdm venv activate)` or `source .venv/bin/activate`.

@@ -23,7 +23,7 @@
colony = "medium"

# load the tracking CSV for medium from FMS
df = global_dataset_filtering.load_dataset_with_features(colony, load_local=True)
df = global_dataset_filtering.load_dataset_with_features(colony)
df_fmb = figure_helper.assemble_formation_middle_breakdown_dataframe(df)
# load the images for each timepoint
# %%
@@ -41,7 +41,7 @@
# show segmentations as sum projections of contours to emphasize 3D ness
use_cv_contours_for_3d = False

df = load_dataset_with_features(colony, load_local=True)
df = load_dataset_with_features(colony)
df["centroid_y_inv"] = 3120 - df["centroid_y"].values # seg.shape[1]-cy
df["centroid_z_inv"] = 109 - df["centroid_z"].values # seg.shape[0]-cz
df["centroid_z_inv_adjust"] = (
@@ -35,7 +35,7 @@
# show segmentations as sum projections of contours to emphasize 3D ness
use_cv_contours_for_3d = False

df = load_dataset_with_features(name, load_local=True)
df = load_dataset_with_features(name)

# %%
# find the label number for the chosen track
@@ -16,7 +16,7 @@
# cannot run "aphidicolin_lamin_exp2_rep1" in this analysis because the perturb colony does not have enough lamin shell formation events
# also puromycin may need to be removed, if analysis is not included in the paper
pair_list = ["importazole_lamin_exp1_rep1"]
dfo = load_dataset_with_features(dataset="all_drug_perturbation", load_local=True)
dfo = load_dataset_with_features(dataset="all_drug_perturbation")

for pairs in pair_list:
print(pairs)
@@ -16,7 +16,7 @@

# %%

dfo = load_dataset_with_features(dataset="all_drug_perturbation", load_local=True)
dfo = load_dataset_with_features(dataset="all_drug_perturbation")

pair_dict = dataset_info.drug_analysis_pairs()
# iterate through the drug analysis control-perturb pairs.
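
The script changes above all drop the `load_local=True` argument, so the same call works for users outside the Allen Institute network. A minimal sketch of the call after this change; the full module path is assumed from the repository layout, and `"medium"` / `"all_drug_perturbation"` are dataset names taken from the scripts above:

```python
# Sketch only: the package path for global_dataset_filtering is assumed
# from the repository layout; the function name and arguments come from
# the scripts shown above.
from nuc_morph_analysis.lib.preprocessing import global_dataset_filtering

# Load the tracking dataframe with features for one colony;
# no load_local flag is needed after this change.
df = global_dataset_filtering.load_dataset_with_features("medium")

# The drug-perturbation scripts pass the dataset by keyword instead.
dfo = global_dataset_filtering.load_dataset_with_features(dataset="all_drug_perturbation")

print(df.shape, dfo.shape)
```
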
4 changes: 3 additions & 1 deletion nuc_morph_analysis/lib/preprocessing/add_colony_metrics.py
@@ -104,7 +104,9 @@ def _calculate_distance_density(labels, neighbors, centroids):
neighbor_labels = neighbors[lbl]
# rest of your code
except KeyError:
print(f"KeyError: {lbl} not found in neighbors")
# The missing lbl is a symptom of the problem. The count of
# neighbors is confused by the cells stacked in the z dimension.
print(f"KeyError: {lbl} not found in neighbors. This may be caused by cell tracks overlapping in the xy plane")
continue
centroid = np.array(centroids[lbl])
dists = []
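
A minimal, self-contained sketch of the guard added above, using made-up labels, neighbors, and centroids; the real `_calculate_distance_density` operates on the label, neighbor, and centroid data passed in by the rest of `add_colony_metrics.py`:

```python
import numpy as np

# Toy inputs: label 3 has no neighbor entry, mimicking the situation the
# added comment describes (cell tracks overlapping in the xy plane).
labels = [1, 2, 3]
neighbors = {1: [2], 2: [1]}
centroids = {1: (0.0, 0.0, 0.0), 2: (3.0, 4.0, 0.0), 3: (6.0, 8.0, 0.0)}

for lbl in labels:
    try:
        neighbor_labels = neighbors[lbl]
    except KeyError:
        # Skip labels with no neighbor entry instead of crashing the workflow.
        print(f"KeyError: {lbl} not found in neighbors. "
              "This may be caused by cell tracks overlapping in the xy plane")
        continue
    centroid = np.array(centroids[lbl])
    dists = [np.linalg.norm(np.array(centroids[n]) - centroid) for n in neighbor_labels]
    print(lbl, dists)
```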