Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Showing 29 changed files with 1,996 additions and 476 deletions.
This file was deleted.
@@ -1,9 +1,9 @@
# Changelog
## 1.2.5
* Allow `bean run .. tiling` for untranslated `--allele-df-key`.

## 1.2.8
* Change .pyx files to be compatible with more recent numpy versions
## 1.2.7
* **CRITICAL** Fix sample ordering & masking issue for survival screens
## 1.2.6
* Fix overflow in `bean run survival` and autograd error related to inplace assignment for `bean run survival tiling`.

## 1.2.7
* **CRITICAL** Fix sample ordering & masking issue for survival screens
## 1.2.5
* Allow `bean run .. tiling` for untranslated `--allele-df-key`.
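The 1.2.6 entry above refers to an autograd error caused by in-place assignment. For reference only (this is an illustration, not code from this repository), the sketch below shows the kind of error PyTorch raises when a tensor that requires gradients is modified in place:

```python
import torch

# Illustration only, not repository code: in-place assignment on a leaf tensor
# that requires gradients raises the kind of autograd error the 1.2.6 bullet
# refers to.
x = torch.zeros(3, requires_grad=True)
try:
    x[0] = 1.0  # in-place modification of a leaf tensor tracked by autograd
except RuntimeError as err:
    print(f"autograd error: {err}")
```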
@@ -0,0 +1,72 @@
import pickle as pkl

import numpy as np
import torch

from bean.model.run import _get_guide_target_info
from bean.model.parser import parse_args
from bean.cli.run import main as get_screendata
from bean.preprocessing.data_class import SortingScreenData


def generate_prior_data_for_disjoint_library_pair(
    command1: str, command2: str, output1_path: str, prior_params_path: str
):
    """Generate prior for two batches with disjoint guides but with shared variants."""
    # Load the raw `bean run` output of batch 1; it holds the model data
    # ("data") and the fitted parameters ("params").
    with open(output1_path, "rb") as f:
        data = pkl.load(f)
    ndata = data["data"]
    # Strip the leading "bean run " and re-parse both commands with bean's run
    # parser. Batch 2's screen data is loaded fresh; batch 1's comes from the pickle.
    parser = parse_args()
    command1 = command1.split("bean run ")[-1]
    command2 = command2.split("bean run ")[-1]
    args = parser.parse_args(command1.split(" "))
    args2 = parser.parse_args(command2.split(" "))
    ndata2 = get_screendata(args2, return_data=True)
    # Build per-target info tables for both batches and match variants shared
    # between them by their index labels.
    target_df = _get_guide_target_info(
        ndata.screen, args, cols_include=[args.negctrl_col]
    )
    target_df2 = _get_guide_target_info(
        ndata2.screen, args2, cols_include=[args2.negctrl_col]
    )
    batch1_idx = np.where(
        target_df.index.map(lambda s: s in target_df2.index.tolist())
    )[0]
    batch2_idx = []
    for i in batch1_idx:
        batch2_idx.append(
            np.where(target_df.index.tolist()[i] == target_df2.index)[0].item()
        )
    batch2_idx = np.array(batch2_idx)
    if isinstance(ndata, SortingScreenData):
        # Sorting screens: seed both mu and sd prior parameters of shared
        # variants from the batch-1 fit; unshared targets keep the defaults.
        mu_loc = torch.zeros((ndata2.n_targets, 1))
        mu_loc[batch2_idx, :] = data["params"]["mu_loc"][batch1_idx, :]
        mu_scale = torch.ones((ndata2.n_targets, 1))
        mu_scale[batch2_idx, :] = data["params"]["mu_scale"][batch1_idx, :]
        sd_loc = torch.zeros((ndata2.n_targets, 1))
        sd_loc[batch2_idx, :] = data["params"]["sd_loc"][batch1_idx, :]
        sd_scale = torch.ones((ndata2.n_targets, 1)) * 0.01
        sd_scale[batch2_idx, :] = data["params"]["sd_scale"][batch1_idx, :]
        prior_params = {
            "mu_loc": mu_loc,
            "mu_scale": mu_scale,
            "sd_loc": sd_loc,
            "sd_scale": sd_scale,
        }
    else:
        # Other screen types (e.g. survival): only the mu parameters are carried over.
        mu_loc = torch.zeros((ndata2.n_targets, 1))
        mu_loc[batch2_idx, :] = data["params"]["mu_loc"][batch1_idx, :]
        mu_scale = torch.ones((ndata2.n_targets, 1))
        mu_scale[batch2_idx, :] = data["params"]["mu_scale"][batch1_idx, :]
        prior_params = {
            "mu_loc": mu_loc,
            "mu_scale": mu_scale,
        }
    # Write the prior tensors and tell the user how to feed them into the batch-2 run.
    with open(prior_params_path, "wb") as f:
        pkl.dump(prior_params, f)
    print(
        f"Successfully generated prior parameters at {prior_params_path}. To use this parameter, run:\nbean run {command2 + ' --prior-params ' + prior_params_path}"
    )


def main(args):
    generate_prior_data_for_disjoint_library_pair(
        args.command1, args.command2, args.raw_run_output1, args.output_path
    )
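The diff does not show how this helper is meant to be invoked, so the following is a minimal, hypothetical usage sketch: the subcommand arguments, screen file names, and output paths are placeholders; only the function signature and the printed `--prior-params` follow-up come from the code above.

```python
# Hypothetical usage sketch (not part of this commit). Paths and `bean run`
# arguments below are placeholders.
batch1_cmd = "bean run survival tiling batch1_screen.h5ad"  # placeholder arguments
batch2_cmd = "bean run survival tiling batch2_screen.h5ad"  # placeholder arguments

generate_prior_data_for_disjoint_library_pair(
    command1=batch1_cmd,
    command2=batch2_cmd,
    output1_path="batch1_output/bean_run_result.pkl",  # pickled output of the batch-1 run (placeholder path)
    prior_params_path="batch1_prior_params.pkl",       # where the prior tensors will be written
)
# On success the function prints the follow-up command, roughly:
#   bean run survival tiling batch2_screen.h5ad --prior-params batch1_prior_params.pkl
```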