Commit d2966b2
Apply isort and black reformatting
Signed-off-by: zpx01 <[email protected]>
zpx01 committed Nov 9, 2024
1 parent 207f308 commit d2966b2
Showing 2 changed files with 3 additions and 1 deletion.
nemo/collections/diffusion/models/dit/dit_embeddings.py (2 changes: 1 addition & 1 deletion)
```diff
@@ -54,7 +54,7 @@ def __init__(self, in_channels: int, time_embed_dim: int, seed=None):
             torch.manual_seed(seed)
             self.linear_1.reset_parameters()
             self.linear_2.reset_parameters()
 
         if parallel_state.get_pipeline_model_parallel_world_size() > 1:
             setattr(self.linear_1.weight, "pipeline_parallel", True)
             setattr(self.linear_1.bias, "pipeline_parallel", True)
```
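For context on the hunk above: the embedding module tags its linear layers' parameters with a `pipeline_parallel` attribute, but only when Megatron-Core reports more than one pipeline stage. A minimal standalone sketch of the same pattern; the `nn.Linear` layer, its sizes, and the surrounding setup are illustrative assumptions, not code from this commit:

```python
# Sketch of the tagging pattern from dit_embeddings.py above.
# Assumptions: megatron.core is installed and model-parallel state has
# already been initialized (otherwise the world-size query will fail);
# the Linear layer here is a made-up stand-in for the module's projection.
import torch.nn as nn
from megatron.core import parallel_state

linear_1 = nn.Linear(256, 1024)  # hypothetical stand-in layer

if parallel_state.get_pipeline_model_parallel_world_size() > 1:
    # Mark the parameters so code elsewhere in NeMo that inspects the
    # module's parameters can treat them specially under pipeline
    # parallelism (the exact consumer of this flag lives elsewhere).
    setattr(linear_1.weight, "pipeline_parallel", True)
    setattr(linear_1.bias, "pipeline_parallel", True)
```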
nemo/collections/diffusion/train.py (2 changes: 2 additions & 0 deletions)
```diff
@@ -251,6 +251,7 @@ def mock_ditllama5b_8k() -> run.Partial:
     recipe.resume = None
     return recipe
 
+
 @run.cli.factory(target=llm.train)
 def mock_dit7b_8k() -> run.Partial:
     recipe = mock_ditllama5b_8k()
@@ -261,6 +262,7 @@ def mock_dit7b_8k() -> run.Partial:
     recipe.log.log_dir = 'nemo_experiments/mock_dit7b_8k'
     return recipe
 
+
 @run.cli.factory(target=llm.train)
 def pretrain_7b() -> run.Partial:
     """DiT-7B Pretraining Recipe"""
```
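Both train.py hunks add a second blank line before a top-level decorated function, which is black's PEP 8 spacing rule: two blank lines between top-level definitions. A small illustration of that behavior, assuming black is installed; the snippet and its function names are hypothetical, not part of the commit:

```python
# Demonstrates the blank-line normalization that produced the hunks above.
# Assumes `pip install black`; format_str and Mode are black's public API.
import black

src = (
    "def mock_a():\n"
    "    return 1\n"
    "\n"  # only one blank line between top-level defs
    "def mock_b():\n"
    "    return 2\n"
)

formatted = black.format_str(src, mode=black.Mode())
print(formatted)  # black emits two blank lines between the two functions
```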
