Commit

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Apr 25, 2024
1 parent d00598e commit ce11487
Showing 1 changed file with 3 additions and 6 deletions.
9 changes: 3 additions & 6 deletions nemo/core/optim/distributed_adam.py
@@ -34,12 +34,12 @@
 from megatron.core.dist_checkpointing.optimizer import get_param_id_to_sharded_param_map, optim_state_to_sharding_state
 from transformer_engine.pytorch.cpp_extensions import cast_to_fp8

-from nemo.utils import str_to_dtype
-from nemo.utils import logging
+from nemo.utils import logging, str_to_dtype
 from nemo.utils.te_utils import is_float8tensor

 _distribute_within_nodes_pgs = {}


 def create_distribute_within_nodes_pgs():
     """Create process groups for distributing with nodes.
@@ -412,10 +412,7 @@ def init_param_buffer(self) -> None:
             # `param.data.set_()` failed to change storage.
             # `param.set_()` invalidates bprop hook.
             param.data = torch.as_strided(
-                buffer_view,
-                param.size(),
-                param.stride(),
-                storage_offset=buffer_view.storage_offset(),
+                buffer_view, param.size(), param.stride(), storage_offset=buffer_view.storage_offset(),
             )

     def try_grad_sync(self, params: Iterable[torch.nn.Parameter]) -> None:
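
The reformatted call above only changes layout; the underlying pattern is to rebind `param.data` to a strided view of a contiguous buffer so the existing `Parameter` object (and any backprop hooks registered on it) stays intact. Below is a minimal, self-contained sketch of that pattern, independent of NeMo; `flat_buffer` is an illustrative name, not an identifier from this file.

import torch

# Toy parameter and a contiguous buffer large enough to hold it
# (illustrative setup; not code from distributed_adam.py).
param = torch.nn.Parameter(torch.randn(4, 3))
flat_buffer = torch.zeros(param.numel())

# Copy the parameter values into a view of the buffer.
buffer_view = flat_buffer[: param.numel()]
buffer_view.copy_(param.detach().reshape(-1))

# Rebind `param.data` to a strided view of the buffer. Assigning to
# `param.data` (rather than calling `param.data.set_()` or `param.set_()`)
# keeps the same Parameter object, so hooks registered on it remain valid,
# while its storage now aliases the buffer.
param.data = torch.as_strided(
    buffer_view, param.size(), param.stride(), storage_offset=buffer_view.storage_offset(),
)

# The parameter and the buffer now share memory.
assert param.data_ptr() == flat_buffer.data_ptr()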
