Skip to content

Commit

Permalink
Fix (llama-tp): remove unnecessary code and files
Browse files Browse the repository at this point in the history
  • Loading branch information
SeungyounShin committed Aug 11, 2024
1 parent 5237c3c commit f8846ce
Show file tree
Hide file tree
Showing 3 changed files with 0 additions and 82 deletions.
8 changes: 0 additions & 8 deletions src/transformers/modeling_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,14 +39,6 @@
from torch import Tensor, nn
from torch.nn import CrossEntropyLoss, Identity
from torch.utils.checkpoint import checkpoint
from torch.distributed._tensor import init_device_mesh, Shard, Replicate
from torch.distributed.tensor.parallel import (
parallelize_module,
ColwiseParallel,
RowwiseParallel,
PrepareModuleInput,
SequenceParallel,
)

from .activations import get_activation
from .configuration_utils import PretrainedConfig
Expand Down
2 changes: 0 additions & 2 deletions src/transformers/models/llama/modeling_llama.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,8 +48,6 @@
replace_return_docstrings,
)
from .configuration_llama import LlamaConfig
from .tensor_parallel_plan_llama import LlamaTensorParallelPlan


logger = logging.get_logger(__name__)

Expand Down
72 changes: 0 additions & 72 deletions src/transformers/models/llama/tensor_parallel_plan_llama.py

This file was deleted.

0 comments on commit f8846ce

Please sign in to comment.