
Commit 0911057
[Vision] fix small nit on BeitDropPath layers (#20587)
* fix small nit

* add last file
younesbelkada authored Dec 5, 2022
1 parent e135a6c commit 0911057
Showing 15 changed files with 30 additions and 30 deletions.
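
For context: every class touched by this commit is the same copy-pasted stochastic-depth layer, and the change is purely a rename of the forward argument from `x` to `hidden_states` to match the naming convention used across the library. Below is a minimal sketch of the pattern after the rename; the `drop_path` helper body is a reconstruction of the usual timm-style implementation for illustration, not part of this diff.

```python
from typing import Optional

import torch
from torch import nn


def drop_path(input: torch.Tensor, drop_prob: Optional[float] = 0.0, training: bool = False) -> torch.Tensor:
    # Stochastic depth: drop entire residual paths per *sample*, not per element.
    if drop_prob is None or drop_prob == 0.0 or not training:
        return input
    keep_prob = 1 - drop_prob
    # One Bernoulli draw per sample, broadcast over all remaining dimensions.
    shape = (input.shape[0],) + (1,) * (input.ndim - 1)
    random_tensor = keep_prob + torch.rand(shape, dtype=input.dtype, device=input.device)
    random_tensor.floor_()  # binarize to 0.0 / 1.0
    # Rescale surviving paths so the expected output matches eval mode.
    return input.div(keep_prob) * random_tensor


class BeitDropPath(nn.Module):
    """Per-sample drop path; the forward argument is `hidden_states` after this commit."""

    def __init__(self, drop_prob: Optional[float] = None) -> None:
        super().__init__()
        self.drop_prob = drop_prob

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        return drop_path(hidden_states, self.drop_prob, self.training)

    def extra_repr(self) -> str:
        return "p={}".format(self.drop_prob)
```

A block typically applies this around a residual branch, e.g. `hidden_states = shortcut + self.drop_path(layer_output)`; with `self.training` False the layer is the identity, so the rename has no behavioral effect.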
4 changes: 2 additions & 2 deletions src/transformers/models/beit/modeling_beit.py

@@ -118,8 +118,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
4 changes: 2 additions & 2 deletions src/transformers/models/convnext/modeling_convnext.py

@@ -82,8 +82,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
4 changes: 2 additions & 2 deletions src/transformers/models/cvt/modeling_cvt.py

@@ -107,8 +107,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
4 changes: 2 additions & 2 deletions src/transformers/models/data2vec/modeling_data2vec_vision.py

@@ -120,8 +120,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
4 changes: 2 additions & 2 deletions src/transformers/models/dinat/modeling_dinat.py

@@ -295,8 +295,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
4 changes: 2 additions & 2 deletions src/transformers/models/donut/modeling_donut_swin.py

@@ -325,8 +325,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
4 changes: 2 additions & 2 deletions src/transformers/models/glpn/modeling_glpn.py

@@ -82,8 +82,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
@@ -285,8 +285,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
4 changes: 2 additions & 2 deletions src/transformers/models/nat/modeling_nat.py

@@ -289,8 +289,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
4 changes: 2 additions & 2 deletions src/transformers/models/poolformer/modeling_poolformer.py

@@ -79,8 +79,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
4 changes: 2 additions & 2 deletions src/transformers/models/segformer/modeling_segformer.py

@@ -114,8 +114,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
4 changes: 2 additions & 2 deletions src/transformers/models/swin/modeling_swin.py

@@ -397,8 +397,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
4 changes: 2 additions & 2 deletions src/transformers/models/swinv2/modeling_swinv2.py

@@ -262,8 +262,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
4 changes: 2 additions & 2 deletions src/transformers/models/van/modeling_van.py

@@ -83,8 +83,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
4 changes: 2 additions & 2 deletions src/transformers/models/x_clip/modeling_x_clip.py

@@ -381,8 +381,8 @@ def __init__(self, drop_prob: Optional[float] = None) -> None:
         super().__init__()
         self.drop_prob = drop_prob
 
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        return drop_path(x, self.drop_prob, self.training)
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        return drop_path(hidden_states, self.drop_prob, self.training)
 
     def extra_repr(self) -> str:
         return "p={}".format(self.drop_prob)
