Skip to content
This repository has been archived by the owner on Oct 9, 2023. It is now read-only.

Commit

Permalink
Format code with autopep8
Browse files · Browse the repository at this point in the history
  • Loading branch information
deepsource-autofix[bot] authored Aug 5, 2021
1 parent 19c74bb commit d2e63d3
Show file tree
Hide file tree
Showing 11 changed files with 23 additions and 23 deletions.
6 changes: 3 additions & 3 deletions flash/audio/speech_recognition/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,9 +64,9 @@ class BaseSpeechRecognition:
def _load_sample(self, sample: Dict[str, Any]) -> Any:
path = sample[DefaultDataKeys.INPUT]
if (
not os.path.isabs(path)
and DefaultDataKeys.METADATA in sample
and "root" in sample[DefaultDataKeys.METADATA]
not os.path.isabs(path) and
DefaultDataKeys.METADATA in sample and
"root" in sample[DefaultDataKeys.METADATA]
):
path = os.path.join(sample[DefaultDataKeys.METADATA]["root"], path)
speech_array, sampling_rate = sf.read(path)
Expand Down
2 changes: 1 addition & 1 deletion flash/core/finetuning.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ def finetune_function(
if epoch == self.unfreeze_milestones[0]:
# unfreeze num_layers last layers
self.unfreeze_and_add_param_group(
modules=backbone_modules[-self.num_layers :],
modules=backbone_modules[-self.num_layers:],
optimizer=optimizer,
train_bn=self.train_bn,
)
Expand Down
6 changes: 3 additions & 3 deletions flash/core/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -402,9 +402,9 @@ def build_data_pipeline(
deserializer = getattr(self.datamodule.data_pipeline, "_deserializer", None)

elif (
self.trainer is not None
and hasattr(self.trainer, "datamodule")
and getattr(self.trainer.datamodule, "data_pipeline", None) is not None
self.trainer is not None and
hasattr(self.trainer, "datamodule") and
getattr(self.trainer.datamodule, "data_pipeline", None) is not None
):
old_data_source = getattr(self.trainer.datamodule.data_pipeline, "data_source", None)
preprocess = getattr(self.trainer.datamodule.data_pipeline, "_preprocess_pipeline", None)
Expand Down
4 changes: 2 additions & 2 deletions flash/image/segmentation/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,8 +129,8 @@ def load_data(

data = filter(
lambda sample: (
has_file_allowed_extension(sample[0], self.extensions)
and has_file_allowed_extension(sample[1], self.extensions)
has_file_allowed_extension(sample[0], self.extensions) and
has_file_allowed_extension(sample[1], self.extensions)
),
zip(input_data, target_data),
)
Expand Down
6 changes: 3 additions & 3 deletions flash/pointcloud/detection/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,9 +120,9 @@ def __init__(
def compute_loss(self, losses: Dict[str, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:
    """Combine the individual detection losses into a single weighted loss.

    Args:
        losses: Dictionary holding a ``"loss"`` entry which is itself a dict
            with ``"loss_cls"``, ``"loss_bbox"`` and ``"loss_dir"`` tensors.

    Returns:
        The weighted sum of the classification, bounding-box and direction
        losses, using the ``lambda_loss_*`` weights stored in ``self.hparams``.
        NOTE(review): the annotation says ``Tuple[...]`` but a single tensor is
        returned — confirm against callers.
    """
    # Unwrap the nested "loss" dict produced by the underlying model.
    losses = losses["loss"]
    return (
        self.hparams.lambda_loss_cls * losses["loss_cls"]
        + self.hparams.lambda_loss_bbox * losses["loss_bbox"]
        + self.hparams.lambda_loss_dir * losses["loss_dir"]
    )

def compute_logs(self, logs: Dict[str, Any], losses: Dict[str, torch.Tensor]):
Expand Down
2 changes: 1 addition & 1 deletion flash/pointcloud/segmentation/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ def finetune_function(
if epoch != self.unfreeze_epoch:
return
self.unfreeze_and_add_param_group(
modules=list(pl_module.backbone.children())[-self.num_layers :],
modules=list(pl_module.backbone.children())[-self.num_layers:],
optimizer=optimizer,
train_bn=self.train_bn,
)
Expand Down
4 changes: 2 additions & 2 deletions flash/tabular/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,8 +200,8 @@ def __init__(
),
},
default_data_source=DefaultDataSources.CSV,
deserializer=deserializer
or TabularDeserializer(
deserializer=deserializer or
TabularDeserializer(
cat_cols=cat_cols,
num_cols=num_cols,
target_col=target_col,
Expand Down
2 changes: 1 addition & 1 deletion flash/text/seq2seq/core/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def _count_ngram(ngram_input_list: List[str], n_gram: int) -> Counter:

for i in range(1, n_gram + 1):
for j in range(len(ngram_input_list) - i + 1):
ngram_key = tuple(ngram_input_list[j : (i + j)])
ngram_key = tuple(ngram_input_list[j: (i + j)])
ngram_counter[ngram_key] += 1

return ngram_counter
Expand Down
2 changes: 1 addition & 1 deletion flash/video/classification/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def finetune_function(
if epoch != self.unfreeze_epoch:
return
self.unfreeze_and_add_param_group(
modules=list(pl_module.backbone.children())[-self.num_layers :],
modules=list(pl_module.backbone.children())[-self.num_layers:],
optimizer=optimizer,
train_bn=self.train_bn,
)
Expand Down
8 changes: 4 additions & 4 deletions tests/core/serve/test_dag/test_optimization.py
Original file line number Diff line number Diff line change
Expand Up @@ -1223,8 +1223,8 @@ def test_fuse_subgraphs():
"inc-6",
inkeys,
),
)
+ inkeys,
) +
inkeys,
}
)
)
Expand All @@ -1248,8 +1248,8 @@ def test_fuse_subgraphs():
"add-2",
inkeys,
),
)
+ inkeys,
) +
inkeys,
"add-2": "inc-add-1",
"inc-6": (inc, (inc, "add-2")),
}
Expand Down
4 changes: 2 additions & 2 deletions tests/image/segmentation/test_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@

def build_checkboard(n, m, k=8):
    """Build an ``n x m`` array with a sparse grid pattern of ones.

    Args:
        n: Number of rows.
        m: Number of columns.
        k: Stride of the pattern (default 8).

    Returns:
        A float64 array of zeros where row ``k`` (then every ``2k`` rows) is
        marked at every ``k``-th column, and row 0 (then every ``2k`` rows)
        is marked at columns ``k, 3k, 5k, ...``.
    """
    x = np.zeros((n, m))
    # Rows k, 3k, 5k, ... get a one at every k-th column.
    x[k::k * 2, ::k] = 1
    # Rows 0, 2k, 4k, ... get a one at columns k, 3k, 5k, ...
    x[::k * 2, k::k * 2] = 1
    return x


Expand Down

0 comments on commit d2e63d3

Please sign in to comment.