Summary: Import from github
Reviewed By: mthrok
Differential Revision: D28606124
fbshipit-source-id: 05dcb07efc5537d928bec682a68e6ccee7cc325e
1 parent 7f6ac05 · commit 81db19b
Showing 28 changed files with 682 additions and 352 deletions.
test/torchaudio_unittest/rnnt/torchscript_consistency_cpu_test.py (10 additions, 0 deletions)
@@ -0,0 +1,10 @@
import torch

from torchaudio_unittest.common_utils import PytorchTestCase
from .utils import skipIfNoTransducer
from .torchscript_consistency_impl import RNNTLossTorchscript


@skipIfNoTransducer
class TestRNNTLoss(RNNTLossTorchscript, PytorchTestCase):
    device = torch.device('cpu')
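For context (not part of this commit), the device-specific files above and below follow a common torchaudio test layout: a device-agnostic mixin carries the test bodies, and thin per-device subclasses only pin the device. A minimal, self-contained sketch of that pattern using plain unittest (all class names here are hypothetical) might look like this:

import unittest

import torch


class AddOneImpl:
    """Device-agnostic test body; subclasses only set `device`."""
    device: torch.device

    def test_add_one(self):
        x = torch.zeros(3, device=self.device)
        self.assertTrue(torch.equal(x + 1, torch.ones(3, device=self.device)))


class AddOneCPUTest(AddOneImpl, unittest.TestCase):
    device = torch.device('cpu')


@unittest.skipUnless(torch.cuda.is_available(), 'CUDA is not available')
class AddOneCUDATest(AddOneImpl, unittest.TestCase):
    device = torch.device('cuda')


if __name__ == '__main__':
    unittest.main()

In the commit itself, RNNTLossTorchscript (defined in torchscript_consistency_impl.py below) plays the role of the mixin, while PytorchTestCase supplies the test-case machinery.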
test/torchaudio_unittest/rnnt/torchscript_consistency_cuda_test.py (11 additions, 0 deletions)
@@ -0,0 +1,11 @@
import torch

from torchaudio_unittest.common_utils import PytorchTestCase, skipIfNoCuda
from .utils import skipIfNoTransducer
from .torchscript_consistency_impl import RNNTLossTorchscript


@skipIfNoTransducer
@skipIfNoCuda
class TestRNNTLoss(RNNTLossTorchscript, PytorchTestCase):
    device = torch.device('cuda')
test/torchaudio_unittest/rnnt/torchscript_consistency_impl.py (70 additions, 0 deletions)
@@ -0,0 +1,70 @@
import torch
from torchaudio_unittest.common_utils import TempDirMixin, TestBaseMixin
from torchaudio.prototype.rnnt_loss import RNNTLoss, rnnt_loss


class RNNTLossTorchscript(TempDirMixin, TestBaseMixin):
    """Implements tests for RNNT loss that are performed on different devices"""
    def _assert_consistency(self, func, tensor, shape_only=False):
        tensor = tensor.to(device=self.device, dtype=self.dtype)

        path = self.get_temp_path('func.zip')
        torch.jit.script(func).save(path)
        ts_func = torch.jit.load(path)

        torch.random.manual_seed(40)
        input_tensor = tensor.clone().detach().requires_grad_(True)
        output = func(input_tensor)

        torch.random.manual_seed(40)
        input_tensor = tensor.clone().detach().requires_grad_(True)
        ts_output = ts_func(input_tensor)

        self.assertEqual(ts_output, output)

    def test_rnnt_loss(self):
        def func(
            logits,
        ):
            targets = torch.tensor([[1, 2]], device=logits.device, dtype=torch.int32)
            logit_lengths = torch.tensor([2], device=logits.device, dtype=torch.int32)
            target_lengths = torch.tensor([2], device=logits.device, dtype=torch.int32)
            return rnnt_loss(logits, targets, logit_lengths, target_lengths)

        logits = torch.tensor([[[[0.1, 0.6, 0.1, 0.1, 0.1],
                                 [0.1, 0.1, 0.6, 0.1, 0.1],
                                 [0.1, 0.1, 0.2, 0.8, 0.1]],
                                [[0.1, 0.6, 0.1, 0.1, 0.1],
                                 [0.1, 0.1, 0.2, 0.1, 0.1],
                                 [0.7, 0.1, 0.2, 0.1, 0.1]]]])

        self._assert_consistency(func, logits)

    def test_RNNTLoss(self):
        func = RNNTLoss()

        logits = torch.tensor([[[[0.1, 0.6, 0.1, 0.1, 0.1],
                                 [0.1, 0.1, 0.6, 0.1, 0.1],
                                 [0.1, 0.1, 0.2, 0.8, 0.1]],
                                [[0.1, 0.6, 0.1, 0.1, 0.1],
                                 [0.1, 0.1, 0.2, 0.1, 0.1],
                                 [0.7, 0.1, 0.2, 0.1, 0.1]]]])
        targets = torch.tensor([[1, 2]], device=self.device, dtype=torch.int32)
        logit_lengths = torch.tensor([2], device=self.device, dtype=torch.int32)
        target_lengths = torch.tensor([2], device=self.device, dtype=torch.int32)

        tensor = logits.to(device=self.device, dtype=self.dtype)

        path = self.get_temp_path('func.zip')
        torch.jit.script(func).save(path)
        ts_func = torch.jit.load(path)

        torch.random.manual_seed(40)
        input_tensor = tensor.clone().detach().requires_grad_(True)
        output = func(input_tensor, targets, logit_lengths, target_lengths)

        torch.random.manual_seed(40)
        input_tensor = tensor.clone().detach().requires_grad_(True)
        ts_output = ts_func(input_tensor, targets, logit_lengths, target_lengths)

        self.assertEqual(ts_output, output)
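The check these tests perform is a generic TorchScript round trip: script the callable, serialize it to disk, reload it, and assert that the reloaded version reproduces eager execution. A minimal standalone sketch of the same pattern using only plain torch follows; the helper assert_scripted_matches_eager and the double function are hypothetical illustrations, not part of the commit.

import os
import tempfile

import torch


def assert_scripted_matches_eager(func, example_input):
    # Script the callable, round-trip it through serialization, and reload it.
    with tempfile.TemporaryDirectory() as tmp_dir:
        path = os.path.join(tmp_dir, 'func.zip')
        torch.jit.script(func).save(path)
        ts_func = torch.jit.load(path)

    # Compare eager and scripted outputs on the same input.
    eager_out = func(example_input)
    scripted_out = ts_func(example_input)
    assert torch.allclose(eager_out, scripted_out)


def double(x: torch.Tensor) -> torch.Tensor:
    return x * 2


assert_scripted_matches_eager(double, torch.arange(4, dtype=torch.float32))

Saving to a file and reloading with torch.jit.load, rather than calling the scripted object directly, exercises the serialization path that a deployed TorchScript artifact would actually take, which is why the tests in this commit go through get_temp_path.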