Skip to content

Commit a3a69e8

Browse files
chore(format): run black on dev (#144)
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
1 parent f9ae0b5 commit a3a69e8

File tree

3 files changed

+11
-4
lines changed

3 files changed

+11
-4
lines changed

infer/modules/train/train.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
except Exception:
3333
pass
3434
finally:
35-
if not ('GradScaler' in globals() and 'autocast' in globals()):
35+
if not ("GradScaler" in globals() and "autocast" in globals()):
3636
from torch.amp.grad_scaler import GradScaler
3737
from torch.amp.autocast_mode import autocast
3838

rvc/layers/encoders.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -213,6 +213,7 @@ def remove_weight_norm(self):
213213

214214
def __prepare_scriptable__(self):
215215
from torch.nn.utils import parametrize
216+
216217
if parametrize.is_parametrized(self.enc, "weight"):
217218
parametrize.remove_parametrizations(self.enc, "weight")
218219
return self

rvc/layers/norms.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,9 @@ def __init__(
5151
cond_layer = torch.nn.Conv1d(
5252
gin_channels, 2 * hidden_channels * n_layers, 1
5353
)
54-
self.cond_layer = torch.nn.utils.parametrizations.weight_norm(cond_layer, name="weight")
54+
self.cond_layer = torch.nn.utils.parametrizations.weight_norm(
55+
cond_layer, name="weight"
56+
)
5557

5658
for i in range(n_layers):
5759
dilation = dilation_rate**i
@@ -63,7 +65,9 @@ def __init__(
6365
dilation=dilation,
6466
padding=padding,
6567
)
66-
in_layer = torch.nn.utils.parametrizations.weight_norm(in_layer, name="weight")
68+
in_layer = torch.nn.utils.parametrizations.weight_norm(
69+
in_layer, name="weight"
70+
)
6771
self.in_layers.append(in_layer)
6872

6973
# last one is not necessary
@@ -73,7 +77,9 @@ def __init__(
7377
res_skip_channels = hidden_channels
7478

7579
res_skip_layer = torch.nn.Conv1d(hidden_channels, res_skip_channels, 1)
76-
res_skip_layer = torch.nn.utils.parametrizations.weight_norm(res_skip_layer, name="weight")
80+
res_skip_layer = torch.nn.utils.parametrizations.weight_norm(
81+
res_skip_layer, name="weight"
82+
)
7783
self.res_skip_layers.append(res_skip_layer)
7884

7985
def __call__(

0 commit comments

Comments (0)