
Commit 5bd5dfe

remove unnecessary f0 loss in discriminator
1 parent a4db3db commit 5bd5dfe

File tree

cosyvoice/bin/train.py
cosyvoice/hifigan/hifigan.py
cosyvoice/utils/train_utils.py

3 files changed: +27 -27 lines changed

cosyvoice/bin/train.py (+2)

@@ -95,6 +95,8 @@ def main():
         override_dict.pop('hift')
     with open(args.config, 'r') as f:
         configs = load_hyperpyyaml(f, overrides=override_dict)
+    if gan is True:
+        configs['train_conf'] = configs['train_conf_gan']
     configs['train_conf'].update(vars(args))

     # Init env for ddp
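
With this change, the GAN-specific training section is folded into the generic train_conf key right after the YAML is loaded, so everything downstream can read configs['train_conf'] regardless of training mode. A minimal sketch of that selection, with an illustrative config dict standing in for the loaded hyperpyyaml recipe (the keys and values below are assumptions, not the actual recipe contents):

gan = True

# Illustrative stand-in for configs = load_hyperpyyaml(f, overrides=override_dict)
configs = {
    'train_conf': {'optim': 'adam', 'optim_conf': {'lr': 1e-3}},
    'train_conf_gan': {'optim': 'adam', 'optim_conf': {'lr': 2e-4},
                       'optim_d': 'adam', 'scheduler_d': 'constantlr'},
}

if gan is True:
    # Collapse the GAN section into the generic one; downstream code
    # (e.g. init_optimizer_and_scheduler) now only ever reads 'train_conf'.
    configs['train_conf'] = configs['train_conf_gan']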

cosyvoice/hifigan/hifigan.py (+2 -3)

@@ -64,6 +64,5 @@ def forward_discriminator(self, batch, device):
             loss_tpr = tpr_loss(y_d_rs, y_d_gs, self.tpr_loss_tau)
         else:
             loss_tpr = torch.zeros(1).to(device)
-        loss_f0 = F.l1_loss(generated_f0, pitch_feat)
-        loss = loss_disc + self.tpr_loss_weight * loss_tpr + loss_f0
-        return {'loss': loss, 'loss_disc': loss_disc, 'loss_tpr': loss_tpr, 'loss_f0': loss_f0}
+        loss = loss_disc + self.tpr_loss_weight * loss_tpr
+        return {'loss': loss, 'loss_disc': loss_disc, 'loss_tpr': loss_tpr}
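
The dropped loss_f0 term compares generator outputs against the pitch target, so it has no gradient path to the discriminator's parameters; inside forward_discriminator it only shifted the reported scalar. A small self-contained sketch of that point, using a toy linear layer as a stand-in discriminator (names and shapes here are illustrative, not the CosyVoice modules):

import torch
import torch.nn as nn
import torch.nn.functional as F

disc = nn.Linear(4, 1)                 # toy stand-in for the discriminator
x = torch.randn(8, 4)                  # toy stand-in for discriminator inputs
generated_f0 = torch.randn(10)         # produced by the generator, detached in this step
pitch_feat = torch.randn(10)           # ground-truth pitch feature

loss_disc = disc(x).pow(2).mean()      # toy discriminator loss
loss_f0 = F.l1_loss(generated_f0, pitch_feat)
(loss_disc + loss_f0).backward()
grad_with_f0 = disc.weight.grad.clone()

disc.zero_grad()
disc(x).pow(2).mean().backward()
grad_without_f0 = disc.weight.grad

# Identical gradients: the f0 term contributed nothing to the discriminator update.
print(torch.allclose(grad_with_f0, grad_without_f0))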

cosyvoice/utils/train_utils.py (+23 -24)

@@ -110,30 +110,29 @@ def wrap_cuda_model(args, model):


 def init_optimizer_and_scheduler(args, configs, model, gan):
-    key = 'train_conf_gan' if gan is True else 'train_conf'
-    if configs[key]['optim'] == 'adam':
-        optimizer = optim.Adam(model.parameters(), **configs[key]['optim_conf'])
-    elif configs[key]['optim'] == 'adamw':
-        optimizer = optim.AdamW(model.parameters(), **configs[key]['optim_conf'])
+    if configs['train_conf']['optim'] == 'adam':
+        optimizer = optim.Adam(model.parameters(), **configs['train_conf']['optim_conf'])
+    elif configs['train_conf']['optim'] == 'adamw':
+        optimizer = optim.AdamW(model.parameters(), **configs['train_conf']['optim_conf'])
     else:
-        raise ValueError("unknown optimizer: " + configs[key])
+        raise ValueError("unknown optimizer: " + configs['train_conf'])

-    if configs[key]['scheduler'] == 'warmuplr':
+    if configs['train_conf']['scheduler'] == 'warmuplr':
         scheduler_type = WarmupLR
-        scheduler = WarmupLR(optimizer, **configs[key]['scheduler_conf'])
-    elif configs[key]['scheduler'] == 'NoamHoldAnnealing':
+        scheduler = WarmupLR(optimizer, **configs['train_conf']['scheduler_conf'])
+    elif configs['train_conf']['scheduler'] == 'NoamHoldAnnealing':
         scheduler_type = NoamHoldAnnealing
-        scheduler = NoamHoldAnnealing(optimizer, **configs[key]['scheduler_conf'])
-    elif configs[key]['scheduler'] == 'constantlr':
+        scheduler = NoamHoldAnnealing(optimizer, **configs['train_conf']['scheduler_conf'])
+    elif configs['train_conf']['scheduler'] == 'constantlr':
         scheduler_type = ConstantLR
         scheduler = ConstantLR(optimizer)
     else:
-        raise ValueError("unknown scheduler: " + configs[key])
+        raise ValueError("unknown scheduler: " + configs['train_conf'])

     # use deepspeed optimizer for speedup
     if args.train_engine == "deepspeed":
         def scheduler(opt):
-            return scheduler_type(opt, **configs[key]['scheduler_conf'])
+            return scheduler_type(opt, **configs['train_conf']['scheduler_conf'])
         model, optimizer, _, scheduler = deepspeed.initialize(
             args=args,
             model=model,
@@ -143,24 +142,24 @@ def scheduler(opt):

     # currently we wrap generator and discriminator in one model, so we cannot use deepspeed
     if gan is True:
-        if configs[key]['optim_d'] == 'adam':
-            optimizer_d = optim.Adam(model.module.discriminator.parameters(), **configs[key]['optim_conf'])
-        elif configs[key]['optim_d'] == 'adamw':
-            optimizer_d = optim.AdamW(model.module.discriminator.parameters(), **configs[key]['optim_conf'])
+        if configs['train_conf']['optim_d'] == 'adam':
+            optimizer_d = optim.Adam(model.module.discriminator.parameters(), **configs['train_conf']['optim_conf'])
+        elif configs['train_conf']['optim_d'] == 'adamw':
+            optimizer_d = optim.AdamW(model.module.discriminator.parameters(), **configs['train_conf']['optim_conf'])
         else:
-            raise ValueError("unknown optimizer: " + configs[key])
+            raise ValueError("unknown optimizer: " + configs['train_conf'])

-        if configs[key]['scheduler_d'] == 'warmuplr':
+        if configs['train_conf']['scheduler_d'] == 'warmuplr':
             scheduler_type = WarmupLR
-            scheduler_d = WarmupLR(optimizer_d, **configs[key]['scheduler_conf'])
-        elif configs[key]['scheduler_d'] == 'NoamHoldAnnealing':
+            scheduler_d = WarmupLR(optimizer_d, **configs['train_conf']['scheduler_conf'])
+        elif configs['train_conf']['scheduler_d'] == 'NoamHoldAnnealing':
             scheduler_type = NoamHoldAnnealing
-            scheduler_d = NoamHoldAnnealing(optimizer_d, **configs[key]['scheduler_conf'])
-        elif configs[key]['scheduler'] == 'constantlr':
+            scheduler_d = NoamHoldAnnealing(optimizer_d, **configs['train_conf']['scheduler_conf'])
+        elif configs['train_conf']['scheduler'] == 'constantlr':
             scheduler_type = ConstantLR
             scheduler_d = ConstantLR(optimizer_d)
         else:
-            raise ValueError("unknown scheduler: " + configs[key])
+            raise ValueError("unknown scheduler: " + configs['train_conf'])
     else:
         optimizer_d, scheduler_d = None, None
     return model, optimizer, scheduler, optimizer_d, scheduler_d
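
After this commit init_optimizer_and_scheduler always reads configs['train_conf']; the GAN-specific selection happens once in train.py, so the section is expected to carry both the generator keys (optim, optim_conf, scheduler, scheduler_conf) and, for GAN training, the discriminator keys (optim_d, scheduler_d). A hedged sketch of the optimizer dispatch against such a section; the values are illustrative, not taken from a real recipe:

import torch
import torch.optim as optim

# Illustrative layout of the unified section; real values come from the recipe YAML.
train_conf = {
    'optim': 'adamw', 'optim_conf': {'lr': 1e-4},
    'scheduler': 'constantlr', 'scheduler_conf': {},
    'optim_d': 'adamw', 'scheduler_d': 'constantlr',   # only consulted when gan is True
}

model = torch.nn.Linear(4, 4)   # stand-in for the wrapped generator/discriminator model

# Same dispatch as in init_optimizer_and_scheduler, now always keyed on 'train_conf'.
if train_conf['optim'] == 'adam':
    optimizer = optim.Adam(model.parameters(), **train_conf['optim_conf'])
elif train_conf['optim'] == 'adamw':
    optimizer = optim.AdamW(model.parameters(), **train_conf['optim_conf'])
else:
    raise ValueError("unknown optimizer: " + train_conf['optim'])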
