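"""Strip a trained generator checkpoint down to a slimmer release checkpoint.

Only the generator weights are kept (keys belonging to enc_q, which is used
during training only, are dropped), the saved optimizer state is replaced by a
freshly initialized AdamW state_dict, and the weights can optionally be cast
to FP16.

Example invocation (the script name and paths are illustrative and depend on
how this file is saved in the repo):

    python remove_optimizer.py -c configs/config.json -i logs/G_10000.pth -hf
"""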
from collections import OrderedDict
import torch
import utils
from models import SynthesizerTrn


def copyStateDict(state_dict):
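    """Return a copy of state_dict with any leading 'module.' prefix stripped.

    Checkpoints saved from a torch.nn.DataParallel-wrapped model prefix every
    key with 'module.'; stripping it lets the weights load into a bare model.
    """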
    if list(state_dict.keys())[0].startswith('module'):
        start_idx = 1
    else:
        start_idx = 0
    new_state_dict = OrderedDict()
    for k, v in state_dict.items():
        # Re-join the remaining parts with '.' so the keys stay loadable.
        name = '.'.join(k.split('.')[start_idx:])
        new_state_dict[name] = v
    return new_state_dict


def removeOptimizer(config: str, input_model: str, ishalf: bool, output_model: str):
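    """Re-save a training checkpoint as a release checkpoint.

    Keeps the generator weights (skipping the training-only enc_q keys),
    stores a freshly initialized AdamW optimizer state in place of the one
    accumulated during training, resets the iteration counter, and optionally
    casts the weights to FP16.
    """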
    hps = utils.get_hparams_from_file(config)

    # The model is rebuilt only so a fresh AdamW optimizer (with an empty
    # state) can be created and saved in place of the trained one.
    net_g = SynthesizerTrn(hps.data.filter_length // 2 + 1,
                           hps.train.segment_size // hps.data.hop_length,
                           **hps.model)

    optim_g = torch.optim.AdamW(net_g.parameters(),
                                hps.train.learning_rate,
                                betas=hps.train.betas,
                                eps=hps.train.eps)

    state_dict_g = torch.load(input_model, map_location="cpu")
    new_dict_g = copyStateDict(state_dict_g)

    keys = []
    for k in new_dict_g['model'].keys():
        # enc_q is only needed during training; leave it out of the release.
        if "enc_q" in k:
            continue
        keys.append(k)

    # Optionally cast the remaining weights to FP16.
    new_dict_g = ({k: new_dict_g['model'][k].half() for k in keys}
                  if ishalf else {k: new_dict_g['model'][k] for k in keys})

    torch.save(
        {
            'model': new_dict_g,
            'iteration': 0,
            'optimizer': optim_g.state_dict(),
            'learning_rate': 0.0001
        }, output_model)


if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-c",
                        "--config",
                        type=str,
                        default='configs/config.json')
    parser.add_argument("-i", "--input", type=str)
    parser.add_argument("-o", "--output", type=str, default=None)
    parser.add_argument('-hf',
                        '--half',
                        action='store_true',
                        default=False,
                        help='Save as FP16')

    args = parser.parse_args()

    output = args.output

    # Default output name: <input>_release<ext>, with "_half" appended when
    # saving as FP16.
    if output is None:
        import os.path
        filename, ext = os.path.splitext(args.input)
        half = "_half" if args.half else ""
        output = filename + "_release" + half + ext

    removeOptimizer(args.config, args.input, args.half, output)
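# Quick sanity check of the result (a minimal sketch; the checkpoint name is
# illustrative and depends on your training run):
#
#     ckpt = torch.load("G_10000_release.pth", map_location="cpu")
#     print(ckpt["iteration"], ckpt["learning_rate"], len(ckpt["model"]))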