forked from MoonInTheRiver/DiffSinger
-
Notifications
You must be signed in to change notification settings - Fork 291
/
Copy pathbase.yaml
94 lines (86 loc) · 1.81 KB
/
base.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
# task
task_cls: null  # fully-qualified training task class; must be set by the inheriting config
#############
# dataset
#############
sort_by_len: true
raw_data_dir: null     # must be set by the inheriting config
binary_data_dir: null  # must be set by the inheriting config
binarizer_cls: null    # must be set by the inheriting config
# NOTE(review): the children below belong to binarization_args — restored the
# 2-space nesting that was lost; without it each key parses at top level.
binarization_args:
  shuffle: false
  num_workers: 0
# STFT / feature-extraction parameters.
audio_sample_rate: 44100
hop_size: 512
win_size: 2048
fft_size: 2048  # Extra window size is filled with 0 paddings to match this parameter
sampler_frame_count_grid: 6
ds_workers: 4   # presumably DataLoader worker processes — confirm against the dataset code
dataloader_prefetch_factor: 2
#########
# model
#########
# NOTE(review): key names suggest a Transformer-style encoder
# (layers / attention heads / FFN) — confirm against the model implementation.
hidden_size: 256
dropout: 0.1
use_pos_embed: true
enc_layers: 4
num_heads: 2
enc_ffn_kernel_size: 9
ffn_act: gelu
use_spk_id: false  # whether to condition on speaker IDs (multi-speaker training)
###########
# optimization
###########
# NOTE(review): restored 2-space nesting under optimizer_args and
# lr_scheduler_args — flat keys here parse as top-level and the parents as null.
optimizer_args:
  optimizer_cls: torch.optim.AdamW
  lr: 0.0004
  beta1: 0.9
  beta2: 0.98
  weight_decay: 0
lr_scheduler_args:
  scheduler_cls: torch.optim.lr_scheduler.StepLR
  step_size: 50000  # halve the LR (gamma 0.5) every 50k steps
  gamma: 0.5
clip_grad_norm: 1
###########
# train and eval
###########
num_ckpt_keep: 5
accumulate_grad_batches: 1
log_interval: 100
num_sanity_val_steps: 1 # steps of validation at the beginning
val_check_interval: 2000
max_updates: 120000
# Batch-size limits expressed in frames (and item counts) rather than items only.
max_batch_frames: 32000
max_batch_size: 100000
max_val_batch_frames: 60000
max_val_batch_size: 1
# Pitch extractor and harmonic-noise separator selection.
# NOTE(review): pe is 'parselmouth' while pe_ckpt points to an rmvpe model —
# presumably pe_ckpt is only read when pe is set to 'rmvpe'; confirm in the code.
pe: parselmouth
pe_ckpt: 'checkpoints/rmvpe/model.pt'
hnsep: vr
hnsep_ckpt: 'checkpoints/vr/model.pt'
f0_min: 65    # Hz
f0_max: 1100  # Hz
num_valid_plots: 10
###########
# pytorch lightning
# Read https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api for possible values
###########
pl_trainer_accelerator: 'auto'
pl_trainer_devices: 'auto'
pl_trainer_precision: '16-mixed'
pl_trainer_num_nodes: 1
# NOTE(review): restored 2-space nesting under pl_trainer_strategy — without it
# name/process_group_backend/find_unused_parameters parse as top-level keys.
pl_trainer_strategy:
  name: auto
  process_group_backend: nccl
  find_unused_parameters: false
# NOTE(review): kept nccl_p2p at top level (not inside pl_trainer_strategy) —
# confirm against the trainer setup code that reads this flag.
nccl_p2p: true
###########
# finetune
###########
finetune_enabled: false
finetune_ckpt_path: null   # checkpoint to initialize from when finetuning
finetune_ignored_params: []  # parameter name patterns to skip when loading — presumably prefix/regex matches; confirm
finetune_strict_shapes: true
freezing_enabled: false
frozen_params: []  # parameter name patterns excluded from gradient updates — confirm matching rule in code