# cyclegan_horse2zebra.yaml
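# CycleGAN config for unpaired horse <-> zebra image translation.
# Typical launch (a sketch assuming PaddleGAN's standard entry point; adjust the
# script and config paths to your checkout):
#   python -u tools/main.py --config-file configs/cyclegan_horse2zebra.yaml
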
epochs: 200
output_dir: output_dir
find_unused_parameters: True

model:
  name: CycleGANModel
  generator:
    name: ResnetGenerator
    output_nc: 3
    n_blocks: 9
    ngf: 64
    use_dropout: False
    norm_type: instance
    input_nc: 3
  discriminator:
    name: NLayerDiscriminator
    ndf: 64
    n_layers: 3
    norm_type: instance
    input_nc: 3
  cycle_criterion:
    name: L1Loss
  idt_criterion:
    name: L1Loss
    loss_weight: 0.5
  gan_criterion:
    name: GANLoss
    gan_mode: lsgan
  # whether to train the model under @to_static (dynamic-to-static graph conversion)
  to_static: False
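
# Note: the settings above follow the original CycleGAN recipe: a 9-block ResNet
# generator with instance norm, a 3-layer (70x70) PatchGAN discriminator, L1 cycle
# and identity losses (identity weighted 0.5), and an LSGAN adversarial objective.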

export_model:
  - {name: 'netG_A', inputs_num: 1}
  - {name: 'netG_B', inputs_num: 1}
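
# Each generator is exported separately for inference, taking a single image tensor
# as input; this list is consumed by PaddleGAN's model-export tooling (an assumption
# about the surrounding framework, not something defined in this file).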

dataset:
  train:
    name: UnpairedDataset
    dataroot_a: data/horse2zebra/trainA
    dataroot_b: data/horse2zebra/trainB
    num_workers: 0
    batch_size: 1
    is_train: True
    max_size: inf
    preprocess:
      - name: LoadImageFromFile
        key: A
      - name: LoadImageFromFile
        key: B
      - name: Transforms
        input_keys: [A, B]
        pipeline:
          - name: Resize
            size: [286, 286]
            interpolation: 'bicubic' # cv2.INTER_CUBIC
            keys: ['image', 'image']
          - name: RandomCrop
            size: [256, 256]
            keys: ['image', 'image']
          - name: RandomHorizontalFlip
            prob: 0.5
            keys: ['image', 'image']
          - name: Transpose
            keys: ['image', 'image']
          - name: Normalize
            mean: [127.5, 127.5, 127.5]
            std: [127.5, 127.5, 127.5]
            keys: ['image', 'image']
  test:
    name: UnpairedDataset
    dataroot_a: data/horse2zebra/testA
    dataroot_b: data/horse2zebra/testB
    num_workers: 0
    batch_size: 1
    max_size: inf
    is_train: False
    preprocess:
      - name: LoadImageFromFile
        key: A
      - name: LoadImageFromFile
        key: B
      - name: Transforms
        input_keys: [A, B]
        pipeline:
          - name: Resize
            size: [256, 256]
            interpolation: 'bicubic' # cv2.INTER_CUBIC
            keys: ['image', 'image']
          - name: Transpose
            keys: ['image', 'image']
          - name: Normalize
            mean: [127.5, 127.5, 127.5]
            std: [127.5, 127.5, 127.5]
            keys: ['image', 'image']
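
# Normalizing with mean/std 127.5 maps 8-bit pixels from [0, 255] into [-1, 1],
# matching the tanh output range of the generator; Transpose converts HWC images
# to the CHW layout expected by the network.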

lr_scheduler:
  name: LinearDecay
  learning_rate: 0.0002
  start_epoch: 100
  decay_epochs: 100
  # placeholder value; replaced with the real dataset's iterations per epoch at runtime
  iters_per_epoch: 1
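
# LinearDecay keeps the learning rate at 0.0002 for the first 100 epochs (start_epoch)
# and then decays it linearly towards 0 over the remaining 100 epochs (decay_epochs),
# matching the schedule used in the CycleGAN paper.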

optimizer:
  optimG:
    name: Adam
    net_names:
      - netG_A
      - netG_B
    beta1: 0.5
  optimD:
    name: Adam
    net_names:
      - netD_A
      - netD_B
    beta1: 0.5
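
# Generators and discriminators are updated by separate Adam optimizers, as is
# standard for adversarial training; beta1 = 0.5 is the common GAN setting, and the
# learning rate is taken from lr_scheduler above.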

log_config:
  interval: 100
  visiual_interval: 500
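
# Training logs are printed every `interval` iterations and visual results are logged
# every `visiual_interval` iterations (the key spelling follows the upstream code).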

snapshot_config:
  interval: 5

validate:
  interval: 30000
  save_img: false
  metrics:
    fid: # metric name, can be arbitrary
      name: FID
      batch_size: 8
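
# Validation runs every `interval` training iterations and reports FID (Frechet
# Inception Distance) between generated and real images; lower is better. The
# `batch_size` here is assumed to be the batch used when extracting Inception
# features for the FID computation.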