zhiyang.zhou / tongue-diagnosis · Commits · 5bc3eadd

Commit 5bc3eadd authored Jul 08, 2021 by zhiyang.zhou
add dnns

parent 9a3e236d · Pipeline #189 canceled with stages
Showing 4 changed files with 404 additions and 0 deletions (+404 -0)
models/__init__.py    +0   -0
models/densenet.py    +197 -0
models/resnet.py      +115 -0
models/wideresnet.py  +92  -0
models/__init__.py  0 → 100644

(new empty file: it marks the models directory as a Python package)
models/densenet.py  0 → 100644
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import math
import torch
import torch.nn as nn
import torch.nn.functional as F


class BasicBlock(nn.Module):
    def __init__(self, in_planes, out_planes, dropRate=0.0):
        super(BasicBlock, self).__init__()
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv1 = nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.droprate = dropRate

    def forward(self, x):
        out = self.conv1(self.relu(self.bn1(x)))
        if self.droprate > 0:
            out = F.dropout(out, p=self.droprate, training=self.training)
        # dense connectivity: concatenate the input with the new feature maps
        return torch.cat([x, out], 1)


class BottleneckBlock(nn.Module):
    def __init__(self, in_planes, out_planes, dropRate=0.0):
        super(BottleneckBlock, self).__init__()
        inter_planes = out_planes * 4
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv1 = nn.Conv2d(in_planes, inter_planes, kernel_size=1, stride=1,
                               padding=0, bias=False)
        self.bn2 = nn.BatchNorm2d(inter_planes)
        self.conv2 = nn.Conv2d(inter_planes, out_planes, kernel_size=3, stride=1,
                               padding=1, bias=False)
        self.droprate = dropRate

    def forward(self, x):
        out = self.conv1(self.relu(self.bn1(x)))
        if self.droprate > 0:
            out = F.dropout(out, p=self.droprate, inplace=False, training=self.training)
        out = self.conv2(self.relu(self.bn2(out)))
        if self.droprate > 0:
            out = F.dropout(out, p=self.droprate, inplace=False, training=self.training)
        return torch.cat([x, out], 1)


class TransitionBlock(nn.Module):
    def __init__(self, in_planes, out_planes, dropRate=0.0):
        super(TransitionBlock, self).__init__()
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv1 = nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=1,
                               padding=0, bias=False)
        self.droprate = dropRate

    def forward(self, x):
        out = self.conv1(self.relu(self.bn1(x)))
        if self.droprate > 0:
            out = F.dropout(out, p=self.droprate, inplace=False, training=self.training)
        # 2x2 average pooling halves the spatial resolution between dense blocks
        return F.avg_pool2d(out, 2)


class DenseBlock(nn.Module):
    def __init__(self, nb_layers, in_planes, growth_rate, block, dropRate=0.0):
        super(DenseBlock, self).__init__()
        self.layer = self._make_layer(block, in_planes, growth_rate, nb_layers, dropRate)

    def _make_layer(self, block, in_planes, growth_rate, nb_layers, dropRate):
        layers = []
        for i in range(nb_layers):
            layers.append(block(in_planes + i * growth_rate, growth_rate, dropRate))
        return nn.Sequential(*layers)

    def forward(self, x):
        return self.layer(x)


class DenseNet3(nn.Module):
    def __init__(self, depth, num_classes, growth_rate=12,
                 reduction=0.5, bottleneck=True, dropRate=0.0,
                 normalizer=None, out_classes=100):
        super(DenseNet3, self).__init__()
        in_planes = 2 * growth_rate
        # integer division so the layer count stays an int under Python 3
        n = (depth - 4) // 3
        if bottleneck:
            n = n // 2
            block = BottleneckBlock
        else:
            block = BasicBlock
        # 1st conv before any dense block
        self.conv1 = nn.Conv2d(3, in_planes, kernel_size=3, stride=1,
                               padding=1, bias=False)
        # 1st block
        self.block1 = DenseBlock(n, in_planes, growth_rate, block, dropRate)
        in_planes = int(in_planes + n * growth_rate)
        self.trans1 = TransitionBlock(in_planes, int(math.floor(in_planes * reduction)),
                                      dropRate=dropRate)
        in_planes = int(math.floor(in_planes * reduction))
        # 2nd block
        self.block2 = DenseBlock(n, in_planes, growth_rate, block, dropRate)
        in_planes = int(in_planes + n * growth_rate)
        self.trans2 = TransitionBlock(in_planes, int(math.floor(in_planes * reduction)),
                                      dropRate=dropRate)
        in_planes = int(math.floor(in_planes * reduction))
        # 3rd block
        self.block3 = DenseBlock(n, in_planes, growth_rate, block, dropRate)
        in_planes = int(in_planes + n * growth_rate)
        # global average pooling and classifier
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.relu = nn.ReLU(inplace=True)
        self.fc = nn.Linear(in_planes, num_classes)
        self.in_planes = in_planes
        self.normalizer = normalizer

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                # note: n here is the per-block layer count carried over from above
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                m.bias.data.zero_()

    def forward(self, x):
        if self.normalizer is not None:
            # per-channel input normalization, applied to a clone of x
            x = x.clone()
            x[:, 0, :, :] = (x[:, 0, :, :] - self.normalizer.mean[0]) / self.normalizer.std[0]
            x[:, 1, :, :] = (x[:, 1, :, :] - self.normalizer.mean[1]) / self.normalizer.std[1]
            x[:, 2, :, :] = (x[:, 2, :, :] - self.normalizer.mean[2]) / self.normalizer.std[2]

        out = self.conv1(x)
        out = self.trans1(self.block1(out))
        out = self.trans2(self.block2(out))
        out = self.block3(out)
        out = self.relu(self.bn1(out))
        out = F.avg_pool2d(out, 8)
        out = out.view(-1, self.in_planes)
        out = self.fc(out)
        return out

    # function to extract the multiple features
    def feature_list(self, x):
        if self.normalizer is not None:
            x = x.clone()
            x[:, 0, :, :] = (x[:, 0, :, :] - self.normalizer.mean[0]) / self.normalizer.std[0]
            x[:, 1, :, :] = (x[:, 1, :, :] - self.normalizer.mean[1]) / self.normalizer.std[1]
            x[:, 2, :, :] = (x[:, 2, :, :] - self.normalizer.mean[2]) / self.normalizer.std[2]

        out_list = []
        out = self.conv1(x)
        out_list.append(out)
        out = self.trans1(self.block1(out))
        out_list.append(out)
        out = self.trans2(self.block2(out))
        out_list.append(out)
        out = self.block3(out)
        out = self.relu(self.bn1(out))
        out_list.append(out)
        out = F.avg_pool2d(out, 8)
        out = out.view(-1, self.in_planes)
        return self.fc(out), out_list

    def intermediate_forward(self, x, layer_index):
        if self.normalizer is not None:
            x = x.clone()
            x[:, 0, :, :] = (x[:, 0, :, :] - self.normalizer.mean[0]) / self.normalizer.std[0]
            x[:, 1, :, :] = (x[:, 1, :, :] - self.normalizer.mean[1]) / self.normalizer.std[1]
            x[:, 2, :, :] = (x[:, 2, :, :] - self.normalizer.mean[2]) / self.normalizer.std[2]

        out = self.conv1(x)
        if layer_index == 1:
            out = self.trans1(self.block1(out))
        elif layer_index == 2:
            out = self.trans1(self.block1(out))
            out = self.trans2(self.block2(out))
        elif layer_index == 3:
            out = self.trans1(self.block1(out))
            out = self.trans2(self.block2(out))
            out = self.block3(out)
            out = self.relu(self.bn1(out))
        return out

    # function to extract the penultimate features
    def penultimate_forward(self, x):
        if self.normalizer is not None:
            x = x.clone()
            x[:, 0, :, :] = (x[:, 0, :, :] - self.normalizer.mean[0]) / self.normalizer.std[0]
            x[:, 1, :, :] = (x[:, 1, :, :] - self.normalizer.mean[1]) / self.normalizer.std[1]
            x[:, 2, :, :] = (x[:, 2, :, :] - self.normalizer.mean[2]) / self.normalizer.std[2]

        out = self.conv1(x)
        out = self.trans1(self.block1(out))
        out = self.trans2(self.block2(out))
        out = self.block3(out)
        penultimate = self.relu(self.bn1(out))
        out = F.avg_pool2d(penultimate, 8)
        out = out.view(-1, self.in_planes)
        return self.fc(out), penultimate
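For reference, a minimal smoke test of DenseNet3 and its feature hooks; this sketch is not part of the commit. The depth=100/growth_rate=12 values are illustrative, the 8×8 average pool in forward() assumes 32×32 inputs, and the import path assumes the script runs from the repository root where the empty models/__init__.py makes the package importable:

# hypothetical usage sketch, not from this commit
import torch
from models.densenet import DenseNet3

net = DenseNet3(depth=100, num_classes=10, growth_rate=12)
x = torch.randn(2, 3, 32, 32)              # CIFAR-sized batch
logits, features = net.feature_list(x)     # logits plus 4 intermediate maps
print(logits.shape)                        # torch.Size([2, 10])
print([f.shape[1] for f in features])      # channel count at each hook point

The extra methods (feature_list, intermediate_forward, penultimate_forward) all run the same backbone but return intermediate activations instead of, or alongside, the logits, which is useful when downstream code needs per-layer features.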
models/resnet.py  0 → 100644
import torch
import torch.nn as nn
import torch.nn.functional as F


class BasicBlock(nn.Module):
    expansion = 1

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3,
                               stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)

        # projection shortcut when the spatial size or channel count changes
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion * planes,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion * planes)
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        out += self.shortcut(x)
        out = F.relu(out)
        return out


class Bottleneck(nn.Module):
    expansion = 4

    def __init__(self, in_planes, planes, stride=1):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
                               stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, self.expansion * planes,
                               kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(self.expansion * planes)

        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion * planes,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion * planes)
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = F.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        out += self.shortcut(x)
        out = F.relu(out)
        return out


class ResNet(nn.Module):
    def __init__(self, block, num_blocks, num_classes=10):
        super(ResNet, self).__init__()
        self.in_planes = 64

        # CIFAR-style stem: 3x3 conv, stride 1, no max-pooling
        self.conv1 = nn.Conv2d(3, 64, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.linear = nn.Linear(512 * block.expansion, num_classes)

    def _make_layer(self, block, planes, num_blocks, stride):
        # the first block of a stage may downsample; the rest keep stride 1
        strides = [stride] + [1] * (num_blocks - 1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        out = F.avg_pool2d(out, 4)
        out = out.view(out.size(0), -1)
        out = self.linear(out)
        return out


def ResNet18(num_classes=100):
    return ResNet(BasicBlock, [2, 2, 2, 2], num_classes=num_classes)


def ResNet34(num_classes=100):
    return ResNet(BasicBlock, [3, 4, 6, 3], num_classes=num_classes)


def ResNet50(num_classes=100):
    return ResNet(Bottleneck, [3, 4, 6, 3], num_classes=num_classes)


def ResNet101(num_classes=100):
    return ResNet(Bottleneck, [3, 4, 23, 3], num_classes=num_classes)


def ResNet152(num_classes=100):
    return ResNet(Bottleneck, [3, 8, 36, 3], num_classes=num_classes)


def test():
    net = ResNet18(num_classes=100)
    y = net(torch.randn(1, 3, 32, 32))
    print(y.size())
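The built-in test() above exercises only ResNet18. As a hedged sketch (not part of this commit), the Bottleneck variants can be smoke-tested the same way; the import path again assumes the repository root as working directory:

# hypothetical usage sketch, not from this commit
import torch
from models.resnet import ResNet50

net = ResNet50(num_classes=100)
y = net(torch.randn(1, 3, 32, 32))   # 4x4 avg-pool in forward() assumes 32x32 input
print(y.size())                      # torch.Size([1, 100]); penultimate width is 512*4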
models/wideresnet.py  0 → 100644
import math
import torch
import torch.nn as nn
import torch.nn.functional as F


class BasicBlock(nn.Module):
    def __init__(self, in_planes, out_planes, stride, dropRate=0.0):
        super(BasicBlock, self).__init__()
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.relu1 = nn.ReLU(inplace=True)
        self.conv1 = nn.Conv2d(in_planes, out_planes, kernel_size=3,
                               stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(out_planes)
        self.relu2 = nn.ReLU(inplace=True)
        self.conv2 = nn.Conv2d(out_planes, out_planes, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.droprate = dropRate
        self.equalInOut = (in_planes == out_planes)
        # 1x1 projection shortcut only when the channel counts differ
        self.convShortcut = (nn.Conv2d(in_planes, out_planes, kernel_size=1,
                                       stride=stride, padding=0, bias=False)
                             if not self.equalInOut else None)

    def forward(self, x):
        # pre-activation: when channels differ, the shared BN-ReLU output also
        # feeds the projection shortcut, so it overwrites x instead of out
        if not self.equalInOut:
            x = self.relu1(self.bn1(x))
        else:
            out = self.relu1(self.bn1(x))
        out = self.relu2(self.bn2(self.conv1(out if self.equalInOut else x)))
        if self.droprate > 0:
            out = F.dropout(out, p=self.droprate, training=self.training)
        out = self.conv2(out)
        return torch.add(x if self.equalInOut else self.convShortcut(x), out)


class NetworkBlock(nn.Module):
    def __init__(self, nb_layers, in_planes, out_planes, block, stride, dropRate=0.0):
        super(NetworkBlock, self).__init__()
        self.layer = self._make_layer(block, in_planes, out_planes, nb_layers,
                                      stride, dropRate)

    def _make_layer(self, block, in_planes, out_planes, nb_layers, stride, dropRate):
        layers = []
        for i in range(int(nb_layers)):
            # only the first block changes the channel count and applies the stride
            layers.append(block(in_planes if i == 0 else out_planes, out_planes,
                                stride if i == 0 else 1, dropRate))
        return nn.Sequential(*layers)

    def forward(self, x):
        return self.layer(x)


class WideResNet(nn.Module):
    def __init__(self, depth=34, num_classes=10, widen_factor=10, dropRate=0.0):
        super(WideResNet, self).__init__()
        nChannels = [16, 16 * widen_factor, 32 * widen_factor, 64 * widen_factor]
        assert ((depth - 4) % 6 == 0)
        n = (depth - 4) // 6
        block = BasicBlock
        # 1st conv before any network block
        self.conv1 = nn.Conv2d(3, nChannels[0], kernel_size=3, stride=1,
                               padding=1, bias=False)
        # 1st block
        self.block1 = NetworkBlock(n, nChannels[0], nChannels[1], block, 1, dropRate)
        # 1st sub-block
        self.sub_block1 = NetworkBlock(n, nChannels[0], nChannels[1], block, 1, dropRate)
        # 2nd block
        self.block2 = NetworkBlock(n, nChannels[1], nChannels[2], block, 2, dropRate)
        # 3rd block
        self.block3 = NetworkBlock(n, nChannels[2], nChannels[3], block, 2, dropRate)
        # global average pooling and classifier
        self.bn1 = nn.BatchNorm2d(nChannels[3])
        self.relu = nn.ReLU(inplace=True)
        self.fc = nn.Linear(nChannels[3], num_classes)
        self.nChannels = nChannels[3]

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                # He initialization based on the conv fan-out
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                m.bias.data.zero_()

    def forward(self, x):
        out = self.conv1(x)
        out = self.block1(out)
        out = self.block2(out)
        out = self.block3(out)
        out = self.relu(self.bn1(out))
        out = F.avg_pool2d(out, 8)
        out = out.view(-1, self.nChannels)
        return self.fc(out)
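And a final hedged sketch (not part of this commit): WideResNet requires (depth - 4) % 6 == 0, enforced by the assert above, so the usual WRN-depth-width pairings such as WRN-28-10 below are illustrative rather than taken from this repository's training code:

# hypothetical usage sketch, not from this commit
import torch
from models.wideresnet import WideResNet

net = WideResNet(depth=28, num_classes=10, widen_factor=10)
y = net(torch.randn(2, 3, 32, 32))   # 8x8 avg-pool in forward() assumes 32x32 input
print(y.size())                      # torch.Size([2, 10])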