Skip to content
GitLab
Menu
Projects
Groups
Snippets
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
Kiryuu Sakuya
TensorFlow-Homework
Commits
0d607f02
Unverified
Commit
0d607f02
authored
Apr 14, 2020
by
Kiryuu Sakuya
🎵
Browse files
Add exam.4
parent
479c1957
Changes
3
Hide whitespace changes
Inline
Side-by-side
exam/step4/AlexNetCompleted.py
0 → 100644
View file @
0d607f02
import tensorflow as tf
import os
# os.environ["TF_CPP_MIN_LOG_LEVEL"] = '3'

# ---- Answer section begin ----
# Builds an AlexNet-style graph (TF1 graph mode) for 224x224x3 inputs and
# 4 output classes, then exports the meta-graph so the grader can compare
# its structure against the student's model.

# Placeholders: dropout keep probability (scalar), batch of images, one-hot labels.
keep_prob = tf.placeholder(tf.float32, shape=())
batch_img_input = tf.placeholder(tf.float32, shape=(None, 224, 224, 3))
labels = tf.placeholder(tf.float32, shape=(None, 4))

# Layer 1: conv + local response normalization + max pooling.
conv1 = tf.layers.Conv2D(filters=96, kernel_size=(11, 11), strides=(4, 4),
                         padding='valid', activation=tf.nn.relu)(batch_img_input)
lrn1 = tf.nn.local_response_normalization(conv1, alpha=1e-4, beta=0.75,
                                          depth_radius=2, bias=2.0)
# BUG FIX: pooling previously consumed conv1, leaving lrn1 as dead computation.
# The layer ordering is conv -> LRN -> pool, so pool the normalized tensor.
pool1 = tf.layers.MaxPooling2D(pool_size=(3, 3), strides=(2, 2),
                               padding='valid')(lrn1)

# Layer 2: conv + local response normalization + max pooling.
conv2 = tf.layers.Conv2D(filters=256, kernel_size=(5, 5), strides=(1, 1),
                         padding='same', activation=tf.nn.relu)(pool1)
lrn2 = tf.nn.local_response_normalization(conv2, alpha=1e-4, beta=0.75,
                                          depth_radius=2, bias=2.0)
# BUG FIX: same as above — pool lrn2, not conv2 (lrn2 was previously unused).
pool2 = tf.layers.MaxPooling2D(pool_size=(3, 3), strides=(2, 2),
                               padding='valid')(lrn2)

# Three directly stacked conv layers (192, 192, 128 filters per the exercise spec).
conv3 = tf.layers.Conv2D(filters=192, kernel_size=(3, 3), strides=(1, 1),
                         padding='same', activation=tf.nn.relu)(pool2)
conv4 = tf.layers.Conv2D(filters=192, kernel_size=(3, 3), strides=(1, 1),
                         padding='same', activation=tf.nn.relu)(conv3)
conv5 = tf.layers.Conv2D(filters=128, kernel_size=(3, 3), strides=(1, 1),
                         padding='same', activation=tf.nn.relu)(conv4)

# Final pooling, then flatten to a rank-2 tensor for the dense layers.
pool3 = tf.layers.MaxPooling2D(pool_size=(3, 3), strides=(2, 2),
                               padding='valid')(conv5)
flatten = tf.layers.Flatten()(pool3)

# Fully connected layer 1 + dropout.
dense1 = tf.layers.Dense(units=512, activation=tf.nn.relu)(flatten)
dropout1 = tf.nn.dropout(dense1, keep_prob)

# Fully connected layer 2 + dropout.
dense2 = tf.layers.Dense(units=512, activation=tf.nn.relu)(dropout1)
dropout2 = tf.nn.dropout(dense2, keep_prob)

# Fully connected layer 3 + dropout.
dense3 = tf.layers.Dense(units=256, activation=tf.nn.relu)(dropout2)
dropout3 = tf.nn.dropout(dense3, keep_prob)

# Output layer: one unit per class, no activation (raw logits for the loss below).
outputs = tf.layers.Dense(units=4, activation=None)(dropout3)

# Softmax cross-entropy over logits, averaged across the batch.
loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits_v2(logits=outputs, labels=labels))
train = tf.train.AdamOptimizer().minimize(loss)
# ---- Answer section end ----

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Export only the graph structure; the grader compares meta-graph files.
    tf.train.export_meta_graph(filename="step4/modelInfo/AlexNet",
                               graph=tf.get_default_graph())
tf.reset_default_graph()
\ No newline at end of file
exam/step4/AlexNetForUsers.py
0 → 100644
View file @
0d607f02
import tensorflow as tf
# In this exercise you only need to build the network structure as specified.
# First define three placeholders: the dropout keep probability, the batch of
# image inputs, and the labels corresponding to each batch input.
# Then stack the following layers in order:
# All convolutions use tf.keras.layers.Conv2D(),
# all pooling uses tf.keras.layers.MaxPooling2D(),
# all normalization uses tf.nn.local_response_normalization() with
#   alpha=1e-4, beta=0.75, depth_radius=2, bias=2.0,
# flattening uses tf.keras.layers.Flatten(),
# fully connected layers use tf.keras.layers.Dense(),
# dropout uses tf.nn.dropout().
# Layer 1: convolution + normalization + pooling
#   conv: 96 kernels, size 11x11, stride 4, padding 'valid', relu activation
#   normalization
#   max pooling: window 3x3, stride 2, padding 'valid'
# Layer 2: convolution + normalization + pooling
#   conv: 256 kernels, size 5x5, stride 1, padding 'same', relu activation
#   normalization
#   pooling: same as above
# Three directly stacked convolution layers:
#   all kernels 3x3, stride 1, padding 'same', relu activation
#   kernel counts are 192, 192, 128 respectively
# Pooling + flatten
# Fully connected layer 1 + dropout: 512 units, relu activation
# Fully connected layer 2 + dropout: same as above
# Fully connected layer 3 + dropout: 256 units, relu activation
# Output fully connected layer: one unit per class, no activation
# Loss: tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2())
# Optimizer: tf.train.AdamOptimizer()
#********** Begin **********#
#********** End **********#
# --- Do not modify the code below ---
# --- Otherwise grading will be affected ---
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Exports the graph structure so the grader can compare it with the
    # reference model's meta-graph.
    tf.train.export_meta_graph(filename="step4/userModelInfo/AlexNet",
                               graph=tf.get_default_graph())
\ No newline at end of file
exam/step4/AlexNetTest.py
0 → 100644
View file @
0d607f02
import os
os.environ["TF_CPP_MIN_LOG_LEVEL"] = '3'
import warnings
warnings.filterwarnings('ignore')
# Importing these modules runs both scripts for their side effects: each
# builds its graph and exports a meta-graph file to the paths below.
import AlexNetCompleted
import AlexNetForUsers

rightModelPath = 'step4/modelInfo/AlexNet'
userModelPath = 'step4/userModelInfo/AlexNet'

# Grading heuristic: two structurally identical graphs export meta-graph
# files of equal size. NOTE(review): equal size does not strictly imply an
# identical structure — a proper graph comparison would be more robust.
try:
    # BUG FIX: was a bare `except:`, which swallowed every exception
    # (including KeyboardInterrupt). Only os.path.getsize can raise here,
    # and it raises OSError when a file is missing or inaccessible.
    if os.path.getsize(rightModelPath) == os.path.getsize(userModelPath):
        print('恭喜你通过本关测试!模型结构正确,你已经掌握了AlexNet的结构!', end='')
    else:
        print('模型结构有误!未能通过本关测试!')
except OSError:
    print('模型结构文件保存有误!未能通过本关测试')
\ No newline at end of file
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment