tensorflow---alexnet training (tflearn)
# Input data: input_data.py is the MNIST helper script from the TensorFlow tutorials
from __future__ import print_function
import input_data
mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)

import tensorflow as tf

# Training hyperparameters
learning_rate = 0.001
training_iters = 200000
batch_size = 64
display_step = 20

# Network parameters
n_input = 784   # input dimension (28x28 MNIST images, flattened)
n_classes = 10  # number of label classes
dropout = 0.8   # dropout keep probability

# Placeholder inputs
x = tf.placeholder(tf.float32, [None, n_input])
y = tf.placeholder(tf.float32, [None, n_classes])
keep_prob = tf.placeholder(tf.float32)
# Convolution with bias and ReLU activation
def conv2d(name, l_input, w, b):
    return tf.nn.relu(tf.nn.bias_add(
        tf.nn.conv2d(l_input, w, strides=[1, 1, 1, 1], padding='SAME'), b),
        name=name)

# Max-pooling (downsampling)
def max_pool(name, l_input, k):
    return tf.nn.max_pool(l_input, ksize=[1, k, k, 1], strides=[1, k, k, 1],
                          padding='SAME', name=name)

# Local response normalization
def norm(name, l_input, lsize=4):
    return tf.nn.lrn(l_input, lsize, bias=1.0, alpha=0.001 / 9.0, beta=0.75,
                     name=name)

# Define the whole network
def alex_net(_X, _weights, _biases, _dropout):
    # Reshape the input vector into a 28x28 single-channel image
    _X = tf.reshape(_X, shape=[-1, 28, 28, 1])

    # Block 1: convolution -> pooling -> normalization -> dropout
    conv1 = conv2d('conv1', _X, _weights['wc1'], _biases['bc1'])
    pool1 = max_pool('pool1', conv1, k=2)
    norm1 = norm('norm1', pool1, lsize=4)
    norm1 = tf.nn.dropout(norm1, _dropout)

    # Block 2
    conv2 = conv2d('conv2', norm1, _weights['wc2'], _biases['bc2'])
    pool2 = max_pool('pool2', conv2, k=2)
    norm2 = norm('norm2', pool2, lsize=4)
    norm2 = tf.nn.dropout(norm2, _dropout)

    # Block 3
    conv3 = conv2d('conv3', norm2, _weights['wc3'], _biases['bc3'])
    pool3 = max_pool('pool3', conv3, k=2)
    norm3 = norm('norm3', pool3, lsize=4)
    norm3 = tf.nn.dropout(norm3, _dropout)

    # Fully connected layers: flatten the feature maps into a vector first
    dense1 = tf.reshape(norm3, [-1, _weights['wd1'].get_shape().as_list()[0]])
    dense1 = tf.nn.relu(tf.matmul(dense1, _weights['wd1']) + _biases['bd1'], name='fc1')
    dense2 = tf.nn.relu(tf.matmul(dense1, _weights['wd2']) + _biases['bd2'], name='fc2')

    # Output layer (logits)
    out = tf.matmul(dense2, _weights['out']) + _biases['out']
    return out
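The 4 * 4 * 256 input size of 'wd1' below follows from the pooling arithmetic: each stride-2 max pool with 'SAME' padding rounds up, so the 28x28 input shrinks to 14, then 7, then 4, and conv3 produces 256 channels. A quick sanity check in plain Python:

size = 28
for _ in range(3):            # three max_pool(..., k=2) layers
    size = (size + 1) // 2    # 'SAME' padding: output = ceil(input / 2)
print(size, size * size * 256)   # 4, 4096 -- matches wd1's first dimension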
# Store all the network parameters
weights = {
    'wc1': tf.Variable(tf.random_normal([3, 3, 1, 64])),
    'wc2': tf.Variable(tf.random_normal([3, 3, 64, 128])),
    'wc3': tf.Variable(tf.random_normal([3, 3, 128, 256])),
    'wd1': tf.Variable(tf.random_normal([4 * 4 * 256, 1024])),
    'wd2': tf.Variable(tf.random_normal([1024, 1024])),
    'out': tf.Variable(tf.random_normal([1024, 10]))
}
biases = {
    'bc1': tf.Variable(tf.random_normal([64])),
    'bc2': tf.Variable(tf.random_normal([128])),
    'bc3': tf.Variable(tf.random_normal([256])),
    'bd1': tf.Variable(tf.random_normal([1024])),
    'bd2': tf.Variable(tf.random_normal([1024])),
    'out': tf.Variable(tf.random_normal([n_classes]))
}
# Build the model
pred = alex_net(x, weights, biases, keep_prob)

# Define the loss function and the training step
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)

# Evaluate the network
correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))

# Initialize all the shared variables
init = tf.global_variables_initializer()

# Launch a training session
with tf.Session() as sess:
    sess.run(init)
    step = 1
    # Keep training until the max number of iterations is reached
    while step * batch_size < training_iters:
        # Fetch a batch of data
        batch_xs, batch_ys = mnist.train.next_batch(batch_size)
        sess.run(optimizer, feed_dict={x: batch_xs, y: batch_ys, keep_prob: dropout})
        if step % display_step == 0:
            # Compute accuracy and loss on the current batch (dropout disabled)
            acc = sess.run(accuracy, feed_dict={x: batch_xs, y: batch_ys, keep_prob: 1.})
            loss = sess.run(cost, feed_dict={x: batch_xs, y: batch_ys, keep_prob: 1.})
            print("Iter " + str(step * batch_size) + ", Minibatch Loss= " +
                  "{:.6f}".format(loss) + ", Training Accuracy= " + "{:.5f}".format(acc))
        step += 1
    print("Optimization Finished!")
    # Compute test accuracy
    print("Testing Accuracy:", sess.run(accuracy, feed_dict={x: mnist.test.images[:256],
                                                             y: mnist.test.labels[:256],
                                                             keep_prob: 1.}))
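Note that the final line measures accuracy on only the first 256 test images; feeding all 10,000 at once can exhaust memory for a network this wide. A minimal sketch for full-test-set accuracy, assuming it runs inside the same with-Session block as above:

    # Average the per-batch accuracies over the whole test set (sketch)
    n_batches = mnist.test.num_examples // 256
    total = 0.
    for i in range(n_batches):
        xs = mnist.test.images[i * 256:(i + 1) * 256]
        ys = mnist.test.labels[i * 256:(i + 1) * 256]
        total += sess.run(accuracy, feed_dict={x: xs, y: ys, keep_prob: 1.})
    print("Full Testing Accuracy:", total / n_batches)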
TensorFlow is a powerful distributed, cross-platform deep learning framework.
Keras, TensorLayer, and TFLearn are libraries built on top of TensorFlow that provide a simpler, higher-level programming interface, as sketched below.
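Since the title mentions TFLearn: the same conv/pool/LRN/dropout structure can be written far more compactly with TFLearn's layer API. A minimal sketch, assuming tflearn is installed, with hyperparameters mirroring the raw-TensorFlow version above:

from __future__ import print_function
import tflearn
from tflearn.layers.core import input_data, dropout, fully_connected
from tflearn.layers.conv import conv_2d, max_pool_2d
from tflearn.layers.normalization import local_response_normalization
from tflearn.layers.estimator import regression
import tflearn.datasets.mnist as mnist

# Load MNIST and reshape flat vectors into 28x28 single-channel images
X, Y, testX, testY = mnist.load_data(one_hot=True)
X = X.reshape([-1, 28, 28, 1])
testX = testX.reshape([-1, 28, 28, 1])

# Same three conv -> pool -> LRN -> dropout blocks as the raw-TensorFlow version
net = input_data(shape=[None, 28, 28, 1])
net = conv_2d(net, 64, 3, activation='relu')
net = max_pool_2d(net, 2)
net = local_response_normalization(net)
net = dropout(net, 0.8)
net = conv_2d(net, 128, 3, activation='relu')
net = max_pool_2d(net, 2)
net = local_response_normalization(net)
net = dropout(net, 0.8)
net = conv_2d(net, 256, 3, activation='relu')
net = max_pool_2d(net, 2)
net = local_response_normalization(net)
net = dropout(net, 0.8)
net = fully_connected(net, 1024, activation='relu')
net = fully_connected(net, 1024, activation='relu')
net = fully_connected(net, 10, activation='softmax')
net = regression(net, optimizer='adam', learning_rate=0.001,
                 loss='categorical_crossentropy')

# Train; TFLearn wraps the session, training loop, and progress display
model = tflearn.DNN(net)
model.fit(X, Y, n_epoch=1, validation_set=(testX, testY),
          show_metric=True, batch_size=64)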
Key points:
from __future__ import print_function: lets code running on older Python versions (2.x) use the new print() function; other new language features can be back-ported from __future__ the same way.
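For example, with the import in place the same call works under both interpreters:

# Python 2: print is normally a statement, e.g.  print "hello"
from __future__ import print_function
print("hello")   # after the import, the Python 3 function form works in Python 2 too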
Reposted from: https://www.cnblogs.com/fanhaha/p/7645326.html