```python
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
from keras.layers import Dense

# Placeholders for the flattened 28x28 MNIST images and their one-hot labels
img = tf.placeholder(tf.float32, shape=(None, 784))
labels = tf.placeholder(tf.float32, shape=(None, 10))

# Keras layers can be called directly on TensorFlow tensors
x = Dense(128, activation='relu')(img)
x = Dense(128, activation='relu')(x)
logits = Dense(10)(x)                 # raw scores for the 10 digit classes
prediction = tf.nn.softmax(logits)    # class probabilities

# Softmax cross-entropy (not sigmoid) is the appropriate loss for one-hot multi-class labels
loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits_v2(logits=logits, labels=labels))
train_optim = tf.train.AdamOptimizer().minimize(loss)

mnist_data = input_data.read_data_sets('MNIST_data/', one_hot=True)

with tf.Session() as sess:
    init = tf.global_variables_initializer()
    sess.run(init)
    for i in range(1000):
        batch_x, batch_y = mnist_data.train.next_batch(50)
        sess.run(train_optim, feed_dict={img: batch_x, labels: batch_y})

    # Evaluate on the test set
    acc_pred = tf.keras.metrics.categorical_accuracy(labels, prediction)
    pred = sess.run(acc_pred, feed_dict={labels: mnist_data.test.labels,
                                         img: mnist_data.test.images})
    print('accuracy: %.3f' % (sum(pred) / len(mnist_data.test.labels)))
```
There are two common ways to print the output of a model's intermediate layers (both sketched below):

1. Print an intermediate layer's output with `K.function()`
2. Print an intermediate layer's output with the functional API
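A minimal sketch of both approaches, assuming an already trained Keras `model` and a batch of inputs `x_sample` (both hypothetical names), with `model.layers[1]` standing in for the layer of interest:

```python
from keras import backend as K
from keras.models import Model

# Method 1: K.function() builds a backend function that maps the model's
# input tensor to the output tensor of the chosen intermediate layer.
get_hidden_output = K.function([model.input], [model.layers[1].output])
hidden_output = get_hidden_output([x_sample])[0]   # returns a list; take the array
print(hidden_output)

# Method 2: the functional API wraps the same tensors in a new Model, so the
# intermediate activations come out of an ordinary predict() call.
intermediate_model = Model(inputs=model.input, outputs=model.layers[1].output)
print(intermediate_model.predict(x_sample))
```

Both yield the same activations: `K.function()` avoids building a second model, while the functional-API version goes through the usual `predict()` and batching machinery.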
```python
import matplotlib.pyplot as plt

history = model.fit(x, y, validation_split=0.25, epochs=50, batch_size=16, verbose=1)

# Plot training and validation accuracy
# (older Keras logs these under 'acc'/'val_acc'; newer versions use 'accuracy'/'val_accuracy')
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('Model accuracy')
plt.ylabel('Accuracy')
plt.xlabel('Epoch')
plt.legend(['Train', 'Test'], loc='upper left')
plt.show()

# Plot training and validation loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('Model loss')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend(['Train', 'Test'], loc='upper left')
plt.show()
```