ne.py

import tensorflow as tf

# Load the MNIST dataset and scale pixel values to the [0, 1] range.
mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0

# Simple feed-forward classifier: flatten the 28x28 images, one hidden layer,
# dropout for regularization, and a 10-unit output layer producing logits.
model = tf.keras.models.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dropout(0.2),
    tf.keras.layers.Dense(10)
])

# Raw logits for the first training example, then the same values as probabilities.
predictions = model(x_train[:1]).numpy()
print(predictions)
predictions_softmax = tf.nn.softmax(predictions).numpy()
print(predictions_softmax)

# Cross-entropy loss on the logits; for an untrained model it should be
# close to -ln(1/10), i.e. roughly 2.3.
loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
loss = loss_fn(y_train[:1], predictions).numpy()
print(loss)

# Train on the full training set and evaluate on the test set.
model.compile(optimizer='adam', loss=loss_fn, metrics=['accuracy'])
model.fit(x_train, y_train, epochs=25)
model.evaluate(x_test, y_test, verbose=2)

# Wrap the trained model with a softmax layer so it outputs class probabilities.
probability_model = tf.keras.Sequential([model, tf.keras.layers.Softmax()])
test0 = probability_model(x_test[:5])
print(test0)
print("done")