import streamlit as st
from streamlit import config

-from keras.datasets import mnist
-from keras.layers import Conv2D, MaxPooling2D, Dropout, Dense, Flatten
-from keras.models import Sequential
-from keras.optimizers import SGD
+from tensorflow.keras.datasets import mnist
+from tensorflow.keras.layers import Conv2D, MaxPooling2D, Dropout, Dense, Flatten
+from tensorflow.keras.models import Sequential
+from tensorflow.keras.optimizers import SGD
from keras.utils import np_utils
-import keras
+from tensorflow import keras
import math
import numpy as np
import pandas as pd
import time

-# https://kobkrit.com/using-allow-growth-memory-option-in-tensorflow-and-keras-dc8c8081bc96
-from keras.backend.tensorflow_backend import set_session
import tensorflow as tf

-tf_config = tf.ConfigProto()
# dynamically grow the memory used on the GPU
# this option is fine on non gpus as well.
+tf_config = tf.compat.v1.ConfigProto()
tf_config.gpu_options.allow_growth = True
tf_config.log_device_placement = True
-set_session(tf.Session(config=tf_config))
+
+# https://kobkrit.com/using-allow-growth-memory-option-in-tensorflow-and-keras-dc8c8081bc96
+tf.compat.v1.keras.backend.set_session(tf.compat.v1.Session(config=tf_config))


class MyCallback(keras.callbacks.Callback):
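Note on the session block: the commit keeps the TF1-style ConfigProto/Session pair alive through tf.compat.v1, which works, but TF2 also has a native way to get per-process GPU memory growth without any session object. A minimal sketch of that alternative (not what this commit does):

    import tensorflow as tf

    # Ask TensorFlow to allocate GPU memory on demand instead of grabbing it all up front.
    # Harmless when no GPU is present, but it must run before the GPUs are first used.
    for gpu in tf.config.list_physical_devices("GPU"):
        tf.config.experimental.set_memory_growth(gpu, True)

A related loose end in the hunk above: `from keras.utils import np_utils` still points at standalone Keras; tf.keras exposes the same helper as `keras.utils.to_categorical`.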
@@ -67,10 +67,12 @@ def on_epoch_begin(self, epoch, logs=None):

    def on_batch_end(self, batch, logs=None):
        rows = pd.DataFrame(
-            [[logs["loss"], logs["accuracy"]]], columns=["loss", "accuracy"])
+            [[logs["loss"], logs["accuracy"]]], columns=["loss", "accuracy"]
+        )
        if batch % 10 == 0:
-            self._epoch_chart.add_rows({"loss": [logs["loss"]],
-                                        "accuracy": [logs["accuracy"]]})
+            self._epoch_chart.add_rows(
+                {"loss": [logs["loss"]], "accuracy": [logs["accuracy"]]}
+            )
        if batch % 100 == 99:
            self._summary_chart.add_rows(rows)
        percent_complete = logs["batch"] * logs["size"] / self.params["samples"]
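For readers unfamiliar with this pattern, the callback above streams per-batch metrics into Streamlit chart elements via add_rows. A stripped-down, self-contained sketch of the same idea follows; the class name, lazy chart setup, and reporting interval are illustrative, not taken from this file:

    import pandas as pd
    import streamlit as st
    from tensorflow import keras


    class LiveChartCallback(keras.callbacks.Callback):
        """Append per-batch loss/accuracy to a Streamlit line chart."""

        def __init__(self, every_n_batches=10):
            super().__init__()
            self._chart = None
            self._every = every_n_batches

        def on_batch_end(self, batch, logs=None):
            logs = logs or {}
            if batch % self._every != 0:
                return
            row = pd.DataFrame(
                [[logs.get("loss"), logs.get("accuracy")]],
                columns=["loss", "accuracy"],
            )
            if self._chart is None:
                self._chart = st.line_chart(row)  # create the chart on first use
            else:
                self._chart.add_rows(row)  # stream later rows into the same chart

Hooked up with model.fit(..., callbacks=[LiveChartCallback()]), the chart updates while training runs.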
@@ -96,6 +98,7 @@ def on_epoch_end(self, epoch, logs=None):
            % {"epoch": epoch, "summary": summary}
        )

+
st.title("MNIST CNN")

(x_train, y_train), (x_test, y_test) = mnist.load_data()
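The lines elided between these hunks are not shown here. For orientation only, the standard preprocessing that a Conv2D / categorical_crossentropy model needs after mnist.load_data() looks roughly like this; it is a generic sketch, not this file's actual code:

    from tensorflow import keras
    from tensorflow.keras.datasets import mnist

    (x_train, y_train), (x_test, y_test) = mnist.load_data()

    # Images arrive as uint8 arrays of shape (n, 28, 28); Conv2D layers expect
    # a trailing channel axis and float inputs.
    x_train = x_train.reshape(-1, 28, 28, 1).astype("float32") / 255.0
    x_test = x_test.reshape(-1, 28, 28, 1).astype("float32") / 255.0

    # categorical_crossentropy (used in the compile step below) expects one-hot labels.
    num_classes = 10
    y_train = keras.utils.to_categorical(y_train, num_classes)
    y_test = keras.utils.to_categorical(y_test, num_classes)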
@@ -134,8 +137,7 @@ def on_epoch_end(self, epoch, logs=None):
model.add(Dense(8, activation="relu"))
model.add(Dense(num_classes, activation="softmax"))

-model.compile(
-    loss="categorical_crossentropy", optimizer=sgd, metrics=["accuracy"])
+model.compile(loss="categorical_crossentropy", optimizer=sgd, metrics=["accuracy"])

show_terminal_output = not config.get_option("server.liveSave")
model.fit(