Ethan Goldfarb 6 年 前
コミット
ecb63ae52a
1 ファイル変更、7 行追加、4 行削除
  1. +7 −4
      ethan_data_processing_scripts/classifier.py

+ 7 - 4
ethan_data_processing_scripts/classifier.py

@@ -2,6 +2,7 @@ import tensorflow as tf
 
 
 def main():
+    # train(4, [[.1, .2, .3], [.3, .2, .3], [.3, .3, .3], [.3, .4, .5]], [1,2,3,0], [[.1, .2, .3], [.3, .2, .3], [.3, .3, .3], [.3, .4, .5]], [1,2,3,0])
     pass
     '''
     mnist = tf.keras.datasets.mnist
@@ -11,6 +12,8 @@ def main():
     x_train = tf.keras.utils.normalize(x_train, axis=1)
     x_test = tf.keras.utils.normalize(x_test, axis=1)
 
+    print(x_train[0])
+
     model = tf.keras.models.Sequential()
     model.add(tf.keras.layers.Flatten())
     model.add(tf.keras.layers.Dense(128, activation=tf.nn.relu))
@@ -29,13 +32,13 @@ def train(classifications: int, data: list, results: list, testdata: list, testr
     numberOfNeurons = (len(data[0]) + classifications)/2
     model = tf.keras.models.Sequential()
     model.add(tf.keras.layers.Flatten())
-    model.add(tf.keras.layers.Dense(numberOfNeurons), activation=tf.nn.relu)
-    model.add(tf.keras.layers.Dense(numberOfNeurons), activation=tf.nn.relu)
-    model.add(tf.keras.layers.Dense(classifications), tf.nn.softmax)
+    model.add(tf.keras.layers.Dense(numberOfNeurons, activation=tf.nn.relu))
+    model.add(tf.keras.layers.Dense(numberOfNeurons, activation=tf.nn.relu))
+    model.add(tf.keras.layers.Dense(classifications, tf.nn.softmax))
 
     model.compile(optimizer='SGD',
                   loss='sparse_categorical_crossentropy',
-                  metrics=['accuraccy'])
+                  metrics=['accuracy'])
     model.fit(data, results, epochs=5)
 
     loss, accuracy = model.evaluate(testdata, testresults)