Ich bin gerade dabei, ein neuronales Netz zu trainieren. Das Problem dabei ist, dass es mit der jetzigen Datenmenge sehr lange braucht – etwa 40 Minuten pro Epoche.
Code:
import os
import time
import numpy as np
import tensorflow as tf
from tensorflow.keras.optimizers import RMSprop
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout
from keras.preprocessing import image
from keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.callbacks import TensorBoard
from PIL import ImageFile
# Tolerate partially corrupted image files instead of raising on load.
ImageFile.LOAD_TRUNCATED_IMAGES = True

# Class labels, in the order the directory iterators should use.
CLASS_NAME = ["Klasse_1", "Klasse_2", "Klasse_3"]

# Training pipeline: scale pixels to [0, 1] and apply light augmentation
# (shear, zoom, horizontal flip).
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
)

# Test pipeline: rescaling only — no augmentation on evaluation data.
test_datagen = ImageDataGenerator(rescale=1. / 255)

# Batch size: how many images are processed per step.
BS = 128

# Build the training iterator from the directory tree.
training_set = train_datagen.flow_from_directory(
    r"D:/Bilder/Train",
    target_size=(128, 128),
    batch_size=BS,
    classes=CLASS_NAME,
    class_mode="categorical",
)

# Build the test/validation iterator the same way.
test_set = test_datagen.flow_from_directory(
    r"D:/Bilder/Test",
    target_size=(128, 128),
    batch_size=BS,
    classes=CLASS_NAME,
    class_mode="categorical",
)
# Hyperparameter grid: number of Dense layers, layer width, number of
# convolutional layers. Each combination is trained and saved separately.
dense_layers = [1]   # how many fully connected layers before the output
layer_sizes = [64]   # neurons/filters per hidden layer
conv_layers = [3]    # how many convolutional layers

for dense_layer in dense_layers:
    for layer_size in layer_sizes:
        for conv_layer in conv_layers:
            # Timestamped run name, reused for TensorBoard logs and the
            # saved model file.
            NAME = "{}-conv-{}-nodes-{}-dense-{}".format(
                conv_layer, layer_size, dense_layer, int(time.time()))
            print(NAME)
            tensorBoard = TensorBoard(log_dir="logs/{}".format(NAME))

            model = Sequential()
            # Input layer: 128x128 RGB images.
            model.add(Conv2D(16, (3, 3), activation="relu",
                             input_shape=(128, 128, 3)))
            model.add(MaxPooling2D(2, 2))
            # Remaining convolutional blocks.
            for _ in range(conv_layer - 1):
                model.add(Conv2D(layer_size, (3, 3), activation="relu"))
                model.add(MaxPooling2D(2, 2))
            model.add(Flatten())
            # Fully connected head.
            for _ in range(dense_layer):
                # BUG FIX: the original used Dense(dense_layer, ...) — with
                # dense_layer == 1 that is a 1-neuron bottleneck before the
                # softmax. The intended layer width is layer_size.
                model.add(Dense(layer_size, activation="relu"))
                model.add(Dropout(0.1))
            # Output: one probability per class.
            model.add(Dense(3, activation="softmax"))
            model.summary()

            # `learning_rate` replaces the deprecated `lr` keyword, which
            # newer TensorFlow/Keras versions reject.
            opt = RMSprop(learning_rate=0.001)
            model.compile(loss="categorical_crossentropy",
                          optimizer=opt,
                          metrics=["accuracy"])
            # The batch size comes from the data generators; passing
            # batch_size to fit() alongside a generator is an error in
            # current TensorFlow versions, so it was removed here.
            model.fit(training_set,
                      epochs=3,
                      callbacks=[tensorBoard],
                      verbose=1,
                      validation_data=test_set)
            # Persist the trained model for this configuration.
            model.save(NAME + ".model")