import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, InputLayer, Dropout, Conv1D, Conv2D, Flatten, Reshape, MaxPooling1D, MaxPooling2D, BatchNormalization, TimeDistributed
from tensorflow.keras.optimizers import Adam
# model architecture
model = Sequential()
# explicit input layer: 48 input features, matching the architecture summary below
model.add(InputLayer(input_shape=(48,)))
model.add(Dense(90, activation='tanh',
                activity_regularizer=tf.keras.regularizers.l1(0.00001)))
model.add(Dropout(0.25))
model.add(Dense(120, activation='tanh',
                activity_regularizer=tf.keras.regularizers.l1(0.00001)))
model.add(Dropout(0.25))
model.add(Dense(90, activation='tanh',
                activity_regularizer=tf.keras.regularizers.l1(0.00001)))
model.add(Dropout(0.25))
model.add(Dense(12, activation='tanh',
                activity_regularizer=tf.keras.regularizers.l1(0.00001)))
# 'classes' is the number of output classes (11 here), provided by the surrounding training environment
model.add(Dense(classes, activation='softmax', name='y_pred'))
# this controls the learning rate
opt = Adam(learning_rate=0.0005, beta_1=0.9, beta_2=0.999)
# this controls the batch size, or you can manipulate the tf.data.Dataset objects yourself
BATCH_SIZE = 32
train_dataset = train_dataset.batch(BATCH_SIZE, drop_remainder=False)
validation_dataset = validation_dataset.batch(BATCH_SIZE, drop_remainder=False)
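# (optional) as noted above, you can also manipulate the tf.data.Dataset objects yourself
# instead of plain batching; a minimal sketch with illustrative shuffle/prefetch values
# (not part of the original script):
# train_dataset = train_dataset.shuffle(buffer_size=1024).batch(BATCH_SIZE).prefetch(tf.data.AUTOTUNE)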
callbacks = [tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=30)]
# BatchLoggerCallback and train_sample_count are assumed to be provided by the surrounding training environment
callbacks.append(BatchLoggerCallback(BATCH_SIZE, train_sample_count))
# train the neural network
model.compile(loss='categorical_crossentropy', optimizer=opt, metrics=['accuracy'])
model.fit(train_dataset, epochs=600, validation_data=validation_dataset, verbose=2, callbacks=callbacks)
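To sanity-check the result after training, a minimal sketch (model.evaluate and model.summary are standard Keras calls, not part of the original script):

# report loss/accuracy on the validation set and print the layer stack summarised below
val_loss, val_acc = model.evaluate(validation_dataset, verbose=0)
model.summary()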
Input layer (48 features)
Dense layer (90 neurons)
Dense layer (120 neurons)
Dense layer (90 neurons)
Dense layer (12 neurons)
Output layer (11 classes)