# --- Model compilation, callbacks, and training (Colab/Keras script) ---

# Compile the model: binary-classification loss, Adam with a small initial LR
# and built-in time-based decay; track accuracy, recall, and precision.
model.compile(loss='binary_crossentropy',
              optimizer=tf.keras.optimizers.Adam(learning_rate=0.0001, decay=1e-6),
              metrics=['accuracy', 'Recall', 'Precision'])

# Directory for TensorBoard logs, one subdirectory per model name.
logdir = os.path.join('/content/gdrive/MyDrive/Image Dataset/logs', model_name)
# os.mkdir(logdir)

from math import floor

# Training hyperparameters.
N_FOLDS = 5        # NOTE(review): unused in this snippet — presumably for CV elsewhere; confirm
INIT_LR = 3e-4     # initial LR for the step-decay schedule below
T_BS = 16          # NOTE(review): train batch size — not referenced here; likely used by the generators
V_BS = 16          # NOTE(review): validation batch size — not referenced here
decay_rate = 0.95  # multiplicative LR decay applied every decay_step epochs
decay_step = 1

# Callbacks: early stopping on val_loss, best-weights checkpointing,
# TensorBoard logging, and an exponential step-decay LR schedule.
cp = EarlyStopping(monitor='val_loss', mode='min', verbose=2,
                   patience=PATIENCE, restore_best_weights=True)
mc = ModelCheckpoint(model_name, monitor='val_loss', mode='min',
                     verbose=2, save_best_only=True)
tsb = TensorBoard(log_dir=logdir)
# BUG FIX: the original assignment ended with a stray trailing comma, which
# made `lrs` a 1-element tuple rather than the callback itself; Keras would
# then reject the tuple inside `callbacks=` at fit() time. Comma removed.
lrs = LearningRateScheduler(
    lambda epoch: INIT_LR * pow(decay_rate, floor(epoch / decay_step)))

# Fit the model and time the full training run.
start = timer()
history = model.fit(train_g,
                    epochs=EPOCHS,
                    steps_per_epoch=len(train_g),
                    validation_data=val_g,
                    validation_steps=len(val_g),
                    callbacks=[cp, mc, tsb, lrs])
end = timer()
elapsed = end - start
print('Total Time Elapsed: ', int(elapsed//60), ' minutes ', (round(elapsed%60)), ' seconds')
Preview:
Download PNG
Download JPEG
Download SVG
Tip: You can change the style, width & colours of the snippet with the inspect tool before clicking Download!
Click to optimize width for Twitter