From 48cecaa8cf090ad33173084c16ea4160f3dc909a Mon Sep 17 00:00:00 2001
From: Israel Figueroa
Date: Sun, 16 Mar 2025 20:32:03 -0300
Subject: [PATCH] tess

---
 TrainerClass.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/TrainerClass.py b/TrainerClass.py
index a26e153..8cb9c51 100644
--- a/TrainerClass.py
+++ b/TrainerClass.py
@@ -199,7 +199,6 @@ class eNoseTrainer:
             model.compile(optimizer=keras.optimizers.Adam(learning_rate=config['lr']), loss='mse')
             return model
 
-
         def train_model_conv1D(config):
             X_trainc1D = ray.get(X_train_ref)
             Y_trainc1D = ray.get(Y_train_ref)
@@ -217,7 +216,7 @@ class eNoseTrainer:
                 validation_data=(X_testc1D, Y_testc1D),
                 epochs=config['epochs'],
                 batch_size=config['batch_size'],
-                verbose=0,
+                verbose=1,
                 callbacks=[early_stopping]
             )
 
@@ -226,7 +225,7 @@ class eNoseTrainer:
             tune.report({'mse': mse})
 
         config_space = {
-            'filters': tune.choice([32, 64, 128]),
+            'filters': tune.choice([16, 32, 64]),
             'kernel_size': tune.choice([3, 5]),
             'pool_size': tune.choice([2, 3]),
             'dense_units': tune.choice([32, 64, 128]),
@@ -240,7 +239,7 @@ class eNoseTrainer:
         # analysis = tune.run(train_model, config=config_space, num_samples=num_samples, scheduler=scheduler)
         analysis = tune.run( tune.with_parameters(train_model_conv1D), config=config_space, num_samples=num_samples, scheduler=scheduler, max_concurrent_trials=3 )
 
         best_config = analysis.get_best_config(metric='mse', mode='min')
-        best_model = build_model_conv1D(best_config, X_train_ref.shape[1:], Y_train_ref.shape[1])
+        best_model = build_model_conv1D(best_config, X_train_orig.shape[1:], Y_train_orig.shape[1])
 
         ray.internal.free([X_train_ref, Y_train_ref, X_test_ref, Y_test_ref])
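
Note: the last hunk swaps X_train_ref.shape / Y_train_ref.shape for X_train_orig.shape / Y_train_orig.shape, presumably because the *_ref names hold Ray object references, which do not expose the underlying array's .shape. For context, here is a minimal, hypothetical sketch of the build_model_conv1D helper these hunks rely on, assuming only the config keys visible in config_space ('filters', 'kernel_size', 'pool_size', 'dense_units', 'lr') and the call shape build_model_conv1D(best_config, X_train_orig.shape[1:], Y_train_orig.shape[1]); the repository's actual builder may differ.

    # Hypothetical sketch only -- not the repository's real build_model_conv1D.
    from tensorflow import keras
    from tensorflow.keras import layers

    def build_model_conv1D(config, input_shape, output_dim):
        """Build a small Conv1D regressor from a Ray Tune config dict."""
        model = keras.Sequential([
            layers.Input(shape=input_shape),
            # 'filters', 'kernel_size', 'pool_size', 'dense_units' and 'lr' are
            # the keys sampled in config_space above.
            layers.Conv1D(filters=config['filters'],
                          kernel_size=config['kernel_size'],
                          padding='same',
                          activation='relu'),
            layers.MaxPooling1D(pool_size=config['pool_size']),
            layers.Flatten(),
            layers.Dense(config['dense_units'], activation='relu'),
            layers.Dense(output_dim),
        ])
        # Mirrors the compile call visible in the first hunk.
        model.compile(optimizer=keras.optimizers.Adam(learning_rate=config['lr']),
                      loss='mse')
        return model

With best_config obtained from analysis.get_best_config, such a builder would be called exactly as in the final hunk before retraining on the full training set.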