main
ifiguero 2025-03-16 22:24:57 -03:00
parent 714fa2e87e
commit 925c2f9c20
1 changed file with 17 additions and 10 deletions

View File

@@ -184,12 +184,16 @@ class eNoseTrainer:
X_test_ref = ray.put(X_test_orig)
Y_test_ref = ray.put(Y_test_orig)
# input channels 80 window
# l1 minimo 2, maximo 5 => 40 | 16
# maxpool 2 o 3 => 20 |
# l2 minimo 2 maximo 5 => 4 | 2/5
# maxpool 2 o 3 => 2 | 2/15
def build_model_conv1D(config, input_shape, output_dim):
model = keras.Sequential([
layers.Conv1D(filters=config['filters'], kernel_size=config['kernel_size'], stride=config['kernel_size']//2, activation='relu', input_shape=input_shape),
layers.Conv1D(filters=config['filters'], kernel_size=config['kernel_l1'], stride=config['kernel_l1']//2, activation='relu', padding='causal', input_shape=input_shape),
layers.MaxPooling1D(pool_size=config['pool_size']),
layers.Conv1D(filters=config['filters'] * 2, kernel_size=config['kernel_size'], stride=config['kernel_size']//2, activation='relu'),
layers.Conv1D(filters=config['filters'] * 2, kernel_size=config['kernel_l2'], stride=config['kernel_l2']//2, activation='relu', padding='causal'),
layers.MaxPooling1D(pool_size=config['pool_size']),
layers.Flatten(),
layers.Dense(config['dense_units'], activation='relu'),
@@ -226,15 +230,18 @@ class eNoseTrainer:
config_space = {
'filters': tune.choice([16, 32, 64]),
'kernel_size': tune.choice([3, 5, 7]),
'kernel_l1': tune.choice([3, 5, 7]),
'kernel_l2': tune.choice([3, 5, 7]),
'pool_size': tune.choice([2, 3]),
'dense_units': tune.choice([32, 64, 128]),
'dropout': tune.choice([0.1, 0.2, 0.3]),
'lr': tune.choice([0.001, 0.0005, 0.0001]),
'dense_units': tune.choice([32, 64, 128, 256]),
'dropout': tune.choice([0.05, 0.15, 0.3]),
'lr': tune.choice([0.01, 0.005, 0.001]),
'batch_size': tune.choice([16, 32, 64]),
'epochs': epochs
'epochs': epochs
}
total_space = (3*3*2*3*3*3*3)
total_space = (3*3*3*2*3*3*3*3)
self.logger.debug(f"total_space: {total_space}, num_samples: {int(nsamples*total_space)}")
scheduler = ASHAScheduler(metric='mse', mode='min', max_t=epochs, grace_period=5, reduction_factor=2)
# analysis = tune.run(train_model, config=config_space, num_samples=num_samples, scheduler=scheduler)
@@ -354,7 +361,7 @@ class eNoseTrainer:
plt.close()
def fit(self):
windows = [32, 64, 128]
windows = [128, 256, 384]
total_train_queue = 2*int(1/self.ratio)*(len(self.get_model_train())+len(windows))
self.logger.info("{:=^60}".format(f'Begin Fit {total_train_queue} Models'))
self.trained = 0