somewhat working

Dominik Moritz Roth 2024-05-26 00:28:33 +02:00
parent 2ce2e8c384
commit fb51634417


@@ -1,5 +1,5 @@
 name: DEFAULT
-project: Spikey
+project: Spikey_1
 slurm:
   name: 'Spikey_{config[name]}'
@@ -22,7 +22,7 @@ runner: spikey
 scheduler:
   reps_per_version: 1
-  agents_per_job: 1
+  agents_per_job: 100
   reps_per_agent: 1
 wandb:
@@ -37,35 +37,6 @@ wandb:
   monitor_gym: False
   save_code: False
----
-name: Test
-import: $
-latent_projector:
-  type: rnn # Options: 'fc', 'rnn'
-  input_size: 195 # =0.01s (19531 = 1s). Input size for the Latent Projector (length of snippets).
-  latent_size: 4 # Size of the latent representation before message passing.
-  #layer_shapes: [256, 32] # List of layer sizes for the latent projector (if type is 'fc').
-  #activations: ['ReLU', 'ReLU'] # Activation functions for the latent projector layers (if type is 'fc').
-  rnn_hidden_size: 4 # Hidden size for the RNN projector (if type is 'rnn').
-  rnn_num_layers: 1 # Number of layers for the RNN projector (if type is 'rnn').
-middle_out:
-  output_size: 4 # Size of the latent representation after message passing.
-  num_peers: 3 # Number of most correlated peers to consider.
-predictor:
-  layer_shapes: [4] # List of layer sizes for the predictor.
-  activations: ['ELU'] # Activation functions for the predictor layers.
-training:
-  epochs: 1024 # Number of training epochs.
-  batch_size: 16 # 64 # Batch size for training.
-  num_batches: 4 # Batches per epoch
-  learning_rate: 0.05 # Learning rate for the optimizer.
-  eval_freq: -1 # 8 # Frequency of evaluation during training (in epochs).
-  save_path: models # Directory to save the best model and encoder.
 evaluation:
   full_compression: false # Perform full compression during evaluation
@@ -79,4 +50,86 @@ data:
   cut_length: null # Optional length to cut sequences to.
 profiler:
-  enable: false
+  enable: false
+training:
+  eval_freq: -1 # 8 # Frequency of evaluation during training (in epochs).
+  save_path: models # Directory to save the best model and encoder.
+---
+name: FC
+import: $
+latent_projector:
+  type: fc # Options: 'fc', 'rnn'
+  input_size: 1953 # =0.1s (19531 = 1s). Input size for the Latent Projector (length of snippets).
+  latent_size: 4 # Size of the latent representation before message passing.
+  layer_shapes: [32, 8] # List of layer sizes for the latent projector (if type is 'fc').
+  activations: ['ReLU', 'ReLU'] # Activation functions for the latent projector layers (if type is 'fc').
+  #rnn_hidden_size: 4 # Hidden size for the RNN projector (if type is 'rnn').
+  #rnn_num_layers: 1 # Number of layers for the RNN projector (if type is 'rnn').
+middle_out:
+  output_size: 4 # Size of the latent representation after message passing.
+  num_peers: 3 # Number of most correlated peers to consider.
+predictor:
+  layer_shapes: [3] # List of layer sizes for the predictor.
+  activations: ['ReLU'] # Activation functions for the predictor layers.
+training:
+  epochs: 1024 # Number of training epochs.
+  batch_size: 32 # Batch size for training.
+  num_batches: 1 # Batches per epoch
+  learning_rate: 0.01 # Learning rate for the optimizer.
+---
+name: FC6
+import: $
+latent_projector:
+  type: fc # Options: 'fc', 'rnn'
+  input_size: 195 # =0.01s (19531 = 1s). Input size for the Latent Projector (length of snippets).
+  latent_size: 4 # Size of the latent representation before message passing.
+  layer_shapes: [16] # List of layer sizes for the latent projector (if type is 'fc').
+  activations: ['ReLU'] # Activation functions for the latent projector layers (if type is 'fc').
+  #rnn_hidden_size: 4 # Hidden size for the RNN projector (if type is 'rnn').
+  #rnn_num_layers: 1 # Number of layers for the RNN projector (if type is 'rnn').
+middle_out:
+  output_size: 8 # Size of the latent representation after message passing.
+  num_peers: 3 # Number of most correlated peers to consider.
+predictor:
+  layer_shapes: [3] # List of layer sizes for the predictor.
+  activations: ['ReLU'] # Activation functions for the predictor layers.
+training:
+  epochs: 1024 # Number of training epochs.
+  batch_size: 16 # Batch size for training.
+  num_batches: 1 # Batches per epoch
+  learning_rate: 0.01 # Learning rate for the optimizer.
+---
+name: RNN
+import: $
+latent_projector:
+  type: rnn # Options: 'fc', 'rnn'
+  input_size: 1953 # =0.1s (19531 = 1s). Input size for the Latent Projector (length of snippets).
+  latent_size: 4 # Size of the latent representation before message passing.
+  #layer_shapes: [32, 8] # List of layer sizes for the latent projector (if type is 'fc').
+  #activations: ['ReLU', 'ReLU'] # Activation functions for the latent projector layers (if type is 'fc').
+  rnn_hidden_size: 3 # Hidden size for the RNN projector (if type is 'rnn').
+  rnn_num_layers: 2 # Number of layers for the RNN projector (if type is 'rnn').
+middle_out:
+  output_size: 4 # Size of the latent representation after message passing.
+  num_peers: 3 # Number of most correlated peers to consider.
+predictor:
+  layer_shapes: [3] # List of layer sizes for the predictor.
+  activations: ['ReLU'] # Activation functions for the predictor layers.
+training:
+  epochs: 1024 # Number of training epochs.
+  batch_size: 64 # Batch size for training.
+  num_batches: 2 # Batches per epoch
+  learning_rate: 0.01 # Learning rate for the optimizer.
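
For orientation, the sketch below shows one way the latent_projector block of these configs could be turned into a model, assuming a PyTorch backend. The class name LatentProjector and all construction details are hypothetical and not taken from this repository; only the config keys (type, input_size, latent_size, layer_shapes, activations, rnn_hidden_size, rnn_num_layers) come from the YAML above.

# Hypothetical sketch (not the repository's implementation): builds a projector
# from a dict shaped like the `latent_projector` blocks in the configs above.
import torch
import torch.nn as nn


class LatentProjector(nn.Module):  # class name is an assumption, not from the repo
    def __init__(self, cfg: dict):
        super().__init__()
        self.type = cfg["type"]
        if self.type == "fc":
            # 'fc': stack Linear layers per `layer_shapes`/`activations`,
            # then project down to `latent_size`.
            layers, in_dim = [], cfg["input_size"]
            for width, act in zip(cfg["layer_shapes"], cfg["activations"]):
                layers += [nn.Linear(in_dim, width), getattr(nn, act)()]
                in_dim = width
            layers.append(nn.Linear(in_dim, cfg["latent_size"]))
            self.net = nn.Sequential(*layers)
        elif self.type == "rnn":
            # 'rnn': run a GRU over the snippet and map the last hidden state
            # to `latent_size`. The GRU choice is an assumption.
            self.rnn = nn.GRU(
                input_size=1,
                hidden_size=cfg["rnn_hidden_size"],
                num_layers=cfg["rnn_num_layers"],
                batch_first=True,
            )
            self.head = nn.Linear(cfg["rnn_hidden_size"], cfg["latent_size"])
        else:
            raise ValueError(f"unknown projector type: {self.type}")

    def forward(self, snippet: torch.Tensor) -> torch.Tensor:
        # snippet: (batch, input_size) raw samples
        if self.type == "fc":
            return self.net(snippet)
        out, _ = self.rnn(snippet.unsqueeze(-1))  # treat samples as a 1-D sequence
        return self.head(out[:, -1])


# Usage with the 'FC' config above: 1953 samples -> [32, 8] -> latent_size 4.
cfg = {"type": "fc", "input_size": 1953, "latent_size": 4,
       "layer_shapes": [32, 8], "activations": ["ReLU", "ReLU"]}
proj = LatentProjector(cfg)
print(proj(torch.randn(16, 1953)).shape)  # torch.Size([16, 4])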