Commit fafee0d5 authored by Martin Řepa's avatar Martin Řepa

Use bigger neural network

parent 376527aa
......@@ -139,5 +139,6 @@ if __name__ == "__main__":
log_file = f'{base_dir}/log'
with open(log_file, 'a') as log:
sys.stderr = log
sys.stdout = log
main(experiment_conf, base_dir)
......@@ -139,6 +139,7 @@ if __name__ == "__main__":
log_file = f'{base_dir}/log'
with open(log_file, 'a') as log:
sys.stderr = log
sys.stdout = log
main(experiment_conf, base_dir)
......
experiments_per_setup: 8 # 6
legacy_folder: /home/ignac/experiments/learning_epochs/18000to30000by4000_8times # Change in regards with configuration
legacy_folder: /home/ignac/experiments/learning_epochs/2000to30000by4000_8times # Change in regards with configuration
epochs:
lower_bound: 18000
number_of_steps: 4 # 8 (6-8 should be enough)
lower_bound: 2000
number_of_steps: 8 # 8 (6-8 should be enough)
upper_bound: 30000 # 30000
conf:
i_a: 1
......
......@@ -177,6 +177,7 @@ if __name__ == "__main__":
log_file = f'{base_dir}/log'
with open(log_file, 'a') as log:
sys.stderr = log
sys.stdout = log
main(experiment_conf, base_dir)
# main(experiment_conf, base_dir)
......@@ -70,13 +70,13 @@ class NeuralNetwork:
def __init__(self, input_features=2,
nn_conf: NeuralNetworkConfig = NeuralNetworkConfig()):
self.model = nn.Sequential(
nn.Linear(input_features, 5),
nn.Linear(input_features, 20),
nn.ReLU(),
nn.Linear(5, 5),
nn.Linear(20, 15),
nn.ReLU(),
nn.Linear(5, 5),
nn.Linear(15, 20),
nn.ReLU(),
nn.Linear(5, 1),
nn.Linear(20, 1),
nn.Tanh(),
SoftClip(50)
# nn.Sigmoid()
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment