#!/usr/bin/env python3
# coding: utf-8
"""
Script to run the DeepSphere experiment.
Both the fully convolutional (FCN) and the classic (CNN) architecture variants
are supported.
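
Usage (derived from the argument parsing in the __main__ block below):
    python experiments_2dcnn.py [FCN|CNN] [sigma order sigma_noise]
If sigma, order and sigma_noise are omitted, the whole parameter grid from
grid.pgrid() is run.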
"""
import os
import shutil
import sys
import numpy as np
import time
from deepsphere.utils import build_index
from deepsphere import models, experiment_helper
from deepsphere.data import LabeledDatasetWithNoise, LabeledDataset
from grid import pgrid
import hyperparameters
def single_experiment(sigma, order, sigma_noise, experiment_type):
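    """Run a single experiment for the given (sigma, order, sigma_noise) configuration and return the test error."""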
    ename = '_' + experiment_type
    Nside = 1024
    EXP_NAME = '40sim_{}sides_{}noise_{}order_{}sigma{}'.format(
        Nside, sigma_noise, order, sigma, ename)

    x_raw_train, labels_raw_train, x_raw_std = experiment_helper.get_training_data(sigma, order)
    x_raw_test, labels_test, _ = experiment_helper.get_testing_data(sigma, order, sigma_noise, x_raw_std)

    ret = experiment_helper.data_preprossing(x_raw_train, labels_raw_train, x_raw_test, sigma_noise, feature_type=None)
    features_train, labels_train, features_validation, labels_validation, features_test = ret
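    # Each sample holds nx*nx = (Nside/order)**2 pixels. Build an index over nlevels
    # resolution levels and reorder the pixels so the 2D CNN sees them in a consistent
    # 2D layout.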
    nx = Nside // order
    nlevels = np.round(np.log2(nx)).astype(int)
    index = build_index(nlevels).astype(int)
    features_train = features_train[:, index]
    features_validation = features_validation[:, index]
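    # Reorder the test features in the same way, then shuffle samples and labels together.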
    shuffle = np.random.permutation(len(features_test))
    features_test = features_test[:, index]
    features_test = features_test[shuffle]
    labels_test = labels_test[shuffle]
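    # Wrap the arrays into dataset objects; LabeledDatasetWithNoise adds noise to the
    # training maps on the fly, up to a level of sigma_noise.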
    training = LabeledDatasetWithNoise(features_train, labels_train, end_level=sigma_noise)
    validation = LabeledDataset(features_validation, labels_validation)

    # Better implementation, but it doesn't work for some reason.
    # params = hyperparameters.get_params_CNN2D(training.N, EXP_NAME, order, Nside, experiment_type)
    # model = Healpix2CNN(**params)
    params = hyperparameters.get_params(training.N, EXP_NAME, order, Nside, experiment_type)
    model = models.cnn2d(**params)
    # Cleanup before running again.
    shutil.rmtree('summaries/{}/'.format(EXP_NAME), ignore_errors=True)
    shutil.rmtree('checkpoints/{}/'.format(EXP_NAME), ignore_errors=True)
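    # Train, then evaluate the classification error on the validation and test sets.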
    model.fit(training, validation)

    error_validation = experiment_helper.model_error(model, features_validation, labels_validation)
    print('The validation error is {}%'.format(error_validation * 100), flush=True)

    error_test = experiment_helper.model_error(model, features_test, labels_test)
    print('The testing error is {}%'.format(error_test * 100), flush=True)

    return error_test


if __name__ == '__main__':
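    # First command line argument selects the architecture variant: 'FCN' (default) or 'CNN'.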
    if len(sys.argv) > 1:
        experiment_type = sys.argv[1]
    else:
        experiment_type = 'FCN'  # 'CNN'
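    # Optional sigma, order and sigma_noise arguments run a single configuration;
    # otherwise the whole parameter grid from pgrid() is explored.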
    if len(sys.argv) > 2:
        sigma = int(sys.argv[2])
        order = int(sys.argv[3])
        sigma_noise = float(sys.argv[4])
        grid = [(sigma, order, sigma_noise)]
    else:
        grid = pgrid()

    ename = '_' + experiment_type

    path = 'results/2dcnn/'
    os.makedirs(path, exist_ok=True)
    for sigma, order, sigma_noise in grid:
        print('Launch experiment for sigma={}, order={}, noise={}'.format(sigma, order, sigma_noise))

        # Avoid all jobs starting at the same time.
        time.sleep(np.random.rand() * 100)

        res = single_experiment(sigma, order, sigma_noise, experiment_type)
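        # Append [order, sigma_noise, test_error] to the per-sigma results file,
        # creating it on the first run.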
        filepath = os.path.join(path, 'reg3_deepsphere_results_list_sigma{}{}'.format(sigma, ename))
        new_data = [order, sigma_noise, res]
        if os.path.isfile(filepath + '.npz'):
            results = np.load(filepath + '.npz')['data'].tolist()
        else:
            results = []
        results.append(new_data)
        np.savez(filepath, data=results)