hyparams_config.py
def get_hyparams_config_class(dataset_name):
    """Return the hyperparameter config class registered under `dataset_name`.

    The lookup is done against this module's globals, so the name must match
    one of the classes defined below (Boiler, HHAR, HAR, WISDM).
    """
    if dataset_name not in globals():
        raise NotImplementedError("Dataset not found: {}".format(dataset_name))
    return globals()[dataset_name]


class Boiler:
    """Hyperparameters for the Boiler dataset."""

    def __init__(self):
        super(Boiler, self).__init__()
        self.test_per_step = 50       # run evaluation every 50 training steps
        self.training_steps = 1000
        self.drop_prob = 0
        self.learning_rate = 0.0015
        self.coeff = 10
        self.h_dim = 10
        self.dense_dim = 100
        self.lstm_layer = 1
        self.weight_decay = 4e-7


class HHAR:
    """Hyperparameters for the HHAR dataset."""

    def __init__(self):
        super(HHAR, self).__init__()
        self.test_per_step = 50
        self.training_steps = 3000
        self.drop_prob = 0
        self.learning_rate = 1e-3
        self.coeff = 10
        self.h_dim = 10
        self.dense_dim = 100
        self.lstm_layer = 1
        self.weight_decay = 4e-7


class HAR:
    """Hyperparameters for the HAR dataset."""

    def __init__(self):
        super(HAR, self).__init__()
        self.test_per_step = 50
        self.training_steps = 3000
        self.drop_prob = 0
        self.learning_rate = 0.0015
        self.coeff = 10
        self.h_dim = 10
        self.dense_dim = 100
        self.lstm_layer = 1
        self.weight_decay = 4e-7


class WISDM:
    """Hyperparameters for the WISDM dataset."""

    def __init__(self):
        super(WISDM, self).__init__()
        self.test_per_step = 50
        self.training_steps = 3000
        self.drop_prob = 0
        self.learning_rate = 0.0015
        self.coeff = 10
        self.h_dim = 10
        self.dense_dim = 100
        self.lstm_layer = 1
        self.weight_decay = 4e-7
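

if __name__ == "__main__":
    # Minimal usage sketch (an illustration, not part of the original module):
    # look up a config class by dataset name and instantiate it. The string
    # passed in must match one of the class names defined above; anything else
    # raises NotImplementedError.
    config = get_hyparams_config_class("HHAR")()
    print(config.learning_rate, config.training_steps)  # -> 0.001 3000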