random_initialization.py
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 16 17:10:21 2022
@author: mahom
"""
import random
import torch
from to_torch import to_torch
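# Note: to_torch is a project-local helper; it is assumed here to convert
# Python scalars (e.g. the output of random.uniform) into torch tensors.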
def random_initialization(model, likelihood, kernel_type, task_num, method):
    """Randomly re-initialize the model hyperparameters, sampling each one
    uniformly between the lower and upper bounds of its constraint."""
    if method == "gpi_ori":
        if kernel_type == "linear":
            print("Include random initialization for linear kernel!")
        if kernel_type == "rbf":
            # Likelihood noise.
            ub = model.likelihood.noise_covar.raw_noise_constraint.upper_bound
            lb = model.likelihood.noise_covar.raw_noise_constraint.lower_bound
            noise0 = to_torch(random.uniform(lb, ub))
            model.likelihood.noise = noise0.requires_grad_(True)
            #################################################
            # Output scale of the scaled RBF kernel.
            ub = model.covar_module.kernels[0].raw_outputscale_constraint.upper_bound
            lb = model.covar_module.kernels[0].raw_outputscale_constraint.lower_bound
            outputscale0 = to_torch(random.uniform(lb, ub))
            model.covar_module.kernels[0].outputscale = outputscale0.requires_grad_(True)
            #################################################
            # Lengthscale of the RBF base kernel.
            ub = model.covar_module.kernels[0].base_kernel.raw_lengthscale_constraint.upper_bound
            lb = model.covar_module.kernels[0].base_kernel.raw_lengthscale_constraint.lower_bound
            lengthscale0 = to_torch(random.uniform(lb, ub))
            model.covar_module.kernels[0].base_kernel.lengthscale = lengthscale0.requires_grad_(True)
            #################################################
            # Bias term of the second (bias) kernel.
            ub = model.covar_module.kernels[1].raw_bias_constraint.upper_bound
            lb = model.covar_module.kernels[1].raw_bias_constraint.lower_bound
            bias0 = to_torch(random.uniform(lb, ub))
            model.covar_module.kernels[1].bias = bias0.requires_grad_(True)
    elif method == "gpmt":
        if kernel_type == "rbf":
            # Global likelihood noise.
            ub = model.likelihood.raw_noise_constraint.upper_bound
            lb = model.likelihood.raw_noise_constraint.lower_bound
            noise0 = to_torch(random.uniform(lb, ub))
            model.likelihood.noise = noise0.requires_grad_(True)
            #################################################
            # Per-task noises: one random draw replicated across all tasks.
            ub = model.likelihood.raw_task_noises_constraint.upper_bound
            lb = model.likelihood.raw_task_noises_constraint.lower_bound
            noise0 = to_torch(random.uniform(lb, ub))
            model.likelihood.task_noises = noise0 * torch.ones(task_num, requires_grad=True)
            #################################################
            # Output scale of the data covariance (scaled RBF) kernel.
            ub = model.covar_module.covar_module_list[0].data_covar_module.kernels[0].raw_outputscale_constraint.upper_bound
            lb = model.covar_module.covar_module_list[0].data_covar_module.kernels[0].raw_outputscale_constraint.lower_bound
            outputscale0 = to_torch(random.uniform(lb, ub))
            model.covar_module.covar_module_list[0].data_covar_module.kernels[0].outputscale = outputscale0.requires_grad_(True)
            #################################################
            # Lengthscale of the data covariance RBF base kernel.
            ub = model.covar_module.covar_module_list[0].data_covar_module.kernels[0].base_kernel.raw_lengthscale_constraint.upper_bound
            lb = model.covar_module.covar_module_list[0].data_covar_module.kernels[0].base_kernel.raw_lengthscale_constraint.lower_bound
            lengthscale0 = to_torch(random.uniform(lb, ub))
            model.covar_module.covar_module_list[0].data_covar_module.kernels[0].base_kernel.lengthscale = lengthscale0
            #################################################
            # Bias term of the data covariance's second (bias) kernel.
            ub = model.covar_module.covar_module_list[0].data_covar_module.kernels[1].raw_bias_constraint.upper_bound
            lb = model.covar_module.covar_module_list[0].data_covar_module.kernels[1].raw_bias_constraint.lower_bound
            bias0 = to_torch(random.uniform(lb, ub))
            model.covar_module.covar_module_list[0].data_covar_module.kernels[1].bias = bias0.requires_grad_(True)
            #################################################
            # Reset every task-specific constant mean to zero.
            for t in range(0, task_num):
                model.mean_module.base_means[t].constant = torch.nn.Parameter(torch.tensor(0.))
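# -----------------------------------------------------------------------------
# Usage sketch (an assumption, not part of the original file): the function
# mutates `model` in place and returns None, so a typical caller would
# re-initialize the hyperparameters before each training restart, e.g.:
#
#   for restart in range(n_restarts):                        # n_restarts is hypothetical
#       random_initialization(model, likelihood, "rbf", task_num, "gpi_ori")
#       train_gp(model, likelihood)                           # hypothetical training routine
# -----------------------------------------------------------------------------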