qlearn/commun/dropout_toy_regression.py [46:89]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    def reset_noise(self):
        self.fc1.reset_noise()
        self.fc2.reset_noise()


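# Deterministic baseline: a plain two-layer MLP (1 -> 100 -> 1) with ReLU.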
class MLP(nn.Module):
    def __init__(self):
        nn.Module.__init__(self)
        self.fc1 = nn.Linear(1, 100)
        self.fc2 = nn.Linear(100, 1)

    def forward(self, x):
        x = self.fc1(x)
        x = F.relu(x)
        x = self.fc2(x)
        return x

# Toy dataset: 20 points drawn uniformly from [-4, 4],
# with targets y = x^3 + Gaussian noise of standard deviation sigma.
X = np.random.uniform(-4, 4, (20, 1)).astype('float32')
sigma = 3
epsilon = np.random.normal(size=X.shape).astype('float32')
Y = np.power(X, 3) + sigma * epsilon

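# BAYES switches between the stochastic RegressionModel defined above
# and the deterministic MLP baseline.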
if BAYES:
    regressor = RegressionModel()
else:
    regressor = MLP()

x = Variable(torch.from_numpy(X))
y = Variable(torch.from_numpy(Y))
if use_cuda:
    regressor.cuda()
    x = x.cuda()
    y = y.cuda()

# Build the optimiser after the (optional) move to GPU so that it holds
# references to the CUDA parameters.
optimiser = optim.Adam(regressor.parameters(), lr=0.01)

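# train() keeps the stochastic layers active; fit with 1000 full-batch steps.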
regressor.train()
for epoch in range(1000):
    regressor.zero_grad()
    if BAYES:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



qlearn/commun/toy_regression.py [55:100]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    def reset_noise(self):
        self.fc1.reset_noise()
        self.fc2.reset_noise()


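# Deterministic baseline: a plain two-layer MLP (1 -> 100 -> 1) with ReLU.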
class MLP(nn.Module):
    def __init__(self):
        nn.Module.__init__(self)
        self.fc1 = nn.Linear(1, 100)
        self.fc2 = nn.Linear(100, 1)

    def forward(self, x):
        x = self.fc1(x)
        x = F.relu(x)
        x = self.fc2(x)
        return x

# Toy dataset: 20 points drawn uniformly from [-4, 4],
# with targets y = x^3 + Gaussian noise of standard deviation sigma.
X = np.random.uniform(-4, 4, (20, 1)).astype('float32')
sigma = 3
epsilon = np.random.normal(size=X.shape).astype('float32')
Y = np.power(X, 3) + sigma * epsilon

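# BAYES switches between the stochastic RegressionModel defined above
# and the deterministic MLP baseline.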
if BAYES:
    regressor = RegressionModel()
else:
    regressor = MLP()


x = Variable(torch.from_numpy(X))
y = Variable(torch.from_numpy(Y))
if use_cuda:
    regressor.cuda()
    x = x.cuda()
    y = y.cuda()

# Build the optimiser after the (optional) move to GPU so that it holds
# references to the CUDA parameters.
optimiser = optim.Adam(regressor.parameters(), lr=0.01)

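# train() keeps the stochastic layers active; fit with 1000 full-batch steps.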
regressor.train()
for epoch in range(1000):
    regressor.zero_grad()
    if BAYES:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
