Commit 38aa58b
running black on src code
beckynevin committed Mar 25, 2024
1 parent 6442245 commit 38aa58b
Showing 3 changed files with 206 additions and 152 deletions.
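
These diffs are the mechanical output of the black autoformatter (comment spacing, line-length wrapping, blank-line normalization, end-of-file newlines). The exact invocation is not recorded in the commit, but it would have been something along the lines of `black src/scripts/` run from the repository root.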
src/scripts/DeepEnsemble.py (3 changes: 2 additions & 1 deletion)
@@ -1,6 +1,7 @@
 import argparse
 import logging
 
+
 def parse_args():
     parser = argparse.ArgumentParser(
         description="Transferring data from embargo butler to another butler"
@@ -274,4 +275,4 @@ def parse_args():
 if datalist_exposure:
     butler.pruneDatasets(refs=datasetRefs_exposure, unstore=True, purge=True)
 if datalist_no_exposure:
-    butler.pruneDatasets(refs=datasetRefs_no_exposure, unstore=True, purge=True)
\ No newline at end of file
+    butler.pruneDatasets(refs=datasetRefs_no_exposure, unstore=True, purge=True)
src/scripts/models.py (54 changes: 27 additions & 27 deletions)
@@ -54,24 +54,21 @@ def forward(self, x):
         mu = x[:, 0]
         # softplus enforces positivity
         var = nn.functional.softplus(x[:, 1])
-        #var = x[:, 1]
+        # var = x[:, 1]
         return torch.stack((mu, var), dim=1)
 
 
-
-def model_setup_DE(loss_type, DEVICE):#, INIT_LR=0.001):
+def model_setup_DE(loss_type, DEVICE):  # , INIT_LR=0.001):
     # initialize the model from scratch
     if loss_type == "no_var_loss":
-        #model = de_no_var().to(DEVICE)
+        # model = de_no_var().to(DEVICE)
         lossFn = torch.nn.MSELoss(reduction="mean")
     if loss_type == "var_loss":
-        #model = de_var().to(DEVICE)
+        # model = de_var().to(DEVICE)
         Layer = MuVarLayer
-        lossFn = torch.nn.GaussianNLLLoss(full=False,
-                                          eps=1e-06,
-                                          reduction="mean")
+        lossFn = torch.nn.GaussianNLLLoss(full=False, eps=1e-06, reduction="mean")
     if loss_type == "bnll_loss":
-        #model = de_var().to(DEVICE)
+        # model = de_var().to(DEVICE)
         Layer = MuVarLayer
         lossFn = loss_bnll
     model = torch.nn.Sequential(Model(2), Layer())
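
For orientation, a hypothetical call into the function reformatted above. The tail of model_setup_DE is collapsed in this diff, so the return signature sketched below (model plus lossFn) is an assumption, as are the device setup and the chosen loss_type:

    import torch

    DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    # "no_var_loss" -> plain MSE; "var_loss" -> GaussianNLLLoss; "bnll_loss" -> loss_bnll
    model, lossFn = model_setup_DE("bnll_loss", DEVICE)  # assumed return signature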
@@ -146,29 +143,30 @@ def forward(self, x):
         return self.model(x)
 
 
-
-
 def loss_der(y, y_pred, coeff):
     gamma, nu, alpha, beta = y[:, 0], y[:, 1], y[:, 2], y[:, 3]
     error = gamma - y_pred
     omega = 2.0 * beta * (1.0 + nu)
 
-
     # define aleatoric and epistemic uncert
     u_al = np.sqrt(
         beta.detach().numpy()
         * (1 + nu.detach().numpy())
         / (alpha.detach().numpy() * nu.detach().numpy())
     )
     u_ep = 1 / np.sqrt(nu.detach().numpy())
-    return torch.mean(
-        0.5 * torch.log(math.pi / nu)
-        - alpha * torch.log(omega)
-        + (alpha + 0.5) * torch.log(error**2 * nu + omega)
-        + torch.lgamma(alpha)
-        - torch.lgamma(alpha + 0.5)
-        + coeff * torch.abs(error) * (2.0 * nu + alpha)
-    ), u_al, u_ep
+    return (
+        torch.mean(
+            0.5 * torch.log(math.pi / nu)
+            - alpha * torch.log(omega)
+            + (alpha + 0.5) * torch.log(error**2 * nu + omega)
+            + torch.lgamma(alpha)
+            - torch.lgamma(alpha + 0.5)
+            + coeff * torch.abs(error) * (2.0 * nu + alpha)
+        ),
+        u_al,
+        u_ep,
+    )
 
 
 def loss_sder(y, y_pred, coeff):
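
Written out in LaTeX (added here for readability, with e = gamma - y_pred, omega = 2*beta*(1+nu), and lambda denoting the coeff argument), the reformatted return statement of loss_der is the deep evidential regression objective of Amini et al. (2020), i.e. the Normal-Inverse-Gamma negative log-likelihood plus an error-scaled regularizer:

    \mathcal{L}_{\rm DER} = \mathrm{mean}\Big[\tfrac{1}{2}\log\tfrac{\pi}{\nu} - \alpha\log\omega + \big(\alpha + \tfrac{1}{2}\big)\log\big(e^{2}\nu + \omega\big) + \log\Gamma(\alpha) - \log\Gamma\big(\alpha + \tfrac{1}{2}\big) + \lambda\,|e|\,(2\nu + \alpha)\Big]

    u_{\rm al} = \sqrt{\beta(1+\nu)/(\alpha\nu)}, \qquad u_{\rm ep} = 1/\sqrt{\nu}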
@@ -184,21 +182,22 @@ def loss_sder(y, y_pred, coeff):
     )
     u_ep = 1 / np.sqrt(nu.detach().numpy())
 
-    return torch.mean(torch.log(var)
-                      + (1.0 + coeff * nu) * error**2 / var), u_al, u_ep
+    return torch.mean(torch.log(var) + (1.0 + coeff * nu) * error**2 / var), u_al, u_ep
 
 
 # from martius lab
 # https://github.com/martius-lab/beta-nll
 # and Seitzer+2020
 
-def loss_bnll(mean, variance, target, beta):#beta=0.5):
+
+def loss_bnll(mean, variance, target, beta):  # beta=0.5):
     """Compute beta-NLL loss
     :param mean: Predicted mean of shape B x D
     :param variance: Predicted variance of shape B x D
     :param target: Target of shape B x D
-    :param beta: Parameter from range [0, 1] controlling relative
-        weighting between data points, where `0` corresponds to
+    :param beta: Parameter from range [0, 1] controlling relative
+        weighting between data points, where `0` corresponds to
         high weight on low error points and `1` to an equal weighting.
     :returns: Loss per batch element of shape B
     """
@@ -207,6 +206,7 @@ def loss_bnll(mean, variance, target, beta):#beta=0.5):
     loss = loss * (variance.detach() ** beta)
     return loss.sum(axis=-1)
 
+
 '''
 def get_loss(transform, beta=None):
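
To make the beta-weighting concrete, a hypothetical smoke test for loss_bnll; the B x D shapes follow the docstring, the tensor values are illustrative, and beta=0.5 echoes the commented-out default in the signature:

    import torch

    B, D = 32, 1
    mean = torch.randn(B, D, requires_grad=True)
    # keep the predicted variance positive, mirroring the softplus in MuVarLayer
    variance = torch.nn.functional.softplus(torch.randn(B, D))
    target = torch.randn(B, D)

    per_element = loss_bnll(mean, variance, target, beta=0.5)  # shape (B,)
    per_element.mean().backward()  # beta=0 would recover the unweighted Gaussian NLL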
if beta:
Expand All @@ -228,4 +228,4 @@ def negative_log_likelihood(targets, outputs):
loglik = - K.log(var) - K.square((y - mu)) / var
return - loglik
return negative_log_likelihood
'''
'''