Commit

Update ReplicaABC.py

ARenanse authored May 7, 2020
1 parent e115e30 commit 03a8775
Showing 1 changed file with 16 additions and 9 deletions.

25 changes: 16 additions & 9 deletions ReplicaABC.py
@@ -33,7 +33,7 @@ def __init__(self, Model, NumSamples, GlobalFraction, Temperature, UseLG, LGProb
lr : (float), Learning Rate.
RWStepSize : (float), Step Size for Random Walk.
ChildConn : (mp.connection) It's used to transfer the Likelihood and Prior prob back to the main process.
-LossFunc : torch.nn 's Method, the Loss function to use while evaluating Langevin Gradients, used in self.GiveMeTheLoss
+LossFunc : torch.nn 's Method, the Loss function to use while evaluating Langevin Gradients, used in self.ReturnLoss
"""
@@ -96,7 +96,7 @@ def Likelihood(self):


@abstractmethod
-def GiveMeTheLoss(self):
+def ReturnLoss(self):
"""
Returns the loss [torch.tensor] using the self.LossFunc AFTER computing y_pred from Model as desired.
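For reference, a concrete subclass might satisfy this contract with something like the following method (a minimal sketch: self.TrainX and self.TrainY are assumed training tensors provided by the subclass; only self.Model and self.LossFunc come from this base class):

def ReturnLoss(self):
    # Forward pass on the (assumed) training inputs, then evaluate the
    # configured loss function against the targets.
    y_pred = self.Model(self.TrainX)
    return self.LossFunc(y_pred, self.TrainY)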
@@ -109,9 +109,9 @@ def GiveMeTheLoss(self):


@abstractmethod
-def InitializeMiscParameters(self):
+def InitializeMetaParameters(self):
"""
-Call this function to initialize the Miscellaneous Parameters.
+Call this function to initialize the Meta Parameters.
After this function is called, these three class variables should hold respective initial data:
1. self.CurrentPriorProb : Holds the Current value of Log Prior Likelihood at each iteration, thus it needs to be initialized.
@@ -281,7 +281,7 @@ def run(self):
#Step 1: Make a copy of current model parameters as a List
#----------->Already done.
#Step 2: Do a backward pass to obtain gradients
-loss = self.GiveMeTheLoss()
+loss = self.ReturnLoss()
self.Model.zero_grad()
loss.backward()
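Steps 1 and 2 above set up a Langevin-gradient proposal: once loss.backward() has populated the gradients, the proposal is one gradient step followed by Gaussian random-walk noise. A minimal stand-alone sketch of that update, using lr and RWStepSize as described in the __init__ docstring (flattening the parameters into a single vector is an assumption about the bookkeeping, not a quote of this file):

import torch

def langevin_proposal(model, lr, rw_step_size):
    # Call after loss.backward(): collect the current parameters and their
    # gradients as flat vectors.
    theta = torch.cat([p.data.reshape(-1) for p in model.parameters()])
    grad = torch.cat([p.grad.reshape(-1) for p in model.parameters()])
    # One Langevin (gradient) step, then Gaussian random-walk noise on top.
    theta_bar = theta - lr * grad
    theta_proposal = theta_bar + torch.randn_like(theta_bar) * rw_step_size
    return theta_bar, theta_proposal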

@@ -308,7 +308,7 @@ def run(self):
self.Model.load_state_dict(ProposalStateDict)

#Step 2: Do a backward pass to obtain gradients of model parameters wrt to Theta_proposal
-loss2 = self.GiveMeTheLoss()
+loss2 = self.ReturnLoss()
self.Model.zero_grad()
loss2.backward()
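The second backward pass (with the model loaded with Theta_proposal) exists because the Langevin-gradient proposal is asymmetric: the Metropolis-Hastings acceptance needs the log proposal ratio log q(theta | theta_proposal) - log q(theta_proposal | theta). A minimal sketch of that ratio under the usual isotropic-Gaussian proposal assumption (variable names are illustrative, not taken from this file):

import torch

def log_proposal_ratio(theta, theta_proposal, theta_bar, theta_proposal_bar, rw_step_size):
    # theta_bar          : theta after one Langevin step starting from theta
    # theta_proposal_bar : mean of the reverse move, i.e. one Langevin step
    #                      starting from theta_proposal
    # Both proposal densities are isotropic Gaussians with std rw_step_size,
    # so the normalising constants cancel in the ratio.
    var = rw_step_size ** 2
    log_q_forward = -0.5 * torch.sum((theta_proposal - theta_bar) ** 2) / var
    log_q_backward = -0.5 * torch.sum((theta - theta_proposal_bar) ** 2) / var
    return log_q_backward - log_q_forward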

@@ -358,10 +358,16 @@ def run(self):

#Calculate Likelihood Probability with the Theta_proposal and New Proposals for Miscellaneous Parameters.(Note this is a log probability)
LHProposalProb, infoLH = self.Likelihood(MiscProposalList, Theta_proposal)
+if (len(infoLH) == 0) or (infoLH[0] == None):
+    maxLoss = None
+
+else:
+    if maxLoss < infoLH[0]:
+        maxLoss = infoLH[0]

#print("Likelihood Loss on the Proposed Parameters: ", infoLH[0])

-if maxLoss < infoLH[0]:
-    maxLoss = infoLH[0]

#Calculate Prior Probability with the New Proposals for Misc Parameters and possibly the Theta_proposal too (if that happens, it implies
# that calculation of the prior also depends on the model, which is a highly unlikely case).
# Note this is a log probability.
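Since the likelihood and the prior are both handled as log probabilities, the accept/reject decision that follows is naturally done in log space. A minimal sketch of such a check (dividing the log-likelihood difference by the replica's temperature is an assumption based on the parallel-tempering setup, not a quote of this file):

import math
import random

def accept_proposal(lh_proposal, prior_proposal, lh_current, prior_current,
                    log_proposal_ratio=0.0, temperature=1.0):
    # All likelihood/prior arguments are log probabilities.
    log_alpha = ((lh_proposal - lh_current) / temperature
                 + (prior_proposal - prior_current)
                 + log_proposal_ratio)
    # Accept with probability min(1, exp(log_alpha)).
    return random.random() < math.exp(min(0.0, log_alpha))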
@@ -438,7 +444,8 @@ def run(self):
print("-----> Statistics of {}".format(self.name))
print("{}-->> Temperature: ".format(self.name), self.Temperature)
print("{}-->> Number of Accepts In this Run / {}: {}".format(self.name, self.NumSamples , self.AcceptsInThisRun))
print("{}-->> Maximum Likelihood Loss on Proposed Parameters: ".format(self.name), maxLoss)
if (maxLoss != None):
print("{}-->> Maximum Likelihood Loss on Proposed Parameters: ".format(self.name), maxLoss)
print("{}-->> Current Log Likelihood Prob after the run: ".format(self.name), self.CurrentLikelihoodProb)
print("{}-->> Current Likelihood Loss after the run: ".format(self.name), infoLH[0])
print("Returning from the loop!! of {}".format(self.name))
