author	Edoardo Pasca <edo.paskino@gmail.com>	2019-10-19 21:17:27 +0100
committer	GitHub <noreply@github.com>	2019-10-19 21:17:27 +0100
commit	8839dffdee7ef1fff72eb305bf09fe30917ec238 (patch)
tree	f912b116cf27d280d381cb5820a680ed4b5f262f
parent	fa3ca4ad4f119dad07bcb1d90b6c1b43df921a84 (diff)
added FGP_dTV (#32)
-rw-r--r--	Wrappers/Python/ccpi/plugins/regularisers.py	45
1 file changed, 45 insertions(+), 0 deletions(-)
diff --git a/Wrappers/Python/ccpi/plugins/regularisers.py b/Wrappers/Python/ccpi/plugins/regularisers.py
index 6ed9fb2..ef79231 100644
--- a/Wrappers/Python/ccpi/plugins/regularisers.py
+++ b/Wrappers/Python/ccpi/plugins/regularisers.py
@@ -91,6 +91,51 @@ class FGP_TV(Function):
out = x.copy()
out.fill(res)
return out
+
+class FGP_dTV(Function):
+    '''Plugin wrapper for the CCPi-Regularisation toolkit FGP directional-TV denoiser.'''
+    def __init__(self, refdata, regularisation_parameter, iterations,
+                 tolerance, eta_const, methodTV, nonneg, device='cpu'):
+        # set parameters
+        self.lambdaReg = regularisation_parameter
+        self.iterationsTV = iterations
+        self.tolerance = tolerance
+        self.methodTV = methodTV
+        self.nonnegativity = nonneg
+        self.device = device  # string, 'cpu' or 'gpu'
+        self.refData = np.asarray(refdata.as_array(), dtype=np.float32)
+        self.eta = eta_const
+
+    def __call__(self, x):
+        # evaluate the TV energy of x as the objective value
+        EnergyValTV = TV_ENERGY(np.asarray(x.as_array(), dtype=np.float32),
+                                np.asarray(x.as_array(), dtype=np.float32),
+                                self.lambdaReg, 2)
+        return 0.5 * EnergyValTV[0]
+
+    def proximal(self, x, tau, out=None):
+        pars = {'algorithm': FGP_dTV,
+                'input': np.asarray(x.as_array(), dtype=np.float32),
+                'regularization_parameter': self.lambdaReg * tau,
+                'number_of_iterations': self.iterationsTV,
+                'tolerance_constant': self.tolerance,
+                'methodTV': self.methodTV,
+                'nonneg': self.nonnegativity,
+                'eta_const': self.eta,
+                'refdata': self.refData}
+        # inputData, refdata, regularisation_parameter, iterations,
+        # tolerance_param, eta_const, methodTV, nonneg, device='cpu'
+        res, info = regularisers.FGP_dTV(pars['input'],
+                                         pars['refdata'],
+                                         pars['regularization_parameter'],
+                                         pars['number_of_iterations'],
+                                         pars['tolerance_constant'],
+                                         pars['eta_const'],
+                                         pars['methodTV'],
+                                         pars['nonneg'],
+                                         self.device)
+        if out is not None:
+            out.fill(res)
+        else:
+            out = x.copy()
+            out.fill(res)
+        return out
class SB_TV(Function):
def __init__(self,lambdaReg,iterationsTV,tolerance,methodTV,printing,device):
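
Usage note (not part of the commit): a minimal sketch of how the new FGP_dTV plugin class might be exercised. It assumes ccpi.framework.ImageData can be constructed directly from a NumPy array and that the CCPi-Regularisation toolkit backing ccpi.filters.regularisers is installed; image sizes and parameter values are illustrative only.

# Hypothetical usage sketch for the FGP_dTV plugin added above; not part of this commit.
import numpy as np
from ccpi.framework import ImageData
from ccpi.plugins.regularisers import FGP_dTV

# a noisy image to denoise and a reference image whose edges guide the regulariser
noisy = ImageData(np.random.rand(128, 128).astype(np.float32))
reference = ImageData(np.random.rand(128, 128).astype(np.float32))

# parameter names follow the __init__ signature in the diff above
g = FGP_dTV(refdata=reference,
            regularisation_parameter=0.05,
            iterations=100,
            tolerance=1e-6,
            eta_const=0.01,
            methodTV=0,      # TV variant selector passed through to the toolkit
            nonneg=1,        # ask the toolkit to enforce non-negativity
            device='cpu')

denoised = g.proximal(noisy, tau=1.0)  # one proximal (denoising) step
print(g(denoised))                     # TV energy of the result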