diff --git a/pints/_abc/__init__.py b/pints/_abc/__init__.py index a3e49a892..9982bcdec 100644 --- a/pints/_abc/__init__.py +++ b/pints/_abc/__init__.py @@ -16,10 +16,9 @@ class ABCSampler(pints.Loggable, pints.TunableMethod): :class:`pints.TunableMethod` interfaces. """ + @classmethod def name(self): - """ - Returns this method's full name. - """ + """ Returns this method's full name. """ raise NotImplementedError def ask(self): diff --git a/pints/_abc/_abc_rejection.py b/pints/_abc/_abc_rejection.py index 7833cfe95..8819e8c57 100644 --- a/pints/_abc/_abc_rejection.py +++ b/pints/_abc/_abc_rejection.py @@ -46,6 +46,7 @@ def __init__(self, log_prior): self._xs = None self._ready_for_tell = False + @classmethod def name(self): """ See :meth:`pints.ABCSampler.name()`. """ return 'Rejection ABC' diff --git a/pints/_abc/_abc_smc.py b/pints/_abc/_abc_smc.py index b92d782a6..7014f9cdc 100644 --- a/pints/_abc/_abc_smc.py +++ b/pints/_abc/_abc_smc.py @@ -106,6 +106,7 @@ def __init__(self, log_prior, perturbation_kernel=None, raise ValueError('Provided perturbation kernel must be an instance' ' of pints.LogPrior') + @classmethod def name(self): """ See :meth:`pints.ABCSampler.name()`. """ return 'ABC-SMC' diff --git a/pints/_boundaries.py b/pints/_boundaries.py index c1c96ae93..d5c8c18d0 100644 --- a/pints/_boundaries.py +++ b/pints/_boundaries.py @@ -129,7 +129,7 @@ class RectangularBoundaries(Boundaries): The corresponding upper boundaries """ def __init__(self, lower, upper): - super(RectangularBoundaries, self).__init__() + super().__init__() # Convert to shape (n,) vectors, copy to ensure they remain unchanged self._lower = pints.vector(lower) @@ -205,7 +205,7 @@ class LogPDFBoundaries(Boundaries): within bounds. Anything _above_ the threshold counts as within bounds. 
""" def __init__(self, log_pdf, threshold=-np.inf): - super(LogPDFBoundaries, self).__init__() + super().__init__() # Check log pdf if not isinstance(log_pdf, pints.LogPDF): diff --git a/pints/_core.py b/pints/_core.py index d474b7f25..620fbf279 100644 --- a/pints/_core.py +++ b/pints/_core.py @@ -20,7 +20,7 @@ class ForwardModel(object): """ def __init__(self): - super(ForwardModel, self).__init__() + super().__init__() def n_parameters(self): """ @@ -67,7 +67,7 @@ class ForwardModelS1(ForwardModel): """ def __init__(self): - super(ForwardModelS1, self).__init__() + super().__init__() def simulateS1(self, parameters, times): """ diff --git a/pints/_error_measures.py b/pints/_error_measures.py index ac52bcffa..0017d075b 100644 --- a/pints/_error_measures.py +++ b/pints/_error_measures.py @@ -54,7 +54,7 @@ class ProblemErrorMeasure(ErrorMeasure): :class:`multi-output` problems. """ def __init__(self, problem): - super(ProblemErrorMeasure, self).__init__() + super().__init__() self._problem = problem self._times = problem.times() self._values = problem.values() @@ -96,7 +96,7 @@ class MeanSquaredError(ProblemErrorMeasure): """ def __init__(self, problem, weights=None): - super(MeanSquaredError, self).__init__(problem) + super().__init__(problem) self._ninv = 1.0 / np.prod(self._values.shape) if weights is None: @@ -147,7 +147,7 @@ class NormalisedRootMeanSquaredError(ProblemErrorMeasure): A :class:`pints.SingleOutputProblem`. """ def __init__(self, problem): - super(NormalisedRootMeanSquaredError, self).__init__(problem) + super().__init__(problem) if not isinstance(problem, pints.SingleOutputProblem): raise ValueError( @@ -174,7 +174,7 @@ class ProbabilityBasedError(ErrorMeasure): The LogPDF to base this error on. 
""" def __init__(self, log_pdf): - super(ProbabilityBasedError, self).__init__() + super().__init__() if not isinstance(log_pdf, pints.LogPDF): raise ValueError( 'Given log_pdf must be an instance of pints.LogPDF.') @@ -216,7 +216,7 @@ class RootMeanSquaredError(ProblemErrorMeasure): A :class:`pints.SingleOutputProblem`. """ def __init__(self, problem): - super(RootMeanSquaredError, self).__init__(problem) + super().__init__(problem) if not isinstance(problem, pints.SingleOutputProblem): raise ValueError( @@ -271,7 +271,7 @@ class SumOfErrors(ErrorMeasure): """ def __init__(self, error_measures, weights=None): - super(SumOfErrors, self).__init__() + super().__init__() # Check input arguments if len(error_measures) < 1: @@ -360,7 +360,7 @@ class SumOfSquaresError(ProblemErrorMeasure): """ def __init__(self, problem, weights=None): - super(SumOfSquaresError, self).__init__(problem) + super().__init__(problem) if weights is None: weights = [1] * self._n_outputs diff --git a/pints/_evaluation.py b/pints/_evaluation.py index 9f46404ff..26c991363 100644 --- a/pints/_evaluation.py +++ b/pints/_evaluation.py @@ -193,7 +193,7 @@ def __init__( max_tasks_per_worker=500, n_numpy_threads=1, args=None): - super(ParallelEvaluator, self).__init__(function, args) + super().__init__(function, args) # Determine number of workers if n_workers is None: @@ -437,7 +437,7 @@ class MultiSequentialEvaluator(Evaluator): ``f(x, *args)``. """ def __init__(self, functions, args=None): - super(MultiSequentialEvaluator, self).__init__(functions[0], args) + super().__init__(functions[0], args) # Check functions for function in functions: @@ -473,7 +473,7 @@ class SequentialEvaluator(Evaluator): specified, ``f`` will be called as ``f(x, *args)``. 
""" def __init__(self, function, args=None): - super(SequentialEvaluator, self).__init__(function, args) + super().__init__(function, args) def _evaluate(self, positions): scores = [0] * len(positions) @@ -530,7 +530,7 @@ class _Worker(multiprocessing.Process): def __init__( self, function, args, tasks, results, max_tasks, max_threads, errors, error): - super(_Worker, self).__init__() + super().__init__() self.daemon = True self._function = function self._args = args diff --git a/pints/_log_likelihoods.py b/pints/_log_likelihoods.py index 4627c8bbb..88950f4ed 100644 --- a/pints/_log_likelihoods.py +++ b/pints/_log_likelihoods.py @@ -69,7 +69,7 @@ class AR1LogLikelihood(pints.ProblemLogLikelihood): """ def __init__(self, problem): - super(AR1LogLikelihood, self).__init__(problem) + super().__init__(problem) # Get number of times, number of outputs self._nt = len(self._times) - 1 @@ -149,7 +149,7 @@ class ARMA11LogLikelihood(pints.ProblemLogLikelihood): """ def __init__(self, problem): - super(ARMA11LogLikelihood, self).__init__(problem) + super().__init__(problem) # Get number of times, number of outputs self._nt = len(self._times) - 2 @@ -205,7 +205,7 @@ class CauchyLogLikelihood(pints.ProblemLogLikelihood): """ def __init__(self, problem): - super(CauchyLogLikelihood, self).__init__(problem) + super().__init__(problem) # Get number of times, number of outputs self._nt = len(self._times) @@ -326,7 +326,7 @@ class CensoredGaussianLogLikelihood(pints.ProblemLogLikelihood): """ def __init__(self, problem, lower=None, upper=None): - super(CensoredGaussianLogLikelihood, self).__init__(problem) + super().__init__(problem) # Get number of times, number of outputs self._nt = len(self._times) @@ -617,8 +617,7 @@ class ConstantAndMultiplicativeGaussianLogLikelihood( """ def __init__(self, problem): - super(ConstantAndMultiplicativeGaussianLogLikelihood, self).__init__( - problem) + super().__init__(problem) # Get number of times and number of noise parameters self._nt = 
len(self._times) @@ -819,8 +818,7 @@ class GaussianIntegratedLogUniformLogLikelihood(pints.ProblemLogLikelihood): """ def __init__(self, problem): - super(GaussianIntegratedLogUniformLogLikelihood, - self).__init__(problem) + super().__init__(problem) # Get number of times, number of outputs self._nt = len(self._times) @@ -908,7 +906,7 @@ class GaussianIntegratedUniformLogLikelihood(pints.ProblemLogLikelihood): """ def __init__(self, problem, lower, upper): - super(GaussianIntegratedUniformLogLikelihood, self).__init__(problem) + super().__init__(problem) # Get number of times, number of outputs self._nt = len(self._times) @@ -1012,7 +1010,7 @@ class GaussianKnownSigmaLogLikelihood(pints.ProblemLogLikelihood): """ def __init__(self, problem, sigma): - super(GaussianKnownSigmaLogLikelihood, self).__init__(problem) + super().__init__(problem) # Store counts self._no = problem.n_outputs() @@ -1110,7 +1108,7 @@ class GaussianLogLikelihood(pints.ProblemLogLikelihood): """ def __init__(self, problem): - super(GaussianLogLikelihood, self).__init__(problem) + super().__init__(problem) # Get number of times, number of outputs self._nt = len(self._times) @@ -1169,7 +1167,7 @@ def __init__(self, problem, sigma): warnings.warn( 'The class `pints.KnownNoiseLogLikelihood` is deprecated.' 
' Please use `pints.GaussianKnownSigmaLogLikelihood` instead.') - super(KnownNoiseLogLikelihood, self).__init__(problem, sigma) + super().__init__(problem, sigma) class LogNormalLogLikelihood(pints.ProblemLogLikelihood): @@ -1214,7 +1212,7 @@ class LogNormalLogLikelihood(pints.ProblemLogLikelihood): """ def __init__(self, problem, mean_adjust=False): - super(LogNormalLogLikelihood, self).__init__(problem) + super().__init__(problem) # Get number of times, number of outputs self._nt = len(self._times) @@ -1352,7 +1350,7 @@ class MultiplicativeGaussianLogLikelihood(pints.ProblemLogLikelihood): """ def __init__(self, problem): - super(MultiplicativeGaussianLogLikelihood, self).__init__(problem) + super().__init__(problem) # Get number of times and number of outputs self._nt = len(self._times) @@ -1411,13 +1409,11 @@ def __init__(self, log_likelihood): if not isinstance(log_likelihood, pints.ProblemLogLikelihood): raise ValueError( 'Given log_likelihood must extend pints.ProblemLogLikelihood') - - # Call parent constructor - super(ScaledLogLikelihood, self).__init__(log_likelihood._problem) - - # Store log-likelihood self._log_likelihood = log_likelihood + # Call this only after checking log_likelihood type + super().__init__(log_likelihood._problem) + # Pre-calculate parts self._f = 1.0 / np.prod(self._values.shape) @@ -1465,7 +1461,7 @@ class StudentTLogLikelihood(pints.ProblemLogLikelihood): """ def __init__(self, problem): - super(StudentTLogLikelihood, self).__init__(problem) + super().__init__(problem) # Get number of times, number of outputs self._nt = len(self._times) @@ -1515,4 +1511,4 @@ def __init__(self, problem): warnings.warn( 'The class `pints.UnknownNoiseLogLikelihood` is deprecated.' 
' Please use `pints.GaussianLogLikelihood` instead.') - super(UnknownNoiseLogLikelihood, self).__init__(problem) + super().__init__(problem) diff --git a/pints/_log_pdfs.py b/pints/_log_pdfs.py index 9c11f1e4d..6139368f3 100644 --- a/pints/_log_pdfs.py +++ b/pints/_log_pdfs.py @@ -182,7 +182,7 @@ class PooledLogPDF(LogPDF): pooled=[False, True]) """ def __init__(self, log_pdfs, pooled): - super(PooledLogPDF, self).__init__() + super().__init__() # Check input arguments if len(log_pdfs) < 2: @@ -329,7 +329,7 @@ class ProblemLogLikelihood(LogPDF): """ def __init__(self, problem): - super(ProblemLogLikelihood, self).__init__() + super().__init__() self._problem = problem # Cache some problem variables self._values = problem.values() @@ -364,7 +364,7 @@ class LogPosterior(LogPDF): space. """ def __init__(self, log_likelihood, log_prior): - super(LogPosterior, self).__init__() + super().__init__() # Check arguments if not isinstance(log_prior, LogPrior): @@ -453,7 +453,7 @@ class SumOfIndependentLogPDFs(LogPDF): ]) """ def __init__(self, log_likelihoods): - super(SumOfIndependentLogPDFs, self).__init__() + super().__init__() # Check input arguments if len(log_likelihoods) < 2: diff --git a/pints/_log_priors.py b/pints/_log_priors.py index 1cf55b07e..935fb10df 100644 --- a/pints/_log_priors.py +++ b/pints/_log_priors.py @@ -1053,7 +1053,7 @@ def __init__(self, mean, standard_deviation): warnings.warn( 'The class `pints.NormalLogPrior` is deprecated.' 
' Please use `pints.GaussianLogPrior` instead.') - super(NormalLogPrior, self).__init__(mean, standard_deviation) + super().__init__(mean, standard_deviation) class StudentTLogPrior(pints.LogPrior): diff --git a/pints/_logger.py b/pints/_logger.py index 14c7576cd..709213c23 100644 --- a/pints/_logger.py +++ b/pints/_logger.py @@ -33,7 +33,7 @@ class Logger(object): """ def __init__(self): - super(Logger, self).__init__() + super().__init__() # Log to screen self._stream = sys.stdout diff --git a/pints/_mcmc/__init__.py b/pints/_mcmc/__init__.py index 5c137d5bc..24913e091 100644 --- a/pints/_mcmc/__init__.py +++ b/pints/_mcmc/__init__.py @@ -18,6 +18,7 @@ class MCMCSampler(pints.Loggable, pints.TunableMethod): :class:`pints.TunableMethod` interfaces. """ + @classmethod def name(self): """ Returns this method's full name. @@ -1128,8 +1129,7 @@ def __init__(self, log_pdf, chains, x0, sigma0=None, transformation=None, warnings.warn( 'The class `pints.MCMCSampling` is deprecated.' ' Please use `pints.MCMCController` instead.') - super(MCMCSampling, self).__init__(log_pdf, chains, x0, sigma0, - transformation, method=method) + super().__init__(log_pdf, chains, x0, sigma0, transformation, method) def mcmc_sample(log_pdf, chains, x0, sigma0=None, transformation=None, diff --git a/pints/_mcmc/_adaptive_covariance.py b/pints/_mcmc/_adaptive_covariance.py index c895daf06..ff1b9d0c7 100644 --- a/pints/_mcmc/_adaptive_covariance.py +++ b/pints/_mcmc/_adaptive_covariance.py @@ -27,7 +27,7 @@ class AdaptiveCovarianceMC(pints.SingleChainMCMC): """ def __init__(self, x0, sigma0=None): - super(AdaptiveCovarianceMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) # Current running status, used to initialise on first run and check # that certain methods are only called before or during run. 
diff --git a/pints/_mcmc/_differential_evolution.py b/pints/_mcmc/_differential_evolution.py index feb87c673..e83eff9bd 100644 --- a/pints/_mcmc/_differential_evolution.py +++ b/pints/_mcmc/_differential_evolution.py @@ -44,7 +44,7 @@ class DifferentialEvolutionMCMC(pints.MultiChainMCMC): """ def __init__(self, chains, x0, sigma0=None): - super(DifferentialEvolutionMCMC, self).__init__(chains, x0, sigma0) + super().__init__(chains, x0, sigma0) # Need at least 3 chains if self._n_chains < 3: @@ -176,6 +176,7 @@ def n_hyper_parameters(self): """ See :meth:`TunableMethod.n_hyper_parameters()`. """ return 5 + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. """ return 'Differential Evolution MCMC' diff --git a/pints/_mcmc/_dram_ac.py b/pints/_mcmc/_dram_ac.py index e3d43144c..a1b8c1cf7 100644 --- a/pints/_mcmc/_dram_ac.py +++ b/pints/_mcmc/_dram_ac.py @@ -59,7 +59,7 @@ class DramACMC(pints.AdaptiveCovarianceMC): https://doi.org/10.1007/s11222-006-9438-0 """ def __init__(self, x0, sigma0=None): - super(DramACMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) self._log_lambda = 0 self._n_kernels = 2 # This is fixed! @@ -104,6 +104,7 @@ def _generate_proposal(self): self._sigma[self._proposal_count]) return proposed + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. """ return 'Delayed Rejection Adaptive Metropolis (Dram) MCMC' diff --git a/pints/_mcmc/_dream.py b/pints/_mcmc/_dream.py index 619476db0..fcafe0ebb 100644 --- a/pints/_mcmc/_dream.py +++ b/pints/_mcmc/_dream.py @@ -65,7 +65,7 @@ class DreamMCMC(pints.MultiChainMCMC): """ def __init__(self, chains, x0, sigma0=None): - super(DreamMCMC, self).__init__(chains, x0, sigma0) + super().__init__(chains, x0, sigma0) # Need at least 3 chains if self._n_chains < 3: @@ -214,6 +214,7 @@ def _log_write(self, logger): # logger.log(self._acceptance) # TODO + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. 
""" return 'DiffeRential Evolution Adaptive Metropolis (DREAM) MCMC' diff --git a/pints/_mcmc/_emcee_hammer.py b/pints/_mcmc/_emcee_hammer.py index 3d6f447df..cb707a667 100644 --- a/pints/_mcmc/_emcee_hammer.py +++ b/pints/_mcmc/_emcee_hammer.py @@ -44,7 +44,7 @@ class EmceeHammerMCMC(pints.MultiChainMCMC): """ def __init__(self, chains, x0, sigma0=None): - super(EmceeHammerMCMC, self).__init__(chains, x0, sigma0) + super().__init__(chains, x0, sigma0) # Need at least 3 chains if self._n_chains < 3: @@ -136,6 +136,7 @@ def _initialise(self): # Update sampler state self._running = True + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. """ return 'Emcee Hammer MCMC' diff --git a/pints/_mcmc/_haario_ac.py b/pints/_mcmc/_haario_ac.py index 33bafb84c..b10dac9a1 100644 --- a/pints/_mcmc/_haario_ac.py +++ b/pints/_mcmc/_haario_ac.py @@ -57,7 +57,7 @@ class HaarioACMC(pints.AdaptiveCovarianceMC): Heikki Haario, Eero Saksman, and Johanna Tamminen (2001) Bernoulli. """ def __init__(self, x0, sigma0=None): - super(HaarioACMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) self._log_lambda = 0 def _adapt_internal(self, accepted, log_ratio): @@ -70,6 +70,7 @@ def _generate_proposal(self): return np.random.multivariate_normal( self._current, self._sigma * np.exp(self._log_lambda)) + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. 
""" return 'Haario adaptive covariance MCMC' diff --git a/pints/_mcmc/_haario_bardenet_ac.py b/pints/_mcmc/_haario_bardenet_ac.py index 07fde6622..dc36e7172 100644 --- a/pints/_mcmc/_haario_bardenet_ac.py +++ b/pints/_mcmc/_haario_bardenet_ac.py @@ -57,7 +57,7 @@ class HaarioBardenetACMC(pints.AdaptiveCovarianceMC): https://doi.org/10.2307/3318737 """ def __init__(self, x0, sigma0=None): - super(HaarioBardenetACMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) # Initial log lambda is zero self._log_lambda = 0 @@ -72,6 +72,7 @@ def _generate_proposal(self): return np.random.multivariate_normal( self._current, self._sigma * np.exp(self._log_lambda)) + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. """ return 'Haario-Bardenet adaptive covariance MCMC' @@ -89,5 +90,5 @@ def __init__(self, x0, sigma0=None): warnings.warn( 'The class `pints.AdaptiveCovarianceMCMC` is deprecated.' ' Please use `pints.HaarioBardenetACMC` instead.') - super(AdaptiveCovarianceMCMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) diff --git a/pints/_mcmc/_hamiltonian.py b/pints/_mcmc/_hamiltonian.py index 5840c3dca..3604bc32c 100644 --- a/pints/_mcmc/_hamiltonian.py +++ b/pints/_mcmc/_hamiltonian.py @@ -55,7 +55,7 @@ class HamiltonianMCMC(pints.SingleChainMCMC): Galin Jones, and Xiao-Li Meng. """ def __init__(self, x0, sigma0=None): - super(HamiltonianMCMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) # Set initial state self._running = False @@ -185,6 +185,7 @@ def n_hyper_parameters(self): """ See :meth:`TunableMethod.n_hyper_parameters()`. """ return 2 + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. 
""" return 'Hamiltonian Monte Carlo' diff --git a/pints/_mcmc/_mala.py b/pints/_mcmc/_mala.py index f893ef16d..f524de24c 100644 --- a/pints/_mcmc/_mala.py +++ b/pints/_mcmc/_mala.py @@ -78,7 +78,7 @@ class MALAMCMC(pints.SingleChainMCMC): """ def __init__(self, x0, sigma0=None): - super(MALAMCMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) # Set initial state self._running = False @@ -176,6 +176,7 @@ def _log_write(self, logger): """ See :meth:`Loggable._log_write()`. """ logger.log(self._acceptance) + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. """ return 'Metropolis-Adjusted Langevin Algorithm (MALA)' diff --git a/pints/_mcmc/_metropolis.py b/pints/_mcmc/_metropolis.py index 50871cc57..d59d492ab 100644 --- a/pints/_mcmc/_metropolis.py +++ b/pints/_mcmc/_metropolis.py @@ -36,7 +36,7 @@ class MetropolisRandomWalkMCMC(pints.SingleChainMCMC): """ def __init__(self, x0, sigma0=None): - super(MetropolisRandomWalkMCMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) # Set initial state self._running = False @@ -100,6 +100,7 @@ def _log_write(self, logger): """ See :meth:`Loggable._log_write()`. """ logger.log(self._acceptance) + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. """ return 'Metropolis random walk MCMC' diff --git a/pints/_mcmc/_monomial_gamma_hamiltonian.py b/pints/_mcmc/_monomial_gamma_hamiltonian.py index a68323000..829845cb4 100644 --- a/pints/_mcmc/_monomial_gamma_hamiltonian.py +++ b/pints/_mcmc/_monomial_gamma_hamiltonian.py @@ -77,7 +77,7 @@ class MonomialGammaHamiltonianMCMC(pints.SingleChainMCMC): Carlo by Steve Brooks, Andrew Gelman, Galin Jones, and Xiao-Li Meng. """ def __init__(self, x0, sigma0=None): - super(MonomialGammaHamiltonianMCMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) # Set initial state self._running = False @@ -291,6 +291,7 @@ def mass(self): """ return self._m + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. 
""" return 'Monomial-Gamma Hamiltonian Monte Carlo' diff --git a/pints/_mcmc/_nuts.py b/pints/_mcmc/_nuts.py index 9e5e71102..a1fb3c9c8 100644 --- a/pints/_mcmc/_nuts.py +++ b/pints/_mcmc/_nuts.py @@ -513,7 +513,7 @@ class NoUTurnMCMC(pints.SingleChainMCMC): """ def __init__(self, x0, sigma0=None): - super(NoUTurnMCMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) # hyperparameters self._adaptor = [ @@ -659,6 +659,7 @@ def n_hyper_parameters(self): """ See :meth:`TunableMethod.n_hyper_parameters()`. """ return 1 + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. """ return 'No-U-Turn MCMC' diff --git a/pints/_mcmc/_population.py b/pints/_mcmc/_population.py index be5a8d5d6..26abed716 100644 --- a/pints/_mcmc/_population.py +++ b/pints/_mcmc/_population.py @@ -49,7 +49,7 @@ class PopulationMCMC(pints.SingleChainMCMC): https://doi.org/10.1007/s11222-007-9028-9 """ def __init__(self, x0, sigma0=None): - super(PopulationMCMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) # Set initial state self._running = False @@ -153,6 +153,7 @@ def _log_write(self, logger): logger.log(self._j) logger.log('yes' if self._have_exchanged else 'no') + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. 
""" return 'Population MCMC' diff --git a/pints/_mcmc/_rao_blackwell_ac.py b/pints/_mcmc/_rao_blackwell_ac.py index 69876c690..678c44215 100644 --- a/pints/_mcmc/_rao_blackwell_ac.py +++ b/pints/_mcmc/_rao_blackwell_ac.py @@ -47,7 +47,7 @@ class RaoBlackwellACMC(pints.AdaptiveCovarianceMC): https://doi.org/10.1007/s11222-008-9110-y """ def __init__(self, x0, sigma0=None): - super(RaoBlackwellACMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) # heuristic based on normal approximation self._lambda = (2.38**2) / self._n_parameters @@ -78,6 +78,7 @@ def _generate_proposal(self): return np.random.multivariate_normal( self._current, self._lambda * self._sigma) + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. """ return 'Rao-Blackwell adaptive covariance MCMC' @@ -87,5 +88,5 @@ def tell(self, fx): self._Y = np.copy(self._proposed) self._X = np.copy(self._current) - return super(RaoBlackwellACMC, self).tell(fx) + return super().tell(fx) diff --git a/pints/_mcmc/_relativistic.py b/pints/_mcmc/_relativistic.py index 2c4b73441..16f5b72a8 100644 --- a/pints/_mcmc/_relativistic.py +++ b/pints/_mcmc/_relativistic.py @@ -66,7 +66,7 @@ class RelativisticMCMC(pints.SingleChainMCMC): 2017, Proceedings of Machine Learning Research. """ def __init__(self, x0, sigma0=None): - super(RelativisticMCMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) # Set initial state self._running = False @@ -320,6 +320,7 @@ def n_hyper_parameters(self): """ See :meth:`TunableMethod.n_hyper_parameters()`. """ return 4 + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. 
""" return 'Relativistic MCMC' diff --git a/pints/_mcmc/_slice_doubling.py b/pints/_mcmc/_slice_doubling.py index 62565ede6..1ef107243 100644 --- a/pints/_mcmc/_slice_doubling.py +++ b/pints/_mcmc/_slice_doubling.py @@ -119,7 +119,7 @@ class SliceDoublingMCMC(pints.SingleChainMCMC): """ def __init__(self, x0, sigma0=None): - super(SliceDoublingMCMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) # Set initial state self._x0 = np.asarray(x0, dtype=float) @@ -385,6 +385,7 @@ def expansion_steps(self): """ return self._p + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. """ return 'Slice Sampling - Doubling' diff --git a/pints/_mcmc/_slice_rank_shrinking.py b/pints/_mcmc/_slice_rank_shrinking.py index dfd52c2b8..98436f988 100644 --- a/pints/_mcmc/_slice_rank_shrinking.py +++ b/pints/_mcmc/_slice_rank_shrinking.py @@ -75,7 +75,7 @@ class SliceRankShrinkingMCMC(pints.SingleChainMCMC): """ def __init__(self, x0, sigma0=None): - super(SliceRankShrinkingMCMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) # Set initial state self._x0 = np.asarray(x0, dtype=float) @@ -151,6 +151,7 @@ def current_slice_height(self): """ return self._current_log_y + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. """ return 'Slice Sampling - Covariance-Adaptive: Rank Shrinking.' diff --git a/pints/_mcmc/_slice_stepout.py b/pints/_mcmc/_slice_stepout.py index e675910b9..723d2942d 100644 --- a/pints/_mcmc/_slice_stepout.py +++ b/pints/_mcmc/_slice_stepout.py @@ -130,7 +130,7 @@ class SliceStepoutMCMC(pints.SingleChainMCMC): """ def __init__(self, x0, sigma0=None): - super(SliceStepoutMCMC, self).__init__(x0, sigma0) + super().__init__(x0, sigma0) # Set initial state self._x0 = np.asarray(x0, dtype=float) @@ -420,6 +420,7 @@ def prob_overrelaxed(self): """ return self._prob_overrelaxed + @classmethod def name(self): """ See :meth:`pints.MCMCSampler.name()`. 
""" return 'Slice Sampling - Stepout' diff --git a/pints/_nested/__init__.py b/pints/_nested/__init__.py index c8c24462d..93c26e92d 100644 --- a/pints/_nested/__init__.py +++ b/pints/_nested/__init__.py @@ -93,6 +93,7 @@ def n_hyper_parameters(self): """ See :meth:`TunableMethod.n_hyper_parameters()`. """ raise NotImplementedError + @classmethod def name(self): """ Name of sampler """ raise NotImplementedError diff --git a/pints/_nested/_ellipsoid.py b/pints/_nested/_ellipsoid.py index 2ee3999ab..0fb30baa1 100644 --- a/pints/_nested/_ellipsoid.py +++ b/pints/_nested/_ellipsoid.py @@ -106,7 +106,7 @@ class NestedEllipsoidSampler(pints.NestedSampler): """ def __init__(self, log_prior): - super(NestedEllipsoidSampler, self).__init__(log_prior) + super().__init__(log_prior) # Gaps between updating ellipsoid self.set_ellipsoid_update_gap() @@ -340,6 +340,7 @@ def _draw_from_ellipsoid(self, covmat, cent, npts): return pnts + @classmethod def name(self): """ See :meth:`pints.NestedSampler.name()`. """ return 'Nested ellipsoidal sampler' diff --git a/pints/_nested/_rejection.py b/pints/_nested/_rejection.py index d2888286d..39259f3f9 100644 --- a/pints/_nested/_rejection.py +++ b/pints/_nested/_rejection.py @@ -72,7 +72,7 @@ class NestedRejectionSampler(pints.NestedSampler): https://doi.org/10.1214/06-BA127 """ def __init__(self, log_prior): - super(NestedRejectionSampler, self).__init__(log_prior) + super().__init__(log_prior) self._needs_sensitivities = False @@ -102,6 +102,7 @@ def set_hyper_parameters(self, x): """ self.set_n_active_points(x[0]) + @classmethod def name(self): """ See :meth:`pints.NestedSampler.name()`. 
""" return 'Nested rejection sampler' diff --git a/pints/_optimisers/__init__.py b/pints/_optimisers/__init__.py index 22341ce5f..cedc2213f 100644 --- a/pints/_optimisers/__init__.py +++ b/pints/_optimisers/__init__.py @@ -186,10 +186,9 @@ def f_guessed(self): """ return self.f_best() + @classmethod def name(self): - """ - Returns this method's full name. - """ + """ Returns this method's full name. """ raise NotImplementedError def needs_sensitivities(self): @@ -264,7 +263,7 @@ class PopulationBasedOptimiser(Optimiser): """ def __init__(self, x0, sigma0=None, boundaries=None): - super(PopulationBasedOptimiser, self).__init__(x0, sigma0, boundaries) + super().__init__(x0, sigma0, boundaries) # Set initial population size using heuristic self._population_size = self._suggested_population_size() @@ -1126,7 +1125,7 @@ def __init__( warnings.warn( 'The class `pints.Optimisation` is deprecated.' ' Please use `pints.OptimisationController` instead.') - super(Optimisation, self).__init__( + super().__init__( function, x0, sigma0, boundaries, transformation, method=method) diff --git a/pints/_optimisers/_adam.py b/pints/_optimisers/_adam.py index 3842397b6..1a6cf9e25 100644 --- a/pints/_optimisers/_adam.py +++ b/pints/_optimisers/_adam.py @@ -119,6 +119,7 @@ def _log_write(self, logger): logger.log(self._b1t) logger.log(self._b2t) + @classmethod def name(self): """ See :meth:`Optimiser.name()`. 
""" return 'Adam' diff --git a/pints/_optimisers/_cmaes.py b/pints/_optimisers/_cmaes.py index 6cb55c443..cb474b2fe 100644 --- a/pints/_optimisers/_cmaes.py +++ b/pints/_optimisers/_cmaes.py @@ -55,7 +55,7 @@ class CMAES(pints.PopulationBasedOptimiser): """ def __init__(self, x0, sigma0=None, boundaries=None): - super(CMAES, self).__init__(x0, sigma0, boundaries) + super().__init__(x0, sigma0, boundaries) # 1-D is not supported if len(x0) < 2: @@ -161,6 +161,7 @@ def _initialise(self): # Update optimiser state self._running = True + @classmethod def name(self): """ See :meth:`Optimiser.name()`. """ return 'Covariance Matrix Adaptation Evolution Strategy (CMA-ES)' diff --git a/pints/_optimisers/_cmaes_bare.py b/pints/_optimisers/_cmaes_bare.py index dc6ede94b..05bfe8fae 100644 --- a/pints/_optimisers/_cmaes_bare.py +++ b/pints/_optimisers/_cmaes_bare.py @@ -42,7 +42,7 @@ class BareCMAES(pints.PopulationBasedOptimiser): """ def __init__(self, x0, sigma0=0.1, boundaries=None): - super(BareCMAES, self).__init__(x0, sigma0, boundaries) + super().__init__(x0, sigma0, boundaries) # Set initial state self._running = False @@ -223,6 +223,7 @@ def _initialise(self): # Update optimiser state self._running = True + @classmethod def name(self): """ See :meth:`Optimiser.name()`. """ return 'Bare-bones CMA-ES' diff --git a/pints/_optimisers/_gradient_descent.py b/pints/_optimisers/_gradient_descent.py index ada45c26f..557f30268 100644 --- a/pints/_optimisers/_gradient_descent.py +++ b/pints/_optimisers/_gradient_descent.py @@ -20,7 +20,7 @@ class GradientDescent(pints.Optimiser): """ def __init__(self, x0, sigma0=0.1, boundaries=None): - super(GradientDescent, self).__init__(x0, sigma0, boundaries) + super().__init__(x0, sigma0, boundaries) # Set optimiser state self._running = False @@ -60,6 +60,7 @@ def learning_rate(self): """ Returns this optimiser's learning rate. """ return self._eta + @classmethod def name(self): """ See :meth:`Optimiser.name()`. 
""" return 'Gradient descent' diff --git a/pints/_optimisers/_irpropmin.py b/pints/_optimisers/_irpropmin.py index 53dea6598..197a8988b 100644 --- a/pints/_optimisers/_irpropmin.py +++ b/pints/_optimisers/_irpropmin.py @@ -165,6 +165,7 @@ def min_step_size(self): """ Returns the minimum step size (or ``None`` if not set). """ return self._step_min + @classmethod def name(self): """ See :meth:`Optimiser.name()`. """ return 'iRprop-' diff --git a/pints/_optimisers/_nelder_mead.py b/pints/_optimisers/_nelder_mead.py index 799f6155d..e2f8c5155 100644 --- a/pints/_optimisers/_nelder_mead.py +++ b/pints/_optimisers/_nelder_mead.py @@ -129,7 +129,7 @@ class NelderMead(pints.Optimiser): """ def __init__(self, x0, sigma0=None, boundaries=None): - super(NelderMead, self).__init__(x0, sigma0, boundaries) + super().__init__(x0, sigma0, boundaries) if self._boundaries is not None: warnings.warn( @@ -218,6 +218,7 @@ def f_best(self): """ See: :meth:`pints.Optimiser.f_best()`. """ return self._fs[0] if self._running else np.inf + @classmethod def name(self): """ See: :meth:`pints.Optimiser.name()`. """ return 'Nelder-Mead' diff --git a/pints/_optimisers/_pso.py b/pints/_optimisers/_pso.py index 33d75625e..dea4137b1 100644 --- a/pints/_optimisers/_pso.py +++ b/pints/_optimisers/_pso.py @@ -72,7 +72,7 @@ class PSO(pints.PopulationBasedOptimiser): """ def __init__(self, x0, sigma0=None, boundaries=None): - super(PSO, self).__init__(x0, sigma0, boundaries) + super().__init__(x0, sigma0, boundaries) # Set initial state self._running = False @@ -172,6 +172,7 @@ def _log_write(self, logger): for f in self._fl: logger.log(f) + @classmethod def name(self): """ See :meth:`Optimiser.name()`. 
""" return 'Particle Swarm Optimisation (PSO)' diff --git a/pints/_optimisers/_snes.py b/pints/_optimisers/_snes.py index 7f5ac180b..7a6719e61 100644 --- a/pints/_optimisers/_snes.py +++ b/pints/_optimisers/_snes.py @@ -36,7 +36,7 @@ class SNES(pints.PopulationBasedOptimiser): http://pybrain.org """ def __init__(self, x0, sigma0=None, boundaries=None): - super(SNES, self).__init__(x0, sigma0, boundaries) + super().__init__(x0, sigma0, boundaries) # Set initial state self._running = False @@ -120,6 +120,7 @@ def _initialise(self): # Update optimiser state self._running = True + @classmethod def name(self): """ See :meth:`Optimiser.name()`. """ return 'Seperable Natural Evolution Strategy (SNES)' diff --git a/pints/_optimisers/_xnes.py b/pints/_optimisers/_xnes.py index 2475d451e..8153ecaee 100644 --- a/pints/_optimisers/_xnes.py +++ b/pints/_optimisers/_xnes.py @@ -36,7 +36,7 @@ class XNES(pints.PopulationBasedOptimiser): """ def __init__(self, x0, sigma0=None, boundaries=None): - super(XNES, self).__init__(x0, sigma0, boundaries) + super().__init__(x0, sigma0, boundaries) # Set initial state self._running = False @@ -131,6 +131,7 @@ def _initialise(self): # Update optimiser state self._running = True + @classmethod def name(self): """ See :meth:`Optimiser.name()`. """ return 'Exponential Natural Evolution Strategy (xNES)' diff --git a/pints/_transformation.py b/pints/_transformation.py index f6f3f0772..2162a92bd 100644 --- a/pints/_transformation.py +++ b/pints/_transformation.py @@ -1196,7 +1196,7 @@ class TransformedLogPrior(TransformedLogPDF, pints.LogPrior): A :class:`pints.Transformation`. 
""" def __init__(self, log_prior, transformation): - super(TransformedLogPrior, self).__init__(log_prior, transformation) + super().__init__(log_prior, transformation) def sample(self, n): """ diff --git a/pints/tests/shared.py b/pints/tests/shared.py index afe863298..cbc14fd52 100644 --- a/pints/tests/shared.py +++ b/pints/tests/shared.py @@ -7,7 +7,6 @@ # import io import os -import shutil import sys import tempfile @@ -24,7 +23,7 @@ class StreamCapture(object): Warning: This class is not thread-safe. """ def __init__(self, stdout=True, stderr=False): - super(StreamCapture, self).__init__() + super().__init__() # True if currently capturing self._capturing = False @@ -146,7 +145,7 @@ class SubCapture(object): Warning: This class is not thread-safe. """ def __init__(self, dump_on_error=False): - super(SubCapture, self).__init__() + super().__init__() self._capturing = False self._captured = [] self._dump_on_error = bool(dump_on_error) @@ -287,60 +286,6 @@ def text(self): return ''.join(self._captured) -class TemporaryDirectory(object): - """ - ContextManager that provides a temporary directory to create temporary - files in. Deletes the directory and its contents when the context is - exited. - """ - def __init__(self): - super(TemporaryDirectory, self).__init__() - self._dir = None - - def __enter__(self): - self._dir = os.path.realpath(tempfile.mkdtemp()) - return self - - def path(self, path): - """ - Returns an absolute path to a file or directory name inside this - temporary directory, that can be used to write to. 
- - Example:: - - with TemporaryDirectory() as d: - filename = d.path('test.txt') - with open(filename, 'w') as f: - f.write('Hello') - with open(filename, 'r') as f: - print(f.read()) - """ - if self._dir is None: - raise RuntimeError( - 'TemporaryDirectory.path() can only be called from inside the' - ' context.') - - path = os.path.realpath(os.path.join(self._dir, path)) - if path[0:len(self._dir)] != self._dir: - raise ValueError( - 'Relative path specified to location outside of temporary' - ' directory.') - - return path - - def __exit__(self, exc_type, exc_value, traceback): - try: - shutil.rmtree(self._dir) - finally: - self._dir = None - - def __str__(self): - if self._dir is None: - return '' - else: - return self._dir - - class CircularBoundaries(pints.Boundaries): """ Circular boundaries, to test boundaries that are non-rectangular. @@ -353,7 +298,7 @@ class CircularBoundaries(pints.Boundaries): The radius (in all directions). """ def __init__(self, center, radius=1): - super(CircularBoundaries, self).__init__() + super().__init__() # Check arguments center = pints.vector(center) diff --git a/pints/tests/test_abc_controller.py b/pints/tests/test_abc_controller.py index 6ca494d92..63df06b20 100755 --- a/pints/tests/test_abc_controller.py +++ b/pints/tests/test_abc_controller.py @@ -6,13 +6,16 @@ # released under the BSD 3-clause license. See accompanying LICENSE.md for # copyright notice and full license details. 
# +import os +import tempfile + import pints import pints.toy import pints.toy.stochastic import unittest import numpy as np -from shared import StreamCapture, TemporaryDirectory +from shared import StreamCapture LOG_SCREEN_1 = [ @@ -211,8 +214,8 @@ def test_logging(self): # With output to file np.random.seed(1) - with TemporaryDirectory() as d: - filename = d.path('test.txt') + with tempfile.TemporaryDirectory() as d: + filename = os.path.join(d, 'test.txt') abc = pints.ABCController( self.error_measure, self.log_prior, @@ -241,8 +244,8 @@ def test_logging(self): # With output to CSV file np.random.seed(1) - with TemporaryDirectory() as d: - filename = d.path('test.txt') + with tempfile.TemporaryDirectory() as d: + filename = os.path.join(d, 'test.txt') abc = pints.ABCController( self.error_measure, self.log_prior, method=pints.RejectionABC) abc.set_max_iterations(6) diff --git a/pints/tests/test_error_measures.py b/pints/tests/test_error_measures.py index 83f0a3a0a..67725d8c2 100755 --- a/pints/tests/test_error_measures.py +++ b/pints/tests/test_error_measures.py @@ -54,7 +54,7 @@ def values(self): class BigMiniProblem(MiniProblem): def __init__(self): - super(BigMiniProblem, self).__init__() + super().__init__() self._t = pints.vector([1, 2, 3, 4, 5, 6]) self._v = pints.vector([-1, 2, 3, 4, 5, -6]) @@ -64,7 +64,7 @@ def n_parameters(self): class BadMiniProblem(MiniProblem): def __init__(self, bad_value=np.inf): - super(BadMiniProblem, self).__init__() + super().__init__() self._v = pints.vector([bad_value, 2, -3]) def n_parameters(self): @@ -73,7 +73,7 @@ def n_parameters(self): class BadErrorMeasure(pints.ErrorMeasure): def __init__(self, bad_value=-np.inf): - super(BadErrorMeasure, self).__init__() + super().__init__() self._v = bad_value def n_parameters(self): diff --git a/pints/tests/test_evaluators.py b/pints/tests/test_evaluators.py index b3fc7895e..72bf9d9cd 100755 --- a/pints/tests/test_evaluators.py +++ b/pints/tests/test_evaluators.py @@ -17,7 +17,7 
@@ class TestEvaluators(unittest.TestCase): Tests the evaluator classes and methods. """ def __init__(self, name): - super(TestEvaluators, self).__init__(name) + super().__init__(name) def test_function(self): diff --git a/pints/tests/test_io.py b/pints/tests/test_io.py index 3e8dac99e..eaf8218c6 100755 --- a/pints/tests/test_io.py +++ b/pints/tests/test_io.py @@ -1,19 +1,19 @@ #!/usr/bin/env python3 # -# Tests the Pints io methods. +# Tests the PINTS io methods. # # This file is part of PINTS (https://github.com/pints-team/pints/) which is # released under the BSD 3-clause license. See accompanying LICENSE.md for # copyright notice and full license details. # import os +import tempfile + import pints import pints.io import numpy as np import unittest -from shared import TemporaryDirectory - class TestIO(unittest.TestCase): """ @@ -34,9 +34,9 @@ def test_load_save_samples(self): chain2.append(list(row)) # Check saving and loading - with TemporaryDirectory() as d: + with tempfile.TemporaryDirectory() as d: # Single chain - filename = d.path('test.csv') + filename = os.path.join(d, 'test.csv') pints.io.save_samples(filename, chain0) self.assertTrue(os.path.isfile(filename)) test0 = pints.io.load_samples(filename) @@ -45,11 +45,11 @@ def test_load_save_samples(self): self.assertFalse(chain0 is test0) # Multiple chains - filename = d.path('multi.csv') + filename = os.path.join(d, 'multi.csv') pints.io.save_samples(filename, chain0, chain1, chain2) - self.assertTrue(os.path.isfile(d.path('multi_0.csv'))) - self.assertTrue(os.path.isfile(d.path('multi_1.csv'))) - self.assertTrue(os.path.isfile(d.path('multi_2.csv'))) + self.assertTrue(os.path.isfile(os.path.join(d, 'multi_0.csv'))) + self.assertTrue(os.path.isfile(os.path.join(d, 'multi_1.csv'))) + self.assertTrue(os.path.isfile(os.path.join(d, 'multi_2.csv'))) test0, test1, test2 = pints.io.load_samples(filename, 3) self.assertEqual(chain0.shape, test0.shape) self.assertTrue(np.all(chain0 == test0)) @@ -78,7 +78,7 @@ 
def test_load_save_samples(self): self.assertRaisesRegex( ValueError, 'integer greater than zero', pints.io.load_samples, filename, 0) - filename = d.path('x.csv') + filename = os.path.join(d, 'x.csv') self.assertRaises( FileNotFoundError, pints.io.load_samples, filename) self.assertRaises( diff --git a/pints/tests/test_logger.py b/pints/tests/test_logger.py index b13e6c698..7acf17079 100755 --- a/pints/tests/test_logger.py +++ b/pints/tests/test_logger.py @@ -8,10 +8,12 @@ # import os import sys -import pints +import tempfile import unittest -from shared import StreamCapture, TemporaryDirectory +import pints + +from shared import StreamCapture data = [ @@ -175,8 +177,8 @@ def test_file_only_fields_hidden_on_screen(self): def test_file_writing_txt(self): # Log with file-only fields, and shorter name, and file with StreamCapture() as c: - with TemporaryDirectory() as d: - filename = d.path('test.txt') + with tempfile.TemporaryDirectory() as d: + filename = os.path.join(d, 'test.txt') log = pints.Logger() log.set_filename(filename) log.add_counter('#', width=2) @@ -195,8 +197,8 @@ def test_file_writing_txt(self): def test_file_writing_csv(self): # Repeat in csv mode with StreamCapture() as c: - with TemporaryDirectory() as d: - filename = d.path('test.csv') + with tempfile.TemporaryDirectory() as d: + filename = os.path.join(d, 'test.csv') log = pints.Logger() log.set_filename(filename, csv=True) log.add_counter('#', width=2) @@ -215,8 +217,8 @@ def test_file_writing_csv(self): def test_file_writing_no_screen_csv(self): # Repeat without screen output with StreamCapture() as c: - with TemporaryDirectory() as d: - filename = d.path('test.csv') + with tempfile.TemporaryDirectory() as d: + filename = os.path.join(d, 'test.csv') log = pints.Logger() log.set_filename(filename, csv=True) log.set_stream(None) @@ -236,8 +238,8 @@ def test_file_writing_no_screen_csv(self): def test_file_writing_no_screen_txt(self): # Repeat without screen output, outside of csv mode with 
StreamCapture() as c: - with TemporaryDirectory() as d: - filename = d.path('test.csv') + with tempfile.TemporaryDirectory() as d: + filename = os.path.join(d, 'test.csv') log = pints.Logger() log.set_filename(filename, csv=False) log.set_stream(None) @@ -256,8 +258,8 @@ def test_file_writing_no_screen_txt(self): # Unset file output with StreamCapture() as c: - with TemporaryDirectory() as d: - filename = d.path('test.csv') + with tempfile.TemporaryDirectory() as d: + filename = os.path.join(d, 'test.csv') log = pints.Logger() log.set_filename(filename, csv=False) log.set_filename(None) @@ -285,8 +287,8 @@ def test_no_output(self): # Repeat on stderr with StreamCapture(stdout=True, stderr=True) as c: - with TemporaryDirectory() as d: - filename = d.path('test.csv') + with tempfile.TemporaryDirectory() as d: + filename = os.path.join(d, 'test.csv') log = pints.Logger() log.set_filename(filename, csv=False) log.set_stream(sys.stderr) diff --git a/pints/tests/test_mcmc_controller.py b/pints/tests/test_mcmc_controller.py index 5c213a9e8..557aebc3a 100755 --- a/pints/tests/test_mcmc_controller.py +++ b/pints/tests/test_mcmc_controller.py @@ -7,16 +7,19 @@ # copyright notice and full license details. 
# import os -import pints -import pints.io -import pints.toy +import tempfile import unittest import unittest.mock +import warnings + import numpy as np import numpy.testing as npt -import warnings -from shared import StreamCapture, TemporaryDirectory +import pints +import pints.io +import pints.toy + +from shared import StreamCapture debug = False @@ -574,8 +577,8 @@ def test_logging(self): # With output to file np.random.seed(1) with StreamCapture() as capture: - with TemporaryDirectory() as d: - filename = d.path('test.txt') + with tempfile.TemporaryDirectory() as d: + filename = os.path.join(d, 'test.txt') mcmc = pints.MCMCController(self.log_posterior, nchains, xs) mcmc.set_initial_phase_iterations(5) mcmc.set_max_iterations(10) @@ -951,15 +954,15 @@ def test_writing_chains_only(self): mcmc.set_log_to_file(False) with StreamCapture() as c: - with TemporaryDirectory() as d: - cpath = d.path('chain.csv') - p0 = d.path('chain_0.csv') - p1 = d.path('chain_1.csv') - p2 = d.path('chain_2.csv') - epath = d.path('evals.csv') - p3 = d.path('evals_0.csv') - p4 = d.path('evals_1.csv') - p5 = d.path('evals_2.csv') + with tempfile.TemporaryDirectory() as d: + cpath = os.path.join(d, 'chain.csv') + p0 = os.path.join(d, 'chain_0.csv') + p1 = os.path.join(d, 'chain_1.csv') + p2 = os.path.join(d, 'chain_2.csv') + epath = os.path.join(d, 'evals.csv') + p3 = os.path.join(d, 'evals_0.csv') + p4 = os.path.join(d, 'evals_1.csv') + p5 = os.path.join(d, 'evals_2.csv') # Test files aren't created before mcmc runs mcmc.set_chain_filename(cpath) @@ -1005,15 +1008,15 @@ def test_writing_chains_only(self): mcmc.set_log_to_file(False) with StreamCapture() as c: - with TemporaryDirectory() as d: - cpath = d.path('chain.csv') - p0 = d.path('chain_0.csv') - p1 = d.path('chain_1.csv') - p2 = d.path('chain_2.csv') - epath = d.path('evals.csv') - p3 = d.path('evals_0.csv') - p4 = d.path('evals_1.csv') - p5 = d.path('evals_2.csv') + with tempfile.TemporaryDirectory() as d: + cpath = 
os.path.join(d, 'chain.csv') + p0 = os.path.join(d, 'chain_0.csv') + p1 = os.path.join(d, 'chain_1.csv') + p2 = os.path.join(d, 'chain_2.csv') + epath = os.path.join(d, 'evals.csv') + p3 = os.path.join(d, 'evals_0.csv') + p4 = os.path.join(d, 'evals_1.csv') + p5 = os.path.join(d, 'evals_2.csv') # Test files aren't created before mcmc runs mcmc.set_chain_filename(cpath) @@ -1067,15 +1070,15 @@ def test_writing_chains_only_no_memory_single(self): mcmc.set_chain_storage(False) with StreamCapture() as c: - with TemporaryDirectory() as d: - cpath = d.path('chain.csv') - p0 = d.path('chain_0.csv') - p1 = d.path('chain_1.csv') - p2 = d.path('chain_2.csv') - epath = d.path('evals.csv') - p3 = d.path('evals_0.csv') - p4 = d.path('evals_1.csv') - p5 = d.path('evals_2.csv') + with tempfile.TemporaryDirectory() as d: + cpath = os.path.join(d, 'chain.csv') + p0 = os.path.join(d, 'chain_0.csv') + p1 = os.path.join(d, 'chain_1.csv') + p2 = os.path.join(d, 'chain_2.csv') + epath = os.path.join(d, 'evals.csv') + p3 = os.path.join(d, 'evals_0.csv') + p4 = os.path.join(d, 'evals_1.csv') + p5 = os.path.join(d, 'evals_2.csv') # Test files aren't created before mcmc runs mcmc.set_chain_filename(cpath) @@ -1126,15 +1129,15 @@ def test_writing_chains_only_no_memory_single(self): mcmc.set_chain_storage(False) with StreamCapture() as c: - with TemporaryDirectory() as d: - cpath = d.path('chain.csv') - p0 = d.path('chain_0.csv') - p1 = d.path('chain_1.csv') - p2 = d.path('chain_2.csv') - epath = d.path('evals.csv') - p3 = d.path('evals_0.csv') - p4 = d.path('evals_1.csv') - p5 = d.path('evals_2.csv') + with tempfile.TemporaryDirectory() as d: + cpath = os.path.join(d, 'chain.csv') + p0 = os.path.join(d, 'chain_0.csv') + p1 = os.path.join(d, 'chain_1.csv') + p2 = os.path.join(d, 'chain_2.csv') + epath = os.path.join(d, 'evals.csv') + p3 = os.path.join(d, 'evals_0.csv') + p4 = os.path.join(d, 'evals_1.csv') + p5 = os.path.join(d, 'evals_2.csv') # Test files aren't created before mcmc runs 
mcmc.set_chain_filename(cpath) @@ -1193,15 +1196,15 @@ def test_writing_chains_only_no_memory_multi(self): mcmc.set_chain_storage(False) with StreamCapture() as c: - with TemporaryDirectory() as d: - cpath = d.path('chain.csv') - p0 = d.path('chain_0.csv') - p1 = d.path('chain_1.csv') - p2 = d.path('chain_2.csv') - epath = d.path('evals.csv') - p3 = d.path('evals_0.csv') - p4 = d.path('evals_1.csv') - p5 = d.path('evals_2.csv') + with tempfile.TemporaryDirectory() as d: + cpath = os.path.join(d, 'chain.csv') + p0 = os.path.join(d, 'chain_0.csv') + p1 = os.path.join(d, 'chain_1.csv') + p2 = os.path.join(d, 'chain_2.csv') + epath = os.path.join(d, 'evals.csv') + p3 = os.path.join(d, 'evals_0.csv') + p4 = os.path.join(d, 'evals_1.csv') + p5 = os.path.join(d, 'evals_2.csv') # Test files aren't created before mcmc runs mcmc.set_chain_filename(cpath) @@ -1248,15 +1251,15 @@ def test_writing_priors_and_likelihoods(self): mcmc.set_log_to_file(False) with StreamCapture() as c: - with TemporaryDirectory() as d: - cpath = d.path('chain.csv') - p0 = d.path('chain_0.csv') - p1 = d.path('chain_1.csv') - p2 = d.path('chain_2.csv') - epath = d.path('evals.csv') - p3 = d.path('evals_0.csv') - p4 = d.path('evals_1.csv') - p5 = d.path('evals_2.csv') + with tempfile.TemporaryDirectory() as d: + cpath = os.path.join(d, 'chain.csv') + p0 = os.path.join(d, 'chain_0.csv') + p1 = os.path.join(d, 'chain_1.csv') + p2 = os.path.join(d, 'chain_2.csv') + epath = os.path.join(d, 'evals.csv') + p3 = os.path.join(d, 'evals_0.csv') + p4 = os.path.join(d, 'evals_1.csv') + p5 = os.path.join(d, 'evals_2.csv') # Test files aren't created before mcmc runs mcmc.set_chain_filename(None) @@ -1311,15 +1314,15 @@ def test_writing_chains_likelihoods_and_priors_single(self): mcmc.set_log_to_file(False) with StreamCapture() as c: - with TemporaryDirectory() as d: - cpath = d.path('chain.csv') - p0 = d.path('chain_0.csv') - p1 = d.path('chain_1.csv') - p2 = d.path('chain_2.csv') - epath = d.path('evals.csv') - 
p3 = d.path('evals_0.csv') - p4 = d.path('evals_1.csv') - p5 = d.path('evals_2.csv') + with tempfile.TemporaryDirectory() as d: + cpath = os.path.join(d, 'chain.csv') + p0 = os.path.join(d, 'chain_0.csv') + p1 = os.path.join(d, 'chain_1.csv') + p2 = os.path.join(d, 'chain_2.csv') + epath = os.path.join(d, 'evals.csv') + p3 = os.path.join(d, 'evals_0.csv') + p4 = os.path.join(d, 'evals_1.csv') + p5 = os.path.join(d, 'evals_2.csv') # Test files aren't created before mcmc runs mcmc.set_chain_filename(cpath) @@ -1379,15 +1382,15 @@ def test_writing_chains_likelihoods_and_priors_multi(self): mcmc.set_log_to_file(False) with StreamCapture() as c: - with TemporaryDirectory() as d: - cpath = d.path('chain.csv') - p0 = d.path('chain_0.csv') - p1 = d.path('chain_1.csv') - p2 = d.path('chain_2.csv') - epath = d.path('evals.csv') - p3 = d.path('evals_0.csv') - p4 = d.path('evals_1.csv') - p5 = d.path('evals_2.csv') + with tempfile.TemporaryDirectory() as d: + cpath = os.path.join(d, 'chain.csv') + p0 = os.path.join(d, 'chain_0.csv') + p1 = os.path.join(d, 'chain_1.csv') + p2 = os.path.join(d, 'chain_2.csv') + epath = os.path.join(d, 'evals.csv') + p3 = os.path.join(d, 'evals_0.csv') + p4 = os.path.join(d, 'evals_1.csv') + p5 = os.path.join(d, 'evals_2.csv') # Test files aren't created before mcmc runs mcmc.set_chain_filename(cpath) @@ -1446,15 +1449,15 @@ def test_writing_chains_and_likelihoods_single(self): mcmc.set_log_to_file(False) with StreamCapture() as c: - with TemporaryDirectory() as d: - cpath = d.path('chain.csv') - p0 = d.path('chain_0.csv') - p1 = d.path('chain_1.csv') - p2 = d.path('chain_2.csv') - epath = d.path('evals.csv') - p3 = d.path('evals_0.csv') - p4 = d.path('evals_1.csv') - p5 = d.path('evals_2.csv') + with tempfile.TemporaryDirectory() as d: + cpath = os.path.join(d, 'chain.csv') + p0 = os.path.join(d, 'chain_0.csv') + p1 = os.path.join(d, 'chain_1.csv') + p2 = os.path.join(d, 'chain_2.csv') + epath = os.path.join(d, 'evals.csv') + p3 = 
os.path.join(d, 'evals_0.csv') + p4 = os.path.join(d, 'evals_1.csv') + p5 = os.path.join(d, 'evals_2.csv') # Test files aren't created before mcmc runs mcmc.set_chain_filename(cpath) @@ -1510,15 +1513,15 @@ def test_writing_chains_likelihoods_and_priors_one_chain(self): mcmc.set_log_to_file(False) with StreamCapture() as c: - with TemporaryDirectory() as d: - cpath = d.path('chain.csv') - p0 = d.path('chain_0.csv') - p1 = d.path('chain_1.csv') - p2 = d.path('chain_2.csv') - epath = d.path('evals.csv') - p3 = d.path('evals_0.csv') - p4 = d.path('evals_1.csv') - p5 = d.path('evals_2.csv') + with tempfile.TemporaryDirectory() as d: + cpath = os.path.join(d, 'chain.csv') + p0 = os.path.join(d, 'chain_0.csv') + p1 = os.path.join(d, 'chain_1.csv') + p2 = os.path.join(d, 'chain_2.csv') + epath = os.path.join(d, 'evals.csv') + p3 = os.path.join(d, 'evals_0.csv') + p4 = os.path.join(d, 'evals_1.csv') + p5 = os.path.join(d, 'evals_2.csv') # Test files aren't created before mcmc runs mcmc.set_chain_filename(cpath) @@ -1575,15 +1578,15 @@ def test_disabling_disk_storage(self): mcmc.set_log_to_file(False) with StreamCapture() as c: - with TemporaryDirectory() as d: - cpath = d.path('chain.csv') - p0 = d.path('chain_0.csv') - p1 = d.path('chain_1.csv') - p2 = d.path('chain_2.csv') - epath = d.path('evals.csv') - p3 = d.path('evals_0.csv') - p4 = d.path('evals_1.csv') - p5 = d.path('evals_2.csv') + with tempfile.TemporaryDirectory() as d: + cpath = os.path.join(d, 'chain.csv') + p0 = os.path.join(d, 'chain_0.csv') + p1 = os.path.join(d, 'chain_1.csv') + p2 = os.path.join(d, 'chain_2.csv') + epath = os.path.join(d, 'evals.csv') + p3 = os.path.join(d, 'evals_0.csv') + p4 = os.path.join(d, 'evals_1.csv') + p5 = os.path.join(d, 'evals_2.csv') # Test files aren't created before mcmc runs mcmc.set_chain_filename(cpath) @@ -1754,13 +1757,13 @@ def go(self, chains): sampler.set_chain(chains[i]) # Run, while logging to disk - with TemporaryDirectory() as d: + with 
tempfile.TemporaryDirectory() as d: # Store chains - chain_path = d.path('chain.csv') + chain_path = os.path.join(d, 'chain.csv') mcmc.set_chain_filename(chain_path) # Store log pdfs - evals_path = d.path('evals.csv') + evals_path = os.path.join(d, 'evals.csv') mcmc.set_log_pdf_filename(evals_path) # Run @@ -1936,13 +1939,13 @@ def go(self, chains): mcmc.sampler().set_chains(chains) # Run, while logging to disk - with TemporaryDirectory() as d: + with tempfile.TemporaryDirectory() as d: # Store chains - chain_path = d.path('chain.csv') + chain_path = os.path.join(d, 'chain.csv') mcmc.set_chain_filename(chain_path) # Store log pdfs - evals_path = d.path('evals.csv') + evals_path = os.path.join(d, 'evals.csv') mcmc.set_log_pdf_filename(evals_path) # Run diff --git a/pints/tests/test_nested_controller.py b/pints/tests/test_nested_controller.py index e6efdfe65..8b2c174ed 100755 --- a/pints/tests/test_nested_controller.py +++ b/pints/tests/test_nested_controller.py @@ -6,14 +6,17 @@ # released under the BSD 3-clause license. See accompanying LICENSE.md for # copyright notice and full license details. # +import os import re +import tempfile import unittest + import numpy as np import pints import pints.toy -from shared import StreamCapture, TemporaryDirectory +from shared import StreamCapture class TestNestedController(unittest.TestCase): @@ -169,8 +172,8 @@ def test_logging(self): # Log to file with StreamCapture() as c: - with TemporaryDirectory() as d: - filename = d.path('test.txt') + with tempfile.TemporaryDirectory() as d: + filename = os.path.join(d, 'test.txt') sampler = pints.NestedController( self.log_likelihood, self.log_prior) sampler.set_n_posterior_samples(2) diff --git a/pints/tests/test_opt_controller.py b/pints/tests/test_opt_controller.py index bd59ac0e4..719e2c722 100755 --- a/pints/tests/test_opt_controller.py +++ b/pints/tests/test_opt_controller.py @@ -6,6 +6,8 @@ # released under the BSD 3-clause license. 
See accompanying LICENSE.md for # copyright notice and full license details. # +import os +import tempfile import unittest import warnings @@ -14,7 +16,7 @@ import pints import pints.toy -from shared import StreamCapture, TemporaryDirectory +from shared import StreamCapture debug = False method = pints.XNES @@ -44,6 +46,7 @@ def __init__(self, x0, sigma0=None, boundaries=None): def ask(self): return np.array(self.xs[self._i: self._i + self.np]) + @classmethod def name(self): return 'List1D' @@ -105,8 +108,8 @@ def cb(i, opt): opt.set_callback(cb) opt.set_log_to_screen(False) opt.set_log_interval(1) - with TemporaryDirectory() as d: - p = d.path('out.csv') + with tempfile.TemporaryDirectory() as d: + p = os.path.join(d, 'out.csv') opt.set_log_to_file(p, csv=True) x1, f1 = opt.run() csv = np.genfromtxt(p, delimiter=',', skip_header=1)[:-1] diff --git a/pints/tests/test_opt_pso.py b/pints/tests/test_opt_pso.py index a3c73002f..7bb7daeb9 100755 --- a/pints/tests/test_opt_pso.py +++ b/pints/tests/test_opt_pso.py @@ -6,14 +6,17 @@ # released under the BSD 3-clause license. See accompanying LICENSE.md for # copyright notice and full license details. 
# +import os import re +import tempfile import unittest + import numpy as np import pints import pints.toy -from shared import StreamCapture, TemporaryDirectory, CircularBoundaries +from shared import StreamCapture, CircularBoundaries debug = False @@ -145,8 +148,8 @@ def test_logging(self): opt = pints.OptimisationController(r, x, s, b, method=method) opt.set_max_iterations(10) with StreamCapture() as c: - with TemporaryDirectory() as d: - filename = d.path('test.txt') + with tempfile.TemporaryDirectory() as d: + filename = os.path.join(d, 'test.txt') opt.set_log_to_screen(False) opt.set_log_to_file(filename) opt.run() diff --git a/pints/tests/test_test_shared.py b/pints/tests/test_test_shared.py index e0ce7e243..127515313 100755 --- a/pints/tests/test_test_shared.py +++ b/pints/tests/test_test_shared.py @@ -6,13 +6,12 @@ # released under the BSD 3-clause license. See accompanying LICENSE.md for # copyright notice and full license details. # -import os import sys import unittest import numpy as np -from shared import StreamCapture, TemporaryDirectory, UnitCircleBoundaries2D +from shared import StreamCapture, UnitCircleBoundaries2D class TestSharedTestModule(unittest.TestCase): @@ -49,33 +48,6 @@ def test_stream_capture(self): sys.stdout.write(t2) self.assertEqual(c.text(), (tt, et)) - def test_temporary_directory(self): - # Tests the temporary directory class. 
- - with TemporaryDirectory() as d: - # Test dir creation - tempdir = d.path('') - self.assertTrue(os.path.isdir(tempdir)) - - # Test file creation - text = 'Hello\nWorld' - filename = d.path('test.txt') - with open(filename, 'w') as f: - f.write(text) - with open(filename, 'r') as f: - self.assertTrue(f.read() == text) - self.assertTrue(os.path.isfile(filename)) - - # Test invalid file creation - self.assertRaises(ValueError, d.path, '../illegal.txt') - - # Test file and dir removal - self.assertFalse(os.path.isfile(filename)) - self.assertFalse(os.path.isdir(tempdir)) - - # Test runtime error when used outside of context - self.assertRaises(RuntimeError, d.path, 'hello.txt') - def test_unit_circle_boundaries_2d(self): # Tests the 2d unit circle boundaries used in composed boundaries # testing. @@ -94,7 +66,7 @@ def test_unit_circle_boundaries_2d(self): self.assertTrue(c.check([-1 + 1e-12, 0])) self.assertTrue(c.check([0, 1 - 1e-12])) self.assertTrue(c.check([0, -1 + 1e-12])) - x, y = np.cos(0.123), np.sin(0.123) + x, y = np.cos(0.123), np.sin(0.123) - 1e9 self.assertFalse(c.check([x, y])) xs = c.sample(100) self.assertEqual(xs.shape, (100, 2)) diff --git a/pints/tests/test_timer.py b/pints/tests/test_timer.py index 78e31c0cb..5eaf65d87 100755 --- a/pints/tests/test_timer.py +++ b/pints/tests/test_timer.py @@ -16,7 +16,7 @@ class TestTimer(unittest.TestCase): Tests the basic methods of the Timer class. """ def __init__(self, name): - super(TestTimer, self).__init__(name) + super().__init__(name) def test_timing(self): # Test the time() and reset() methods. 
diff --git a/pints/toy/_constant_model.py b/pints/toy/_constant_model.py index 4041cc1bb..8854383f8 100644 --- a/pints/toy/_constant_model.py +++ b/pints/toy/_constant_model.py @@ -51,7 +51,7 @@ class ConstantModel(pints.ForwardModelS1): """ def __init__(self, n, force_multi_output=False): - super(ConstantModel, self).__init__() + super().__init__() n = int(n) if n < 1: diff --git a/pints/toy/_fitzhugh_nagumo_model.py b/pints/toy/_fitzhugh_nagumo_model.py index d1573fb88..e94a26b9f 100644 --- a/pints/toy/_fitzhugh_nagumo_model.py +++ b/pints/toy/_fitzhugh_nagumo_model.py @@ -66,7 +66,7 @@ class FitzhughNagumoModel(ToyODEModel, pints.ForwardModelS1): """ def __init__(self, y0=None): - super(FitzhughNagumoModel, self).__init__() + super().__init__() # Check initial values if y0 is None: diff --git a/pints/toy/_goodwin_oscillator_model.py b/pints/toy/_goodwin_oscillator_model.py index bf213c9ab..71b18d10b 100644 --- a/pints/toy/_goodwin_oscillator_model.py +++ b/pints/toy/_goodwin_oscillator_model.py @@ -45,7 +45,7 @@ class GoodwinOscillatorModel(ToyODEModel, pints.ForwardModelS1): Computational Statistics and Data Analysis. 
""" def __init__(self): - super(GoodwinOscillatorModel, self).__init__() + super().__init__() self._y0 = [0.0054, 0.053, 1.93] def _dfdp(self, state, time, parameters): diff --git a/pints/toy/_hes1_michaelis_menten.py b/pints/toy/_hes1_michaelis_menten.py index 765e9cd11..bf67ec842 100644 --- a/pints/toy/_hes1_michaelis_menten.py +++ b/pints/toy/_hes1_michaelis_menten.py @@ -146,7 +146,7 @@ def set_m0(self, m0): if m0 < 0: raise ValueError('Initial condition cannot be negative.') y0 = [m0, self._p0[0], self._p0[1]] - super(Hes1Model, self).set_initial_conditions(y0) + super().set_initial_conditions(y0) def set_fixed_parameters(self, k): """ diff --git a/pints/toy/_hh_ik_model.py b/pints/toy/_hh_ik_model.py index da7d2433b..66d0212a9 100644 --- a/pints/toy/_hh_ik_model.py +++ b/pints/toy/_hh_ik_model.py @@ -79,7 +79,7 @@ class HodgkinHuxleyIKModel(pints.ForwardModel, ToyModel): """ def __init__(self, initial_condition=0.3): - super(HodgkinHuxleyIKModel, self).__init__() + super().__init__() # Initial conditions self._n0 = float(initial_condition) diff --git a/pints/toy/_logistic_model.py b/pints/toy/_logistic_model.py index 52e7e3dc6..17a91498e 100644 --- a/pints/toy/_logistic_model.py +++ b/pints/toy/_logistic_model.py @@ -42,7 +42,7 @@ class LogisticModel(pints.ForwardModelS1, ToyModel): """ def __init__(self, initial_population_size=2): - super(LogisticModel, self).__init__() + super().__init__() self._p0 = float(initial_population_size) if self._p0 < 0: raise ValueError('Population size cannot be negative.') diff --git a/pints/toy/_repressilator_model.py b/pints/toy/_repressilator_model.py index 669be2880..fa337f139 100644 --- a/pints/toy/_repressilator_model.py +++ b/pints/toy/_repressilator_model.py @@ -64,7 +64,7 @@ class RepressilatorModel(pints.ForwardModel, ToyModel): """ def __init__(self, y0=None): - super(RepressilatorModel, self).__init__() + super().__init__() # Check initial values if y0 is None: diff --git a/pints/toy/_sho_model.py 
b/pints/toy/_sho_model.py index dd92003c8..279672a58 100644 --- a/pints/toy/_sho_model.py +++ b/pints/toy/_sho_model.py @@ -36,7 +36,7 @@ class SimpleHarmonicOscillatorModel(pints.ForwardModelS1, ToyModel): .. [1] https://en.wikipedia.org/wiki/Simple_harmonic_motion """ def __init__(self): - super(SimpleHarmonicOscillatorModel, self).__init__() + super().__init__() def n_parameters(self): """ See :meth:`pints.ForwardModel.n_parameters()`. """ diff --git a/pints/toy/_sir_model.py b/pints/toy/_sir_model.py index 4faf95e1b..23577d28c 100644 --- a/pints/toy/_sir_model.py +++ b/pints/toy/_sir_model.py @@ -72,7 +72,7 @@ class SIRModel(pints.ForwardModel, ToyModel): """ def __init__(self, y0=None): - super(SIRModel, self).__init__() + super().__init__() # Check initial values if y0 is None: diff --git a/pints/toy/stochastic/_degradation_model.py b/pints/toy/stochastic/_degradation_model.py index e6b0ecc54..33095c7d2 100644 --- a/pints/toy/stochastic/_degradation_model.py +++ b/pints/toy/stochastic/_degradation_model.py @@ -29,8 +29,7 @@ class DegradationModel(MarkovJumpModel): def __init__(self, initial_molecule_count=20): V = [[-1]] init_list = [initial_molecule_count] - super(DegradationModel, self).__init__( - init_list, V, self._propensities) + super().__init__(init_list, V, self._propensities) @staticmethod def _propensities(xs, ks): diff --git a/pints/toy/stochastic/_logistic_model.py b/pints/toy/stochastic/_logistic_model.py index 10df3eda8..10fb098ef 100644 --- a/pints/toy/stochastic/_logistic_model.py +++ b/pints/toy/stochastic/_logistic_model.py @@ -47,8 +47,7 @@ class LogisticModel(MarkovJumpModel): def __init__(self, initial_molecule_count=50): V = [[1]] init_list = [initial_molecule_count] - super(LogisticModel, self).__init__(init_list, - V, self._propensities) + super().__init__(init_list, V, self._propensities) def n_parameters(self): """ diff --git a/pints/toy/stochastic/_markov_jump_model.py b/pints/toy/stochastic/_markov_jump_model.py index 
324c07b45..2aaf73e53 100644 --- a/pints/toy/stochastic/_markov_jump_model.py +++ b/pints/toy/stochastic/_markov_jump_model.py @@ -76,7 +76,7 @@ class MarkovJumpModel(pints.ForwardModel, ToyModel): https://doi.org/10.1016/0021-9991(76)90041-3 """ def __init__(self, x0, V, propensities): - super(MarkovJumpModel, self).__init__() + super().__init__() self._x0 = np.asarray(x0) self._V = V self._propensities = propensities diff --git a/pints/toy/stochastic/_michaelis_menten_model.py b/pints/toy/stochastic/_michaelis_menten_model.py index bac2934f4..e61d5b9b1 100644 --- a/pints/toy/stochastic/_michaelis_menten_model.py +++ b/pints/toy/stochastic/_michaelis_menten_model.py @@ -32,8 +32,7 @@ def __init__(self, initial_molecule_count): V = [[-1, -1, 1, 0], [1, 1, -1, 0], [0, 1, -1, 1]] - super(MichaelisMentenModel, self).__init__(initial_molecule_count, - V, self._propensities) + super().__init__(initial_molecule_count, V, self._propensities) @staticmethod def _propensities(xs, ks): diff --git a/pints/toy/stochastic/_production_degradation_model.py b/pints/toy/stochastic/_production_degradation_model.py index 3d4a066ed..996ae866d 100644 --- a/pints/toy/stochastic/_production_degradation_model.py +++ b/pints/toy/stochastic/_production_degradation_model.py @@ -30,8 +30,7 @@ class ProductionDegradationModel(MarkovJumpModel): def __init__(self, initial_molecule_count=20): V = [[-1], [1]] init_list = [initial_molecule_count] - super(ProductionDegradationModel, self).__init__(init_list, - V, self._propensities) + super().__init__(init_list, V, self._propensities) @staticmethod def _propensities(xs, ks): diff --git a/pints/toy/stochastic/_schlogl_model.py b/pints/toy/stochastic/_schlogl_model.py index 2534905e1..ff2564873 100644 --- a/pints/toy/stochastic/_schlogl_model.py +++ b/pints/toy/stochastic/_schlogl_model.py @@ -33,8 +33,7 @@ class SchloglModel(MarkovJumpModel): def __init__(self, initial_molecule_count=20): V = [[1], [-1], [1], [-1]] init_list = [initial_molecule_count] 
- super(SchloglModel, self).__init__(init_list, - V, self._propensities) + super().__init__(init_list, V, self._propensities) @staticmethod def _propensities(xs, ks):