From bff247e7b40658d69481b96f6320928d13e19cd3 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Thu, 14 Jan 2021 19:41:34 +0000 Subject: [PATCH 01/52] Started adding loggers --- pbjam/__init__.py | 7 ++++++- pbjam/star.py | 6 ++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/pbjam/__init__.py b/pbjam/__init__.py index e2c2e403..65c9de9b 100644 --- a/pbjam/__init__.py +++ b/pbjam/__init__.py @@ -4,6 +4,11 @@ import os PACKAGEDIR = os.path.abspath(os.path.dirname(__file__)) +import logging + +_logger = logging.getLogger(__name__) +# TODO: add stream handler if need be + from .version import __version__ from .priors import kde from .session import session @@ -12,4 +17,4 @@ from .ellone import ellone from .star import star from .mcmc import mcmc -from .mcmc import nested \ No newline at end of file +from .mcmc import nested diff --git a/pbjam/star.py b/pbjam/star.py index 9f260c15..7920a2eb 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -27,6 +27,9 @@ from astroquery.simbad import Simbad import astropy.units as units +import logging + +_logger = logging.getLogger(__name__) # For module-level logging class star(plotting): """ Class for each star to be peakbagged @@ -83,6 +86,9 @@ def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], self.ID = ID + self._logger = logging.getLogger('.'.join([__name__, self.__class__.__name__])) + self._logger.info(f"Initialising star with ID {self.ID}.") + if numax[0] < 25: warnings.warn('The input numax is less than 25. 
The prior is not well defined here, so be careful with the result.') self.numax = numax From e27bee55f32b6403ef330b5a7b771f2396172b9a Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Thu, 14 Jan 2021 22:43:55 +0000 Subject: [PATCH 02/52] Added module-level log messages --- pbjam/star.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/pbjam/star.py b/pbjam/star.py index 7920a2eb..fc4f2ce5 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -30,6 +30,7 @@ import logging _logger = logging.getLogger(__name__) # For module-level logging +_logger.debug('Initialized module logger.') class star(plotting): """ Class for each star to be peakbagged @@ -87,7 +88,7 @@ def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], self.ID = ID self._logger = logging.getLogger('.'.join([__name__, self.__class__.__name__])) - self._logger.info(f"Initialising star with ID {self.ID}.") + self._logger.debug('Initialized class logger.') if numax[0] < 25: warnings.warn('The input numax is less than 25. The prior is not well defined here, so be careful with the result.') @@ -115,7 +116,8 @@ def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], self.prior_file = get_priorpath() else: self.prior_file = prior_file - + + self._logger.info(f"Initialized star with ID {self.ID}.") def _checkTeffBpRp(self, teff, bp_rp): """ Set the Teff and/or bp_rp values @@ -448,12 +450,12 @@ def _querySimbad(ID): Gaia DR2 source ID. Returns None if no Gaia ID is found. """ - print('Querying Simbad for Gaia ID') + _logger.debug('Querying Simbad for Gaia ID.') try: job = Simbad.query_objectids(ID) except: - print(f'Unable to resolve {ID} with Simbad') + _logger.debug(f'Unable to resolve {ID} with Simbad.') return None for line in job['ID']: @@ -487,7 +489,7 @@ def _queryTIC(ID, radius = 20): Gaia bp-rp value from the TIC. 
""" - print('Querying TIC for Gaia bp-rp values.') + _logger.debug('Querying TIC for Gaia bp-rp values.') job = Catalogs.query_object(objectname=ID, catalog='TIC', objType='STAR', radius = radius*units.arcsec) @@ -518,7 +520,7 @@ def _queryMAST(ID): """ - print(f'Querying MAST for the {ID} coordinates.') + _logger.debug(f'Querying MAST for the {ID} coordinates.') mastobs = AsqMastObsCl() try: return mastobs.resolve_object(objectname = ID) @@ -550,7 +552,7 @@ def _queryGaia(ID=None,coords=None, radius = 20): Gaia bp-rp value of the requested target from the Gaia archive. """ - print('Querying Gaia archive for bp-rp values.') + _logger.debug('Querying Gaia archive for bp-rp values.') from astroquery.gaia import Gaia @@ -559,6 +561,7 @@ def _queryGaia(ID=None,coords=None, radius = 20): try: job = Gaia.launch_job(adql_query).get_results() except: + _logger.debug(f'Unable to query Gaia archive using ID={ID}.') return None return float(job['bp_rp'][0]) @@ -570,6 +573,7 @@ def _queryGaia(ID=None,coords=None, radius = 20): try: job = Gaia.launch_job(adql_query).get_results() except: + _logger.debug('Unable to query Gaia archive using coords={coords}.') return None return float(job['bp_rp'][0]) else: @@ -659,7 +663,8 @@ def get_bp_rp(ID): coords = _queryMAST(ID) bp_rp = _queryGaia(coords=coords) except: - print(f'Unable to retrieve a bp_rp value for {ID}.') + # Note that _logger.exception gives the full Traceback + _logger.exception(f'Unable to retrieve a bp_rp value for {ID}.') bp_rp = np.nan - return bp_rp \ No newline at end of file + return bp_rp From cd9eda21caeafe2f587562c1e518cd39c81f0727 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Thu, 14 Jan 2021 22:44:07 +0000 Subject: [PATCH 03/52] Removed new line --- pbjam/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pbjam/__init__.py b/pbjam/__init__.py index 65c9de9b..9d7174ac 100644 --- a/pbjam/__init__.py +++ b/pbjam/__init__.py @@ -5,7 +5,6 @@ PACKAGEDIR = os.path.abspath(os.path.dirname(__file__)) 
import logging - _logger = logging.getLogger(__name__) # TODO: add stream handler if need be From cf38574014a8c06caba40429490c582076962510 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Thu, 14 Jan 2021 23:43:23 +0000 Subject: [PATCH 04/52] Added stream handler to package logger --- pbjam/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pbjam/__init__.py b/pbjam/__init__.py index 9d7174ac..9ecc21cd 100644 --- a/pbjam/__init__.py +++ b/pbjam/__init__.py @@ -6,7 +6,8 @@ import logging _logger = logging.getLogger(__name__) -# TODO: add stream handler if need be +_logger.addHandler(logging.StreamHandler()) +# TODO: format stream handler if need be. from .version import __version__ from .priors import kde From 544c68a5244a4045dfc290f3a218a0b14499ede0 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Fri, 15 Jan 2021 18:53:05 +0000 Subject: [PATCH 05/52] Added default stream handler and logger level for pbjam --- pbjam/__init__.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/pbjam/__init__.py b/pbjam/__init__.py index 9ecc21cd..88925900 100644 --- a/pbjam/__init__.py +++ b/pbjam/__init__.py @@ -6,8 +6,17 @@ import logging _logger = logging.getLogger(__name__) -_logger.addHandler(logging.StreamHandler()) -# TODO: format stream handler if need be. +_logger.setLevel('DEBUG') + +# if len(_logger.handlers) == 0: +# Don't add a stream handler if any handler already exists, i.e. 
user knows what they're doing +_handler = logging.StreamHandler() +_FMT = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s: %(message)s', datefmt='%Y-%m-%d, %H:%M:%S',) +_handler.setFormatter(_FMT) +_handler.setLevel('INFO') +_logger.addHandler(_handler) + +_logger.info('Importing PBjam') from .version import __version__ from .priors import kde From a8c3225162f5828f3147b446a15cee959216804d Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Fri, 15 Jan 2021 18:53:20 +0000 Subject: [PATCH 06/52] Added log wrapper and file_handler class --- pbjam/jar.py | 132 ++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 130 insertions(+), 2 deletions(-) diff --git a/pbjam/jar.py b/pbjam/jar.py index 78483895..5c9fb48b 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -4,11 +4,139 @@ """ -from . import PACKAGEDIR +from . import PACKAGEDIR, _FMT import os import numpy as np from scipy.special import erf +import functools, logging +from . import _logger as pbjam_logger + +_logger = logging.getLogger(__name__) +_logger.debug('Initialised module logger') + +def _entering_function(func, logger): + """ Pre function logging. """ + logger.debug("Entering %s", func.__qualname__) + # TODO: stuff to check before entering function + +def _exiting_function(func, logger): + """ Post function logging. """ + # TODO: stuff to check before exiting function + logger.debug("Exiting %s", func.__qualname__) + +def log(logger): + """ + Function logging decorator. + + Parameters + ---------- + logger: logging.Logger + Specify the logger in which to submit entering and exiting logs, highly recommended to be the module-level + logger (see Examples). 
+ + Examples + -------- + Logging a function called `my_func` defined in a module with name `__name__`, + + ```python + import logging + from pbjam.jar import log + + _logger = logging.getLogger(__name__) + + @log(_logger) + def my_func(a, b): + _logger.debug('Function in progress.') + return a + b + + if __name__ == "__main__": + logging.basicConfig() + _logger.setLevel('DEBUG') + + result = my_func(1, 2) + _logger.debug(f'result = {result}') + ``` + + Outputs, + + ```python + DEBUG:__main__:Entering my_func + DEBUG:__main__:Function in progress. + DEBUG:__main__:Exiting my_func + DEBUG:__main__:result = 3 + ``` + + For use within classes, + + ```python + import logging + from pbjam.jar import log + + _logger = logging.getLogger(__name__) + + + class myClass: + + @log(_logger) + def __init__(self): + _logger.debug('Initializing class.') + self.a = 1 + self.b = 2 + + @log(_logger) + def my_mthd(self): + _logger.debug('Method in progress.') + return self.a + self.b + + if __name__ == "__main__": + logging.basicConfig() + _logger.setLevel('DEBUG') + + obj = myClass() + result = obj.my_mthd() + _logger.debug(f'result = {result}') + ``` + + Outputs, + + ```python + DEBUG:__main__:Entering myClass.__init__. + DEBUG:__main__:Initializing class. + DEBUG:__main__:Exiting myClass.__init__. + DEBUG:__main__:Entering myClass.my_mthd. + DEBUG:__main__:Method in progress. + DEBUG:__main__:Exiting myClass.my_mthd. 
+ DEBUG:__main__:result = 3 + ``` + + """ + def _log(func): + @functools.wraps(func) + def wrap(*args, **kwargs): + _entering_function(func, logger) + result = func(*args, **kwargs) + _exiting_function(func, logger) + return result + return wrap + + return _log + + +class file_handler: + def __init__(self, path, level='DEBUG', **kwargs): + self.handler = logging.FileHandler(path, **kwargs) + self.handler.setFormatter(_FMT) + self.handler.setLevel(level) + + def __enter__(self): + pbjam_logger.addHandler(self.handler) + return self.handler + + def __exit__(self, type, value, traceback): + pbjam_logger.handlers.remove(self.handler) + + class references(): """ A class for managing references used when running PBjam. @@ -246,4 +374,4 @@ def normal(x, mu, sigma): if (sigma < 0): return 0.0 - return -0.5 * (x - mu)**2 / sigma**2 \ No newline at end of file + return -0.5 * (x - mu)**2 / sigma**2 From 31fcd26cd954a548b5752af2ecb54b4d1c32f823 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Fri, 15 Jan 2021 18:54:45 +0000 Subject: [PATCH 07/52] Imported new modules for logging --- pbjam/plotting.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/pbjam/plotting.py b/pbjam/plotting.py index c98f3ac1..91ab067a 100644 --- a/pbjam/plotting.py +++ b/pbjam/plotting.py @@ -13,7 +13,13 @@ import astropy.units as u import pandas as pd -class plotting(): +from .jar import log + +_logger = logging.getLogger(__name__) # For module-level logging +_logger.debug('Initialized module logger.') + + +class plotting: """ Class inherited by PBjam modules to plot results This is used to standardize the plots produced at various steps of the @@ -24,7 +30,7 @@ class plotting(): called from. 
""" - + @log(_logger) def __init__(self): pass @@ -52,7 +58,8 @@ def _save_my_fig(self, fig, figtype, path, ID): if path and ID: outpath = os.path.join(*[path, type(self).__name__+f'_{figtype}_{str(ID)}.png']) fig.savefig(outpath) - + + @log(_logger) def plot_echelle(self, pg=None, path=None, ID=None, savefig=False): """ Make echelle plot From 9960c3c8e4ccb0f399c266326d074af8412609cb Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Fri, 15 Jan 2021 18:54:53 +0000 Subject: [PATCH 08/52] Added logging and file_hander --- pbjam/session.py | 52 ++++++++++++++++++++++++++++++------------------ pbjam/star.py | 43 ++++++++++++++++++++++++--------------- 2 files changed, 60 insertions(+), 35 deletions(-) diff --git a/pbjam/session.py b/pbjam/session.py index 0eed73c4..2aec3212 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -52,7 +52,7 @@ import os, pickle, warnings from .star import star, _format_name from datetime import datetime -from .jar import references +from .jar import references, log, file_handler def _organize_sess_dataframe(vardf): @@ -593,7 +593,18 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, for i, st in enumerate(self.stars): if st.numax[0] > st.f[-1]: warnings.warn("Input numax is greater than Nyquist frequeny for %s" % (st.ID)) - + + # def add_file_handler(self): + # logger = logging.getLogger('pbjam') # <--- logs everything under pbjam + # fpath = os.path.join(self.path, 'session.log') + # self.handler = logging.FileHandler(fpath) + # self.handler.setFormatter(_FMT) + # logger.addHandler(self.handler) + + # def remove_file_handler(self) + # logger = logging.getLogger('pbjam') + # logger.handlers.remove(self.handler) + def __call__(self, bw_fac=1, norders=8, model_type='simple', tune=1500, nthreads=1, verbose=False, make_plots=False, store_chains=False, asy_sampling='emcee', developer_mode=False): @@ -633,25 +644,28 @@ def __call__(self, bw_fac=1, norders=8, model_type='simple', tune=1500, the prior sample. 
Important: This is not good practice for getting science results! """ - - self.pb_model_type = model_type + # self.add_file_handler() # <--- conder changing this to a "with" statement for safe closing + with file_handler(self.path): + self.pb_model_type = model_type - for i, st in enumerate(self.stars): - try: - st(bw_fac=bw_fac, tune=tune, norders=norders, - model_type=self.pb_model_type, make_plots=make_plots, - store_chains=store_chains, nthreads=nthreads, - asy_sampling=asy_sampling, developer_mode=developer_mode) - - self.references._reflist += st.references._reflist + for i, st in enumerate(self.stars): + try: + st(bw_fac=bw_fac, tune=tune, norders=norders, + model_type=self.pb_model_type, make_plots=make_plots, + store_chains=store_chains, nthreads=nthreads, + asy_sampling=asy_sampling, developer_mode=developer_mode) + + self.references._reflist += st.references._reflist + + self.stars[i] = None - self.stars[i] = None - - # Crude way to send error messages that occur in star up to Session - # without ending the session. Is there a better way? - except Exception as ex: - message = "Star {0} produced an exception of type {1} occurred. Arguments:\n{2!r}".format(st.ID, type(ex).__name__, ex.args) - print(message) + # Crude way to send error messages that occur in star up to Session + # without ending the session. Is there a better way? + except Exception as ex: + message = "Star {0} produced an exception of type {1} occurred. 
Arguments:\n{2!r}".format(st.ID, type(ex).__name__, ex.args) + print(message) + + # self.remove_file_handler() def _load_fits(files, mission): """ Read fitsfiles into a Lightkurve object diff --git a/pbjam/star.py b/pbjam/star.py index fc4f2ce5..2408bafe 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -28,10 +28,12 @@ import astropy.units as units import logging +from .jar import log, file_handler _logger = logging.getLogger(__name__) # For module-level logging _logger.debug('Initialized module logger.') + class star(plotting): """ Class for each star to be peakbagged @@ -81,15 +83,10 @@ class star(plotting): power spectrum """ - def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], path=None, prior_file=None): - self.ID = ID - self._logger = logging.getLogger('.'.join([__name__, self.__class__.__name__])) - self._logger.debug('Initialized class logger.') - if numax[0] < 25: warnings.warn('The input numax is less than 25. The prior is not well defined here, so be careful with the result.') self.numax = numax @@ -117,7 +114,7 @@ def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], else: self.prior_file = prior_file - self._logger.info(f"Initialized star with ID {self.ID}.") + _logger.info(f"Initialized star with ID {self.ID}.") def _checkTeffBpRp(self, teff, bp_rp): """ Set the Teff and/or bp_rp values @@ -375,6 +372,16 @@ def run_peakbag(self, model_type='simple', tune=1500, nthreads=1, peakbag_samps = pd.DataFrame(self.peakbag.samples, columns=self.peakbag.par_names) peakbag_samps.to_csv(self._get_outpath(f'peakbag_chains_{self.ID}.csv'), index=False) + # def add_file_handler(self): + # logger = logging.getLogger('pbjam') # <--- logs everything under pbjam + # fpath = os.path.join(self.path, 'star.log') + # self.handler = logging.FileHandler(fpath) + # self.handler.setFormatter(_FMT) + # logger.addHandler(self.handler) + + # def remove_file_handler(self) + # logger = logging.getLogger('pbjam') + # 
logger.handlers.remove(self.handler) def __call__(self, bw_fac=1.0, norders=8, model_type='simple', tune=1500, nthreads=1, make_plots=True, store_chains=False, @@ -412,17 +419,20 @@ def __call__(self, bw_fac=1.0, norders=8, model_type='simple', tune=1500, the prior sample. Important: This is not good practice for getting science results! """ + # self.add_file_handler() + with file_handler(self.path): + self.run_kde(bw_fac=bw_fac, make_plots=make_plots, store_chains=store_chains) - self.run_kde(bw_fac=bw_fac, make_plots=make_plots, store_chains=store_chains) + self.run_asy_peakbag(norders=norders, make_plots=make_plots, + store_chains=store_chains, method=asy_sampling, + developer_mode=developer_mode) - self.run_asy_peakbag(norders=norders, make_plots=make_plots, - store_chains=store_chains, method=asy_sampling, - developer_mode=developer_mode) + self.run_peakbag(model_type=model_type, tune=tune, nthreads=nthreads, + make_plots=make_plots, store_chains=store_chains) - self.run_peakbag(model_type=model_type, tune=tune, nthreads=nthreads, - make_plots=make_plots, store_chains=store_chains) + self.references._addRef('pandas') - self.references._addRef('pandas') + self.remove_file_handler() def _querySimbad(ID): """ Query any ID at Simbad for Gaia DR2 source ID. 
@@ -662,9 +672,10 @@ def get_bp_rp(ID): try: coords = _queryMAST(ID) bp_rp = _queryGaia(coords=coords) - except: - # Note that _logger.exception gives the full Traceback - _logger.exception(f'Unable to retrieve a bp_rp value for {ID}.') + except Exception as exc: + # Note that _logger.exception gives the full Traceback or just set exc_info + _logger.debug(f'Exception: {exc}.', exc_info=1) + _logger.warning(f'Unable to retrieve a bp_rp value for {ID}.') bp_rp = np.nan return bp_rp From 9307332787d8eebd987474cd2c7dac804ea1be31 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Fri, 15 Jan 2021 22:15:28 +0000 Subject: [PATCH 09/52] Renamed _logger to logger and implemented file_logger --- pbjam/__init__.py | 20 +++++------ pbjam/jar.py | 84 ++++++++++++++++++++++++++++++++++------------- pbjam/plotting.py | 8 ++--- pbjam/session.py | 8 +++-- pbjam/star.py | 40 ++++++++++++---------- 5 files changed, 103 insertions(+), 57 deletions(-) diff --git a/pbjam/__init__.py b/pbjam/__init__.py index 88925900..6baf2c37 100644 --- a/pbjam/__init__.py +++ b/pbjam/__init__.py @@ -5,18 +5,18 @@ PACKAGEDIR = os.path.abspath(os.path.dirname(__file__)) import logging -_logger = logging.getLogger(__name__) -_logger.setLevel('DEBUG') +HANDLER_FMT = logging.Formatter("%(asctime)-15s : %(levelname)-8s : %(name)-17s : %(message)s") -# if len(_logger.handlers) == 0: -# Don't add a stream handler if any handler already exists, i.e. user knows what they're doing -_handler = logging.StreamHandler() -_FMT = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s: %(message)s', datefmt='%Y-%m-%d, %H:%M:%S',) -_handler.setFormatter(_FMT) -_handler.setLevel('INFO') -_logger.addHandler(_handler) +logger = logging.getLogger(__name__) +logger.setLevel('DEBUG') -_logger.info('Importing PBjam') +# Add a stream handler at level=='INFO' - should we do this? 
+_stream_handler = logging.StreamHandler() +_stream_handler.setFormatter(HANDLER_FMT) +_stream_handler.setLevel('INFO') + +logger.addHandler(_stream_handler) +logger.debug('Importing PBjam') from .version import __version__ from .priors import kde diff --git a/pbjam/jar.py b/pbjam/jar.py index 5c9fb48b..e9dc626f 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -4,26 +4,26 @@ """ -from . import PACKAGEDIR, _FMT +from . import PACKAGEDIR, HANDLER_FMT import os import numpy as np from scipy.special import erf import functools, logging -from . import _logger as pbjam_logger +from contextlib import contextmanager -_logger = logging.getLogger(__name__) -_logger.debug('Initialised module logger') +logger = logging.getLogger(__name__) +logger.debug('Initialised module logger') def _entering_function(func, logger): """ Pre function logging. """ - logger.debug("Entering %s", func.__qualname__) + logger.debug("Entering %s.", func.__qualname__) # TODO: stuff to check before entering function def _exiting_function(func, logger): """ Post function logging. 
""" # TODO: stuff to check before exiting function - logger.debug("Exiting %s", func.__qualname__) + logger.debug("Exiting %s.", func.__qualname__) def log(logger): """ @@ -43,19 +43,19 @@ def log(logger): import logging from pbjam.jar import log - _logger = logging.getLogger(__name__) + logger = logging.getLogger(__name__) - @log(_logger) + @log(logger) def my_func(a, b): - _logger.debug('Function in progress.') + logger.debug('Function in progress.') return a + b if __name__ == "__main__": logging.basicConfig() - _logger.setLevel('DEBUG') + logger.setLevel('DEBUG') result = my_func(1, 2) - _logger.debug(f'result = {result}') + logger.debug(f'result = {result}') ``` Outputs, @@ -73,29 +73,29 @@ def my_func(a, b): import logging from pbjam.jar import log - _logger = logging.getLogger(__name__) + logger = logging.getLogger(__name__) class myClass: - @log(_logger) + @log(logger) def __init__(self): - _logger.debug('Initializing class.') + logger.debug('Initializing class.') self.a = 1 self.b = 2 - @log(_logger) + @log(logger) def my_mthd(self): - _logger.debug('Method in progress.') + logger.debug('Method in progress.') return self.a + self.b if __name__ == "__main__": logging.basicConfig() - _logger.setLevel('DEBUG') + logger.setLevel('DEBUG') obj = myClass() result = obj.my_mthd() - _logger.debug(f'result = {result}') + logger.debug(f'result = {result}') ``` Outputs, @@ -123,18 +123,56 @@ def wrap(*args, **kwargs): return _log -class file_handler: +class file_logger: + """ + Context manager for file logging. It logs everything under the `pbjam` parent level in some file at a given `path`. + + Parameters + ---------- + path : str + File path to save the log + + level : str, optional + Logging level. Default is 'DEBUG' + + **kwargs : + Keyword arguments passed to `logging.FileHandler`. + + Attributes + ---------- + handler : logging.FileHandler + File handler object. 
+ + Examples + -------- + ```python + from pbjam.jar import file_logger + + with file_logger('example.log') as flog: + # Do some stuff here and it will be logged to 'example.log' + ... + + # Do some stuff here and it won't be logged to 'example.log' + + with flog: + # Do some stuff here and it will be logged to 'example.log' + ... + ``` + + """ + _logger = logging.getLogger('pbjam') + def __init__(self, path, level='DEBUG', **kwargs): self.handler = logging.FileHandler(path, **kwargs) - self.handler.setFormatter(_FMT) + self.handler.setFormatter(HANDLER_FMT) self.handler.setLevel(level) def __enter__(self): - pbjam_logger.addHandler(self.handler) - return self.handler + self._logger.addHandler(self.handler) + return self def __exit__(self, type, value, traceback): - pbjam_logger.handlers.remove(self.handler) + self._logger.handlers.remove(self.handler) class references(): diff --git a/pbjam/plotting.py b/pbjam/plotting.py index 91ab067a..45b34520 100644 --- a/pbjam/plotting.py +++ b/pbjam/plotting.py @@ -15,8 +15,8 @@ from .jar import log -_logger = logging.getLogger(__name__) # For module-level logging -_logger.debug('Initialized module logger.') +logger = logging.getLogger(__name__) # For module-level logging +logger.debug('Initialized module logger.') class plotting: @@ -30,7 +30,7 @@ class plotting: called from. 
""" - @log(_logger) + @log(logger) def __init__(self): pass @@ -59,7 +59,7 @@ def _save_my_fig(self, fig, figtype, path, ID): outpath = os.path.join(*[path, type(self).__name__+f'_{figtype}_{str(ID)}.png']) fig.savefig(outpath) - @log(_logger) + @log(logger) def plot_echelle(self, pg=None, path=None, ID=None, savefig=False): """ Make echelle plot diff --git a/pbjam/session.py b/pbjam/session.py index 2aec3212..cb1006ca 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -52,8 +52,10 @@ import os, pickle, warnings from .star import star, _format_name from datetime import datetime -from .jar import references, log, file_handler +from .jar import references, log, file_logger +logger = logging.getLogger(__name__) +logger.debug('Initialised module logger') def _organize_sess_dataframe(vardf): """ Takes input dataframe and tidies it up. @@ -598,7 +600,7 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, # logger = logging.getLogger('pbjam') # <--- logs everything under pbjam # fpath = os.path.join(self.path, 'session.log') # self.handler = logging.FileHandler(fpath) - # self.handler.setFormatter(_FMT) + # self.handler.setFormatter(HANDLER_FMT) # logger.addHandler(self.handler) # def remove_file_handler(self) @@ -645,7 +647,7 @@ def __call__(self, bw_fac=1, norders=8, model_type='simple', tune=1500, science results! 
""" # self.add_file_handler() # <--- conder changing this to a "with" statement for safe closing - with file_handler(self.path): + with file_logger(self.path): self.pb_model_type = model_type for i, st in enumerate(self.stars): diff --git a/pbjam/star.py b/pbjam/star.py index 2408bafe..655886b7 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -28,10 +28,10 @@ import astropy.units as units import logging -from .jar import log, file_handler +from .jar import log, file_logger -_logger = logging.getLogger(__name__) # For module-level logging -_logger.debug('Initialized module logger.') +logger = logging.getLogger(__name__) # For module-level logging +logger.debug('Initialized module logger.') class star(plotting): @@ -114,7 +114,7 @@ def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], else: self.prior_file = prior_file - _logger.info(f"Initialized star with ID {self.ID}.") + logger.info(f"Initialized star with ID {self.ID}.") def _checkTeffBpRp(self, teff, bp_rp): """ Set the Teff and/or bp_rp values @@ -376,7 +376,7 @@ def run_peakbag(self, model_type='simple', tune=1500, nthreads=1, # logger = logging.getLogger('pbjam') # <--- logs everything under pbjam # fpath = os.path.join(self.path, 'star.log') # self.handler = logging.FileHandler(fpath) - # self.handler.setFormatter(_FMT) + # self.handler.setFormatter(HANDLER_FMT) # logger.addHandler(self.handler) # def remove_file_handler(self) @@ -420,7 +420,7 @@ def __call__(self, bw_fac=1.0, norders=8, model_type='simple', tune=1500, science results! 
""" # self.add_file_handler() - with file_handler(self.path): + with file_logger(self.path): self.run_kde(bw_fac=bw_fac, make_plots=make_plots, store_chains=store_chains) self.run_asy_peakbag(norders=norders, make_plots=make_plots, @@ -432,8 +432,9 @@ def __call__(self, bw_fac=1.0, norders=8, model_type='simple', tune=1500, self.references._addRef('pandas') - self.remove_file_handler() + # self.remove_file_handler() +@log(logger) def _querySimbad(ID): """ Query any ID at Simbad for Gaia DR2 source ID. @@ -460,12 +461,12 @@ def _querySimbad(ID): Gaia DR2 source ID. Returns None if no Gaia ID is found. """ - _logger.debug('Querying Simbad for Gaia ID.') + logger.debug('Querying Simbad for Gaia ID.') try: job = Simbad.query_objectids(ID) except: - _logger.debug(f'Unable to resolve {ID} with Simbad.') + logger.debug(f'Unable to resolve {ID} with Simbad.') return None for line in job['ID']: @@ -473,6 +474,7 @@ def _querySimbad(ID): return line.replace('Gaia DR2 ', '') return None +@log(logger) def _queryTIC(ID, radius = 20): """ Query TIC for bp-rp value @@ -499,7 +501,7 @@ def _queryTIC(ID, radius = 20): Gaia bp-rp value from the TIC. """ - _logger.debug('Querying TIC for Gaia bp-rp values.') + logger.debug('Querying TIC for Gaia bp-rp values.') job = Catalogs.query_object(objectname=ID, catalog='TIC', objType='STAR', radius = radius*units.arcsec) @@ -509,6 +511,7 @@ def _queryTIC(ID, radius = 20): else: return None +@log(logger) def _queryMAST(ID): """ Query any ID at MAST @@ -530,13 +533,14 @@ def _queryMAST(ID): """ - _logger.debug(f'Querying MAST for the {ID} coordinates.') + logger.debug(f'Querying MAST for the {ID} coordinates.') mastobs = AsqMastObsCl() try: return mastobs.resolve_object(objectname = ID) except: return None +@log(logger) def _queryGaia(ID=None,coords=None, radius = 20): """ Query Gaia archive for bp-rp @@ -562,7 +566,7 @@ def _queryGaia(ID=None,coords=None, radius = 20): Gaia bp-rp value of the requested target from the Gaia archive. 
""" - _logger.debug('Querying Gaia archive for bp-rp values.') + logger.debug('Querying Gaia archive for bp-rp values.') from astroquery.gaia import Gaia @@ -571,7 +575,7 @@ def _queryGaia(ID=None,coords=None, radius = 20): try: job = Gaia.launch_job(adql_query).get_results() except: - _logger.debug(f'Unable to query Gaia archive using ID={ID}.') + logger.debug(f'Unable to query Gaia archive using ID={ID}.') return None return float(job['bp_rp'][0]) @@ -583,12 +587,13 @@ def _queryGaia(ID=None,coords=None, radius = 20): try: job = Gaia.launch_job(adql_query).get_results() except: - _logger.debug('Unable to query Gaia archive using coords={coords}.') + logger.debug('Unable to query Gaia archive using coords={coords}.') return None return float(job['bp_rp'][0]) else: raise ValueError('No ID or coordinates provided when querying the Gaia archive.') +@log(logger) def _format_name(ID): """ Format input ID @@ -632,6 +637,7 @@ def _format_name(ID): return fname return ID +@log(logger) def get_bp_rp(ID): """ Search online for bp_rp values based on ID. 
@@ -673,9 +679,9 @@ def get_bp_rp(ID): coords = _queryMAST(ID) bp_rp = _queryGaia(coords=coords) except Exception as exc: - # Note that _logger.exception gives the full Traceback or just set exc_info - _logger.debug(f'Exception: {exc}.', exc_info=1) - _logger.warning(f'Unable to retrieve a bp_rp value for {ID}.') + # Note that logger.exception gives the full Traceback or just set exc_info + logger.debug(f'Exception: {exc}.', exc_info=1) + logger.warning(f'Unable to retrieve a bp_rp value for {ID}.') bp_rp = np.nan return bp_rp From e6bfd929f01fa67ed8c835e5f42f6e54b699abfa Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sat, 16 Jan 2021 17:19:09 +0000 Subject: [PATCH 10/52] Moved handlers to config.py --- pbjam/__init__.py | 18 ++++++++++-------- pbjam/config.py | 23 +++++++++++++++++++++++ 2 files changed, 33 insertions(+), 8 deletions(-) create mode 100644 pbjam/config.py diff --git a/pbjam/__init__.py b/pbjam/__init__.py index 6baf2c37..33b6e800 100644 --- a/pbjam/__init__.py +++ b/pbjam/__init__.py @@ -5,20 +5,20 @@ PACKAGEDIR = os.path.abspath(os.path.dirname(__file__)) import logging -HANDLER_FMT = logging.Formatter("%(asctime)-15s : %(levelname)-8s : %(name)-17s : %(message)s") +from .config import stdout_handler, stderr_handler +# Setup global pbjam logger logger = logging.getLogger(__name__) -logger.setLevel('DEBUG') +logger.setLevel('DEBUG') # <--- minimum possible level for global pbjam logger -# Add a stream handler at level=='INFO' - should we do this? 
-_stream_handler = logging.StreamHandler() -_stream_handler.setFormatter(HANDLER_FMT) -_stream_handler.setLevel('INFO') +logger.addHandler(stdout_handler()) +logger.addHandler(stderr_handler()) -logger.addHandler(_stream_handler) -logger.debug('Importing PBjam') +logger.debug(f'Initializing {__name__}') from .version import __version__ +logger.debug(f'version == {__version__}') + from .priors import kde from .session import session from .asy_peakbag import asymp_spec_model, asymptotic_fit @@ -27,3 +27,5 @@ from .star import star from .mcmc import mcmc from .mcmc import nested + +logger.debug(f'Initialized {__name__}') diff --git a/pbjam/config.py b/pbjam/config.py new file mode 100644 index 00000000..2d63cf91 --- /dev/null +++ b/pbjam/config.py @@ -0,0 +1,23 @@ +# Configures the pbjam package upon import +import logging, sys + +HANDLER_FMT = logging.Formatter("%(asctime)-15s : %(levelname)-8s : %(name)-17s : %(message)s") + + +class info_filter(logging.Filter): + def filter(self, rec): + return rec.levelno == logging.INFO + + +class stdout_handler(logging.StreamHandler): + def __init__(self): + super().__init__(stream=sys.stdout) + self.setFormatter(HANDLER_FMT) + self.addFilter(info_filter()) + + +class stderr_handler(logging.StreamHandler): + def __init__(self): + super().__init__(stream=sys.stderr) + self.setFormatter(HANDLER_FMT) + self.setLevel('WARNING') From d1d7a9b2571bf4896fe7eba80d9ff0502797efc0 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sat, 16 Jan 2021 17:19:33 +0000 Subject: [PATCH 11/52] Changed to file_logging --- pbjam/jar.py | 30 +++++++++++++++++++----------- pbjam/session.py | 6 +++--- pbjam/star.py | 4 ++-- 3 files changed, 24 insertions(+), 16 deletions(-) diff --git a/pbjam/jar.py b/pbjam/jar.py index e9dc626f..a8226389 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -4,7 +4,7 @@ """ -from . import PACKAGEDIR, HANDLER_FMT +from . 
import PACKAGEDIR import os import numpy as np from scipy.special import erf @@ -123,7 +123,7 @@ def wrap(*args, **kwargs): return _log -class file_logger: +class file_logging: """ Context manager for file logging. It logs everything under the `pbjam` parent level in some file at a given `path`. @@ -146,9 +146,9 @@ class file_logger: Examples -------- ```python - from pbjam.jar import file_logger + from pbjam.jar import file_logging - with file_logger('example.log') as flog: + with file_logging('example.log') as flog: # Do some stuff here and it will be logged to 'example.log' ... @@ -161,18 +161,26 @@ class file_logger: """ _logger = logging.getLogger('pbjam') - - def __init__(self, path, level='DEBUG', **kwargs): - self.handler = logging.FileHandler(path, **kwargs) - self.handler.setFormatter(HANDLER_FMT) - self.handler.setLevel(level) + def __init__(self, path, level='DEBUG', handler_kwargs={}): + self.path = path + self.level = level + self.handler_kwargs = handler_kwargs + self.file_handler = None + def add_file_handler(self): + self.file_handler = logging.FileHandler(self.path, **self.handler_kwargs) + self.file_handler.setFormatter(HANDLER_FMT) + self.file_handler.setLevel(self.level) + def __enter__(self): - self._logger.addHandler(self.handler) + self.add_file_handler(self) + self._logger.addHandler(self.file_handler) return self def __exit__(self, type, value, traceback): - self._logger.handlers.remove(self.handler) + self._logger.removeHandler(self.file_handler) + self.file_handler.close() + self.file_handler = None class references(): diff --git a/pbjam/session.py b/pbjam/session.py index cb1006ca..906dd89f 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -49,10 +49,10 @@ import numpy as np import astropy.units as units import pandas as pd -import os, pickle, warnings +import os, pickle, warnings, logging from .star import star, _format_name from datetime import datetime -from .jar import references, log, file_logger +from .jar import references, 
log, file_logging logger = logging.getLogger(__name__) logger.debug('Initialised module logger') @@ -647,7 +647,7 @@ def __call__(self, bw_fac=1, norders=8, model_type='simple', tune=1500, science results! """ # self.add_file_handler() # <--- conder changing this to a "with" statement for safe closing - with file_logger(self.path): + with file_logging(self.path): self.pb_model_type = model_type for i, st in enumerate(self.stars): diff --git a/pbjam/star.py b/pbjam/star.py index 655886b7..56e1fd3e 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -28,7 +28,7 @@ import astropy.units as units import logging -from .jar import log, file_logger +from .jar import log, file_logging logger = logging.getLogger(__name__) # For module-level logging logger.debug('Initialized module logger.') @@ -420,7 +420,7 @@ def __call__(self, bw_fac=1.0, norders=8, model_type='simple', tune=1500, science results! """ # self.add_file_handler() - with file_logger(self.path): + with file_logging(self.path): self.run_kde(bw_fac=bw_fac, make_plots=make_plots, store_chains=store_chains) self.run_asy_peakbag(norders=norders, make_plots=make_plots, From 44b36be525192b0770f3965b7ba7af872ccd1107 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sat, 16 Jan 2021 18:48:48 +0000 Subject: [PATCH 12/52] Simplified base logger stream handlers to one console handler --- pbjam/__init__.py | 8 +++++--- pbjam/config.py | 28 +++++++++++++++++----------- pbjam/session.py | 2 +- pbjam/star.py | 2 +- 4 files changed, 24 insertions(+), 16 deletions(-) diff --git a/pbjam/__init__.py b/pbjam/__init__.py index 33b6e800..240c7016 100644 --- a/pbjam/__init__.py +++ b/pbjam/__init__.py @@ -5,14 +5,16 @@ PACKAGEDIR = os.path.abspath(os.path.dirname(__file__)) import logging -from .config import stdout_handler, stderr_handler +# from .config import stdout_handler, stderr_handler +from .config import console_handler # Setup global pbjam logger logger = logging.getLogger(__name__) logger.setLevel('DEBUG') # <--- minimum 
possible level for global pbjam logger -logger.addHandler(stdout_handler()) -logger.addHandler(stderr_handler()) +# logger.addHandler(stdout_handler()) +# logger.addHandler(stderr_handler()) +logger.addHandler(console_handler()) logger.debug(f'Initializing {__name__}') diff --git a/pbjam/config.py b/pbjam/config.py index 2d63cf91..687cbfd7 100644 --- a/pbjam/config.py +++ b/pbjam/config.py @@ -4,20 +4,26 @@ HANDLER_FMT = logging.Formatter("%(asctime)-15s : %(levelname)-8s : %(name)-17s : %(message)s") -class info_filter(logging.Filter): - def filter(self, rec): - return rec.levelno == logging.INFO +# class info_filter(logging.Filter): +# def filter(self, rec): +# return rec.levelno == logging.INFO -class stdout_handler(logging.StreamHandler): - def __init__(self): - super().__init__(stream=sys.stdout) - self.setFormatter(HANDLER_FMT) - self.addFilter(info_filter()) +# class stdout_handler(logging.StreamHandler): +# def __init__(self): +# super().__init__(stream=sys.stdout) +# self.setFormatter(HANDLER_FMT) +# self.addFilter(info_filter()) + +# class stderr_handler(logging.StreamHandler): +# def __init__(self): +# super().__init__(stream=sys.stderr) +# self.setFormatter(HANDLER_FMT) +# self.setLevel('WARNING') -class stderr_handler(logging.StreamHandler): +class console_handler(logging.StreamHandler): def __init__(self): - super().__init__(stream=sys.stderr) + super().__init__() self.setFormatter(HANDLER_FMT) - self.setLevel('WARNING') + self.setLevel('INFO') diff --git a/pbjam/session.py b/pbjam/session.py index 906dd89f..6f4dfe82 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -647,7 +647,7 @@ def __call__(self, bw_fac=1, norders=8, model_type='simple', tune=1500, science results! 
""" # self.add_file_handler() # <--- conder changing this to a "with" statement for safe closing - with file_logging(self.path): + with file_logging(os.path.join(self.path, 'session.log')): self.pb_model_type = model_type for i, st in enumerate(self.stars): diff --git a/pbjam/star.py b/pbjam/star.py index 56e1fd3e..29a63635 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -420,7 +420,7 @@ def __call__(self, bw_fac=1.0, norders=8, model_type='simple', tune=1500, science results! """ # self.add_file_handler() - with file_logging(self.path): + with file_logging(os.path.join(self.path, 'star.log')): self.run_kde(bw_fac=bw_fac, make_plots=make_plots, store_chains=store_chains) self.run_asy_peakbag(norders=norders, make_plots=make_plots, From f4d0c88e8edf28e9d0cf3987a7ab90a4da648ab2 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sun, 17 Jan 2021 19:15:13 +0000 Subject: [PATCH 13/52] Removed config module --- pbjam/config.py | 29 ----------------------------- 1 file changed, 29 deletions(-) delete mode 100644 pbjam/config.py diff --git a/pbjam/config.py b/pbjam/config.py deleted file mode 100644 index 687cbfd7..00000000 --- a/pbjam/config.py +++ /dev/null @@ -1,29 +0,0 @@ -# Configures the pbjam package upon import -import logging, sys - -HANDLER_FMT = logging.Formatter("%(asctime)-15s : %(levelname)-8s : %(name)-17s : %(message)s") - - -# class info_filter(logging.Filter): -# def filter(self, rec): -# return rec.levelno == logging.INFO - - -# class stdout_handler(logging.StreamHandler): -# def __init__(self): -# super().__init__(stream=sys.stdout) -# self.setFormatter(HANDLER_FMT) -# self.addFilter(info_filter()) - - -# class stderr_handler(logging.StreamHandler): -# def __init__(self): -# super().__init__(stream=sys.stderr) -# self.setFormatter(HANDLER_FMT) -# self.setLevel('WARNING') - -class console_handler(logging.StreamHandler): - def __init__(self): - super().__init__() - self.setFormatter(HANDLER_FMT) - self.setLevel('INFO') From 
5a4c652859599bb45ae6173f8a65db2028ad72ea Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sun, 17 Jan 2021 19:15:31 +0000 Subject: [PATCH 14/52] Renamed and simplified handlers and context manager --- pbjam/__init__.py | 15 +++++------- pbjam/jar.py | 60 +++++++++++++++++++++++++++++------------------ 2 files changed, 43 insertions(+), 32 deletions(-) diff --git a/pbjam/__init__.py b/pbjam/__init__.py index 240c7016..ddd624dd 100644 --- a/pbjam/__init__.py +++ b/pbjam/__init__.py @@ -4,18 +4,15 @@ import os PACKAGEDIR = os.path.abspath(os.path.dirname(__file__)) -import logging -# from .config import stdout_handler, stderr_handler -from .config import console_handler - # Setup global pbjam logger +import logging logger = logging.getLogger(__name__) -logger.setLevel('DEBUG') # <--- minimum possible level for global pbjam logger - -# logger.addHandler(stdout_handler()) -# logger.addHandler(stderr_handler()) -logger.addHandler(console_handler()) +logger.setLevel('DEBUG') # <--- minimum level for global pbjam package logger +# Setup console handler +from .jar import stream_handler +console_handler = stream_handler(level='INFO') +logger.addHandler(console_handler) logger.debug(f'Initializing {__name__}') from .version import __version__ diff --git a/pbjam/jar.py b/pbjam/jar.py index a8226389..d739bec2 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -12,6 +12,7 @@ import functools, logging from contextlib import contextmanager +HANDLER_FMT = logging.Formatter("%(asctime)-15s : %(levelname)-8s : %(name)-17s : %(message)s") logger = logging.getLogger(__name__) logger.debug('Initialised module logger') @@ -123,24 +124,42 @@ def wrap(*args, **kwargs): return _log +class _handler(logging.Handler): + def __init__(self, level='NOTSET', **kwargs): + super().__init__(**kwargs) + self.setFormatter(HANDLER_FMT) + self.setLevel(level) + + +class stream_handler(_handler, logging.StreamHandler): + def __init__(self, level='INFO', **kwargs): + super().__init__(level=level, **kwargs) + + 
+class file_handler(_handler, logging.FileHandler): + def __init__(self, filename, level='DEBUG', **kwargs): + super().__init__(filename=filename, level=level, **kwargs) + + class file_logging: """ - Context manager for file logging. It logs everything under the `pbjam` parent level in some file at a given `path`. + Context manager for file logging. It logs everything under the `loggername` logger, by default this is the `'pbjam'` + logger (i.e. logs everything from the pbjam package). Parameters ---------- - path : str - File path to save the log + filename : str + Filename to save the log level : str, optional - Logging level. Default is 'DEBUG' + Logging level. Default is 'DEBUG'. - **kwargs : - Keyword arguments passed to `logging.FileHandler`. + loggername : str, optional + Name of logger which will send logs to `filename`. Default is `'pbjam'`. Attributes ---------- - handler : logging.FileHandler + handler : pbjam.jar.file_handler File handler object. Examples @@ -160,27 +179,22 @@ class file_logging: ``` """ - _logger = logging.getLogger('pbjam') - def __init__(self, path, level='DEBUG', handler_kwargs={}): - self.path = path - self.level = level - self.handler_kwargs = handler_kwargs - self.file_handler = None - - def add_file_handler(self): - self.file_handler = logging.FileHandler(self.path, **self.handler_kwargs) - self.file_handler.setFormatter(HANDLER_FMT) - self.file_handler.setLevel(self.level) + + def __init__(self, filename, level='DEBUG', loggername='pbjam'): + self._filename = filename + self._level = level + self._logger = logging.getLogger(loggername) + self.handler = None def __enter__(self): - self.add_file_handler(self) - self._logger.addHandler(self.file_handler) + self.handler = file_handler(self._filename, level=self._level) + self._logger.addHandler(self.handler) return self def __exit__(self, type, value, traceback): - self._logger.removeHandler(self.file_handler) - self.file_handler.close() - self.file_handler = None + 
self._logger.removeHandler(self.handler) + self.handler.close() + self.handler = None class references(): From 40930f70751dee8e2236bed824df8046ec1db677 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sun, 17 Jan 2021 21:46:51 +0000 Subject: [PATCH 15/52] Added open and close methods to file_logging --- pbjam/jar.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/pbjam/jar.py b/pbjam/jar.py index d739bec2..5080add5 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -162,6 +162,14 @@ class file_logging: handler : pbjam.jar.file_handler File handler object. + Methods + ------- + open() : + Activates file logging process + + close() : + Safely closes file logging process + Examples -------- ```python @@ -186,16 +194,22 @@ def __init__(self, filename, level='DEBUG', loggername='pbjam'): self._logger = logging.getLogger(loggername) self.handler = None - def __enter__(self): + def open(self): self.handler = file_handler(self._filename, level=self._level) self._logger.addHandler(self.handler) - return self - - def __exit__(self, type, value, traceback): + + def close(self): self._logger.removeHandler(self.handler) self.handler.close() self.handler = None + def __enter__(self): + self.open() + return self + + def __exit__(self, type, value, traceback): + self.close() + class references(): """ A class for managing references used when running PBjam. 
From 9aef7f34a8ff99825813d94dd8c7a87f5847694c Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sun, 17 Jan 2021 21:47:07 +0000 Subject: [PATCH 16/52] Added more log decorators --- pbjam/priors.py | 17 ++++++++++++----- pbjam/star.py | 26 +++++++++++--------------- 2 files changed, 23 insertions(+), 20 deletions(-) diff --git a/pbjam/priors.py b/pbjam/priors.py index 55a9a9a5..23fd2f4b 100644 --- a/pbjam/priors.py +++ b/pbjam/priors.py @@ -12,7 +12,11 @@ import warnings from .plotting import plotting import statsmodels.api as sm -from .jar import get_priorpath, to_log10, normal +from .jar import get_priorpath, to_log10, normal, log + +import logging +logger = logging.getLogger(__name__) + class kde(plotting): """ A class to produce prior for asy_peakbag and initial starting location. @@ -52,7 +56,7 @@ class kde(plotting): to compute the KDE. Default is to use pbjam/data/prior_data.csv """ - + @log(logger) def __init__(self, starinst=None, prior_file=None): if starinst: @@ -74,6 +78,7 @@ def __init__(self, starinst=None, prior_file=None): self.verbose = False + @log(logger) def select_prior_data(self, numax=None, KDEsize = 100): """ Selects useful prior data based on proximity to estimated numax. @@ -170,7 +175,7 @@ def _prior_size_check(self, pdata, numax, KDEsize): return pdata.sample(KDEsize, weights=idx, replace=False) - + @log(logger) def make_kde(self, bw_fac=1.0): """ Takes the prior data and constructs a KDE function @@ -223,7 +228,7 @@ def make_kde(self, bw_fac=1.0): var_type='c'*len(self.par_names), bw=bw) - + @log(logger) def prior(self, p): """ Calculates the log prior for the initial guess fit. @@ -254,6 +259,7 @@ def prior(self, p): return lp + @log(logger) def likelihood(self, p): """ Calculate likelihood for the initial guess fit @@ -282,6 +288,7 @@ def likelihood(self, p): return lnlike + @log(logger) def kde_predict(self, n): """ Predict the l=0 mode frequencies from the KDE samples. 
@@ -314,7 +321,7 @@ def kde_predict(self, n): return freq.mean(axis=1), freq.std(axis=1) - + @log(logger) def kde_sampler(self, nwalkers=50): """ Samples the posterior distribution with the KDE prior diff --git a/pbjam/star.py b/pbjam/star.py index 29a63635..034a9eef 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -83,9 +83,12 @@ class star(plotting): power spectrum """ + @log(logger) def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], path=None, prior_file=None): + self.ID = ID + logger.info(f"Initializing star with ID {self.ID}.") if numax[0] < 25: warnings.warn('The input numax is less than 25. The prior is not well defined here, so be careful with the result.') @@ -114,8 +117,6 @@ def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], else: self.prior_file = prior_file - logger.info(f"Initialized star with ID {self.ID}.") - def _checkTeffBpRp(self, teff, bp_rp): """ Set the Teff and/or bp_rp values @@ -223,11 +224,11 @@ def _set_outpath(self, path): try: os.makedirs(self.path) except Exception as ex: - message = "Could not create directory for Star {0} because an exception of type {1} occurred. Arguments:\n{2!r}".format(self.ID, type(ex).__name__, ex.args) - print(message) - + # message = "Could not create directory for Star {0} because an exception of type {1} occurred. Arguments:\n{2!r}".format(self.ID, type(ex).__name__, ex.args) + logger.exception(f"Could not create directory for star {self.ID}.") + @log(logger) def run_kde(self, bw_fac=1.0, make_plots=False, store_chains=False): """ Run all steps involving KDE. 
@@ -247,7 +248,7 @@ def run_kde(self, bw_fac=1.0, make_plots=False, store_chains=False): """ - print('Starting KDE estimation') + logger.info('Starting KDE estimation') # Init kde(self) @@ -271,7 +272,7 @@ def run_kde(self, bw_fac=1.0, make_plots=False, store_chains=False): kde_samps = pd.DataFrame(self.kde.samples, columns=self.kde.par_names) kde_samps.to_csv(self._get_outpath(f'kde_chains_{self.ID}.csv'), index=False) - + @log(logger) def run_asy_peakbag(self, norders, make_plots=False, store_chains=False, method='emcee', developer_mode=False): @@ -300,7 +301,7 @@ def run_asy_peakbag(self, norders, make_plots=False, """ - print('Starting asymptotic peakbagging') + logger.info('Starting asymptotic peakbagging') # Init asymptotic_fit(self, norders=norders) @@ -326,7 +327,7 @@ def run_asy_peakbag(self, norders, make_plots=False, asy_samps = pd.DataFrame(self.asy_fit.samples, columns=self.asy_fit.par_names) asy_samps.to_csv(self._get_outpath(f'asymptotic_fit_chains_{self.ID}.csv'), index=False) - + @log(logger) def run_peakbag(self, model_type='simple', tune=1500, nthreads=1, make_plots=False, store_chains=False): """ Run all steps involving peakbag. @@ -350,7 +351,7 @@ def run_peakbag(self, model_type='simple', tune=1500, nthreads=1, """ - print('Starting peakbagging') + logger.info('Starting peakbagging') # Init peakbag(self, self.asy_fit) @@ -434,7 +435,6 @@ def __call__(self, bw_fac=1.0, norders=8, model_type='simple', tune=1500, # self.remove_file_handler() -@log(logger) def _querySimbad(ID): """ Query any ID at Simbad for Gaia DR2 source ID. 
@@ -474,7 +474,6 @@ def _querySimbad(ID): return line.replace('Gaia DR2 ', '') return None -@log(logger) def _queryTIC(ID, radius = 20): """ Query TIC for bp-rp value @@ -511,7 +510,6 @@ def _queryTIC(ID, radius = 20): else: return None -@log(logger) def _queryMAST(ID): """ Query any ID at MAST @@ -540,7 +538,6 @@ def _queryMAST(ID): except: return None -@log(logger) def _queryGaia(ID=None,coords=None, radius = 20): """ Query Gaia archive for bp-rp @@ -593,7 +590,6 @@ def _queryGaia(ID=None,coords=None, radius = 20): else: raise ValueError('No ID or coordinates provided when querying the Gaia archive.') -@log(logger) def _format_name(ID): """ Format input ID From f923aa14d05d8383818e09526daa421422b51533 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 18 Jan 2021 11:13:36 +0000 Subject: [PATCH 17/52] Added jam for recording to log file --- pbjam/jar.py | 58 ++++++++++++++-- pbjam/session.py | 170 ++++++++++++++++++++++------------------------- pbjam/star.py | 88 +++++++++++------------- 3 files changed, 173 insertions(+), 143 deletions(-) diff --git a/pbjam/jar.py b/pbjam/jar.py index 5080add5..744b3e47 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -193,15 +193,20 @@ def __init__(self, filename, level='DEBUG', loggername='pbjam'): self._level = level self._logger = logging.getLogger(loggername) self.handler = None + self._isopen = False def open(self): - self.handler = file_handler(self._filename, level=self._level) - self._logger.addHandler(self.handler) + if not self._isopen: + self.handler = file_handler(self._filename, level=self._level) + self._logger.addHandler(self.handler) + self._isopen = True def close(self): - self._logger.removeHandler(self.handler) - self.handler.close() - self.handler = None + if self._isopen: + self._logger.removeHandler(self.handler) + self.handler.close() + self.handler = None + self._isopen = False def __enter__(self): self.open() @@ -211,6 +216,49 @@ def __exit__(self, type, value, traceback): self.close() +class jam: + """ 
+ Base pbjam class. Currently has a method `record` for recording logs to `log_file`. This can be used as a method + decorator in subclasses, e.g. + + ```python + # pbjam/example.py + import logging + from .jar import jam, file_logging + logger = logging.getLogger(__name__) # here, __name__ == 'pbjam.example' + + class example_class(jam): + def __init__(self): + self.log_file = file_logging('example.log') + + with self.log_file: + # Records content in context to `log_file` + logger.info('Initializing class.') + ... + + @jam.record # records content of `example_method` to `log_file` + def example_method(self): + logger.info('Performing function tasks.') + ... + ``` + + """ + log_file = file_logging('pbjam.log') # Placeholder variable, overwrite in subclass __init__ + + @staticmethod + def record(func): + """ + Decorator for recording logs to `log_file` during function operation, closing the log file upon completion. + """ + @functools.wraps(mthd) + def wrap(self, *args, **kwargs): + self.log_file.open() + result = func(self, *args, **kwargs) + self.log_file.close() + return result + return wrap + + class references(): """ A class for managing references used when running PBjam. diff --git a/pbjam/session.py b/pbjam/session.py index 6f4dfe82..a10c9f24 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -52,7 +52,7 @@ import os, pickle, warnings, logging from .star import star, _format_name from datetime import datetime -from .jar import references, log, file_logging +from .jar import references, log, file_logging, jam logger = logging.getLogger(__name__) logger.debug('Initialised module logger') @@ -407,7 +407,7 @@ def _lk_to_pg(ID, tsIn, specIn): -class session(): +class session(jam): """ Main class used to initiate peakbagging. Use this class to initialize a star class instance for one or more targets. 
@@ -536,77 +536,72 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, cadence=None, campaign=None, sector=None, month=None, quarter=None, mission=None, path=None, download_dir=None): - self.stars = [] - self.references = references() - self.references._addRef(['python', 'pandas', 'numpy', 'astropy', - 'lightkurve']) - - if isinstance(dictlike, (dict, np.recarray, pd.DataFrame, str)): - if isinstance(dictlike, str): - vardf = pd.read_csv(dictlike) - else: - try: - vardf = pd.DataFrame.from_records(dictlike) - except TypeError: - print('Unrecognized type in dictlike. Must be able to convert to dataframe through pandas.DataFrame.from_records()') - - if any([ID, numax, dnu, teff, bp_rp]): - warnings.warn('Dictlike provided as input, ignoring other input fit parameters.') - - _organize_sess_dataframe(vardf) - - elif ID: - vardf = _organize_sess_input(ID=ID, numax=numax, dnu=dnu, teff=teff, - bp_rp=bp_rp, cadence=cadence, - campaign=campaign, sector=sector, - month=month, quarter=quarter, - mission=mission) - - _format_col(vardf, timeseries, 'timeseries') - _format_col(vardf, spectrum, 'spectrum') - else: - raise TypeError('session.__init__ requires either ID or dictlike') - for i in vardf.index: - - lkwargs = {x: vardf.loc[i, x] for x in ['cadence', 'month', - 'sector', 'campaign', - 'quarter', 'mission']} - - vardf.at[i, 'timeseries'] = _lc_to_lk(vardf.loc[i, 'ID'], - vardf.loc[i, 'timeseries'], - vardf.loc[i, 'spectrum'], - download_dir, - use_cached, - lkwargs) - - vardf.at[i,'spectrum'] = _lk_to_pg(vardf.loc[i,'ID'], - vardf.loc[i, 'timeseries'], - vardf.loc[i, 'spectrum']) + self.log_file = file_logging(os.path.join(path, 'session.log'), level='DEBUG') + with self.log_file: + # Records everything in context to the log file + logger.info('Starting session.') + self.stars = [] + self.references = references() + self.references._addRef(['python', 'pandas', 'numpy', 'astropy', + 'lightkurve']) - self.stars.append(star(ID=vardf.loc[i, 'ID'], - 
pg=vardf.loc[i, 'spectrum'], - numax=vardf.loc[i, ['numax', 'numax_err']].values, - dnu=vardf.loc[i, ['dnu', 'dnu_err']].values, - teff=vardf.loc[i, ['teff', 'teff_err']].values, - bp_rp=vardf.loc[i, ['bp_rp', 'bp_rp_err']].values, - path=path)) - - for i, st in enumerate(self.stars): - if st.numax[0] > st.f[-1]: - warnings.warn("Input numax is greater than Nyquist frequeny for %s" % (st.ID)) - - # def add_file_handler(self): - # logger = logging.getLogger('pbjam') # <--- logs everything under pbjam - # fpath = os.path.join(self.path, 'session.log') - # self.handler = logging.FileHandler(fpath) - # self.handler.setFormatter(HANDLER_FMT) - # logger.addHandler(self.handler) - - # def remove_file_handler(self) - # logger = logging.getLogger('pbjam') - # logger.handlers.remove(self.handler) + if isinstance(dictlike, (dict, np.recarray, pd.DataFrame, str)): + if isinstance(dictlike, str): + vardf = pd.read_csv(dictlike) + else: + try: + vardf = pd.DataFrame.from_records(dictlike) + except TypeError: + print('Unrecognized type in dictlike. 
Must be able to convert to dataframe through pandas.DataFrame.from_records()') + + if any([ID, numax, dnu, teff, bp_rp]): + warnings.warn('Dictlike provided as input, ignoring other input fit parameters.') + + _organize_sess_dataframe(vardf) + + elif ID: + vardf = _organize_sess_input(ID=ID, numax=numax, dnu=dnu, teff=teff, + bp_rp=bp_rp, cadence=cadence, + campaign=campaign, sector=sector, + month=month, quarter=quarter, + mission=mission) + + _format_col(vardf, timeseries, 'timeseries') + _format_col(vardf, spectrum, 'spectrum') + else: + raise TypeError('session.__init__ requires either ID or dictlike') + + for i in vardf.index: + + lkwargs = {x: vardf.loc[i, x] for x in ['cadence', 'month', + 'sector', 'campaign', + 'quarter', 'mission']} + + vardf.at[i, 'timeseries'] = _lc_to_lk(vardf.loc[i, 'ID'], + vardf.loc[i, 'timeseries'], + vardf.loc[i, 'spectrum'], + download_dir, + use_cached, + lkwargs) + + vardf.at[i,'spectrum'] = _lk_to_pg(vardf.loc[i,'ID'], + vardf.loc[i, 'timeseries'], + vardf.loc[i, 'spectrum']) + + self.stars.append(star(ID=vardf.loc[i, 'ID'], + pg=vardf.loc[i, 'spectrum'], + numax=vardf.loc[i, ['numax', 'numax_err']].values, + dnu=vardf.loc[i, ['dnu', 'dnu_err']].values, + teff=vardf.loc[i, ['teff', 'teff_err']].values, + bp_rp=vardf.loc[i, ['bp_rp', 'bp_rp_err']].values, + path=path)) + + for i, st in enumerate(self.stars): + if st.numax[0] > st.f[-1]: + warnings.warn("Input numax is greater than Nyquist frequeny for %s" % (st.ID)) + @jam.record def __call__(self, bw_fac=1, norders=8, model_type='simple', tune=1500, nthreads=1, verbose=False, make_plots=False, store_chains=False, asy_sampling='emcee', developer_mode=False): @@ -646,28 +641,25 @@ def __call__(self, bw_fac=1, norders=8, model_type='simple', tune=1500, the prior sample. Important: This is not good practice for getting science results! 
""" - # self.add_file_handler() # <--- conder changing this to a "with" statement for safe closing - with file_logging(os.path.join(self.path, 'session.log')): - self.pb_model_type = model_type + self.pb_model_type = model_type - for i, st in enumerate(self.stars): - try: - st(bw_fac=bw_fac, tune=tune, norders=norders, - model_type=self.pb_model_type, make_plots=make_plots, - store_chains=store_chains, nthreads=nthreads, - asy_sampling=asy_sampling, developer_mode=developer_mode) - - self.references._reflist += st.references._reflist - - self.stars[i] = None + for i, st in enumerate(self.stars): + try: + st(bw_fac=bw_fac, tune=tune, norders=norders, + model_type=self.pb_model_type, make_plots=make_plots, + store_chains=store_chains, nthreads=nthreads, + asy_sampling=asy_sampling, developer_mode=developer_mode) + + self.references._reflist += st.references._reflist - # Crude way to send error messages that occur in star up to Session - # without ending the session. Is there a better way? - except Exception as ex: - message = "Star {0} produced an exception of type {1} occurred. Arguments:\n{2!r}".format(st.ID, type(ex).__name__, ex.args) - print(message) + self.stars[i] = None + + # Crude way to send error messages that occur in star up to Session + # without ending the session. Is there a better way? + except Exception as ex: + message = "Star {0} produced an exception of type {1} occurred. 
Arguments:\n{2!r}".format(st.ID, type(ex).__name__, ex.args) + print(message) - # self.remove_file_handler() def _load_fits(files, mission): """ Read fitsfiles into a Lightkurve object diff --git a/pbjam/star.py b/pbjam/star.py index 034a9eef..ae10054f 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -28,13 +28,13 @@ import astropy.units as units import logging -from .jar import log, file_logging +from .jar import log, file_logging, jam logger = logging.getLogger(__name__) # For module-level logging logger.debug('Initialized module logger.') -class star(plotting): +class star(plotting, jam): """ Class for each star to be peakbagged Additional attributes are added for each step of the peakbagging process @@ -88,34 +88,35 @@ def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], path=None, prior_file=None): self.ID = ID - logger.info(f"Initializing star with ID {self.ID}.") + self._set_outpath(path) + self.log_file = file_logging(os.path.join(self.path, 'star.log'), level='DEBUG') + with self.log_file: + logger.info(f"Initializing star with ID {self.ID}.") - if numax[0] < 25: - warnings.warn('The input numax is less than 25. The prior is not well defined here, so be careful with the result.') - self.numax = numax - self.dnu = dnu + if numax[0] < 25: + warnings.warn('The input numax is less than 25. 
The prior is not well defined here, so be careful with the result.') + self.numax = numax + self.dnu = dnu - self.references = references() - self.references._addRef(['numpy', 'python', 'lightkurve', 'astropy']) - - teff, bp_rp = self._checkTeffBpRp(teff, bp_rp) - self.teff = teff - self.bp_rp = bp_rp + self.references = references() + self.references._addRef(['numpy', 'python', 'lightkurve', 'astropy']) + + teff, bp_rp = self._checkTeffBpRp(teff, bp_rp) + self.teff = teff + self.bp_rp = bp_rp - self.pg = pg.flatten() # in case user supplies unormalized spectrum - self.f = self.pg.frequency.value - self.s = self.pg.power.value + self.pg = pg.flatten() # in case user supplies unormalized spectrum + self.f = self.pg.frequency.value + self.s = self.pg.power.value - self._obs = {'dnu': self.dnu, 'numax': self.numax, 'teff': self.teff, - 'bp_rp': self.bp_rp} - self._log_obs = {x: to_log10(*self._obs[x]) for x in self._obs.keys() if x != 'bp_rp'} + self._obs = {'dnu': self.dnu, 'numax': self.numax, 'teff': self.teff, + 'bp_rp': self.bp_rp} + self._log_obs = {x: to_log10(*self._obs[x]) for x in self._obs.keys() if x != 'bp_rp'} - self._set_outpath(path) - - if prior_file is None: - self.prior_file = get_priorpath() - else: - self.prior_file = prior_file + if prior_file is None: + self.prior_file = get_priorpath() + else: + self.prior_file = prior_file def _checkTeffBpRp(self, teff, bp_rp): """ Set the Teff and/or bp_rp values @@ -227,7 +228,7 @@ def _set_outpath(self, path): # message = "Could not create directory for Star {0} because an exception of type {1} occurred. Arguments:\n{2!r}".format(self.ID, type(ex).__name__, ex.args) logger.exception(f"Could not create directory for star {self.ID}.") - + @jam.record @log(logger) def run_kde(self, bw_fac=1.0, make_plots=False, store_chains=False): """ Run all steps involving KDE. 
@@ -271,7 +272,8 @@ def run_kde(self, bw_fac=1.0, make_plots=False, store_chains=False): if store_chains: kde_samps = pd.DataFrame(self.kde.samples, columns=self.kde.par_names) kde_samps.to_csv(self._get_outpath(f'kde_chains_{self.ID}.csv'), index=False) - + + @jam.record @log(logger) def run_asy_peakbag(self, norders, make_plots=False, store_chains=False, method='emcee', @@ -326,7 +328,8 @@ def run_asy_peakbag(self, norders, make_plots=False, if store_chains: asy_samps = pd.DataFrame(self.asy_fit.samples, columns=self.asy_fit.par_names) asy_samps.to_csv(self._get_outpath(f'asymptotic_fit_chains_{self.ID}.csv'), index=False) - + + @jam.record @log(logger) def run_peakbag(self, model_type='simple', tune=1500, nthreads=1, make_plots=False, store_chains=False): @@ -372,18 +375,8 @@ def run_peakbag(self, model_type='simple', tune=1500, nthreads=1, if store_chains: peakbag_samps = pd.DataFrame(self.peakbag.samples, columns=self.peakbag.par_names) peakbag_samps.to_csv(self._get_outpath(f'peakbag_chains_{self.ID}.csv'), index=False) - - # def add_file_handler(self): - # logger = logging.getLogger('pbjam') # <--- logs everything under pbjam - # fpath = os.path.join(self.path, 'star.log') - # self.handler = logging.FileHandler(fpath) - # self.handler.setFormatter(HANDLER_FMT) - # logger.addHandler(self.handler) - - # def remove_file_handler(self) - # logger = logging.getLogger('pbjam') - # logger.handlers.remove(self.handler) + @jam.record def __call__(self, bw_fac=1.0, norders=8, model_type='simple', tune=1500, nthreads=1, make_plots=True, store_chains=False, asy_sampling='emcee', developer_mode=False): @@ -420,20 +413,17 @@ def __call__(self, bw_fac=1.0, norders=8, model_type='simple', tune=1500, the prior sample. Important: This is not good practice for getting science results! 
""" - # self.add_file_handler() - with file_logging(os.path.join(self.path, 'star.log')): - self.run_kde(bw_fac=bw_fac, make_plots=make_plots, store_chains=store_chains) + self.run_kde(bw_fac=bw_fac, make_plots=make_plots, store_chains=store_chains) - self.run_asy_peakbag(norders=norders, make_plots=make_plots, - store_chains=store_chains, method=asy_sampling, - developer_mode=developer_mode) + self.run_asy_peakbag(norders=norders, make_plots=make_plots, + store_chains=store_chains, method=asy_sampling, + developer_mode=developer_mode) - self.run_peakbag(model_type=model_type, tune=tune, nthreads=nthreads, - make_plots=make_plots, store_chains=store_chains) + self.run_peakbag(model_type=model_type, tune=tune, nthreads=nthreads, + make_plots=make_plots, store_chains=store_chains) - self.references._addRef('pandas') + self.references._addRef('pandas') - # self.remove_file_handler() def _querySimbad(ID): """ Query any ID at Simbad for Gaia DR2 source ID. From ce4df7d55754239050b971d02a928f2f39a07b4c Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 18 Jan 2021 13:36:18 +0000 Subject: [PATCH 18/52] Improved logging format --- pbjam/jar.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pbjam/jar.py b/pbjam/jar.py index 744b3e47..d39ec150 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -12,7 +12,7 @@ import functools, logging from contextlib import contextmanager -HANDLER_FMT = logging.Formatter("%(asctime)-15s : %(levelname)-8s : %(name)-17s : %(message)s") +HANDLER_FMT = logging.Formatter("%(asctime)-15s :: %(levelname)-8s :: %(name)-17s :: %(message)s") logger = logging.getLogger(__name__) logger.debug('Initialised module logger') @@ -250,7 +250,7 @@ def record(func): """ Decorator for recording logs to `log_file` during function operation, closing the log file upon completion. 
""" - @functools.wraps(mthd) + @functools.wraps(func) def wrap(self, *args, **kwargs): self.log_file.open() result = func(self, *args, **kwargs) From 8755fd4d7ca4244d01b8488f52b79e8c0dcb3617 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 18 Jan 2021 13:36:27 +0000 Subject: [PATCH 19/52] Fixed tests cleanup --- pbjam/tests/test_asy_peakbag.py | 2 +- pbjam/tests/test_star.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/pbjam/tests/test_asy_peakbag.py b/pbjam/tests/test_asy_peakbag.py index 7aa8a02c..9ac8b7f5 100644 --- a/pbjam/tests/test_asy_peakbag.py +++ b/pbjam/tests/test_asy_peakbag.py @@ -321,7 +321,7 @@ def test_asymp_spec_model_call(): assert_allclose(mod(inp), mod.model(*inp)) def test_clean_up(): - + os.remove(cs.st.log_file._filename) # Removes log file os.rmdir(cs.st.path) # The test functions below require longer runs and are not suitable for GitHub diff --git a/pbjam/tests/test_star.py b/pbjam/tests/test_star.py index 25e0a7c4..0ba5b07d 100644 --- a/pbjam/tests/test_star.py +++ b/pbjam/tests/test_star.py @@ -41,6 +41,7 @@ def test_star_init(): pbt.assert_hasattributes(st, atts) # cleanup + os.remove(st.log_file._filename) # Remove log file os.rmdir(st.path) def test_outpath(): @@ -68,6 +69,7 @@ def test_outpath(): assert(os.path.isdir(os.path.dirname(func(*inp)))) # cleanup + os.remove(st.log_file._filename) os.rmdir(st.path) def test_set_outpath(): @@ -88,6 +90,7 @@ def test_set_outpath(): # Input tests and clean up assert(os.path.isdir(st.path)) + os.remove(st.log_file._filename) os.rmdir(st.path) inp = [pth] @@ -118,6 +121,7 @@ def test_run_kde(): pbt.does_it_run(func, None) # cleanup + os.remove(st.log_file._filename) os.rmdir(st.path) def test_format_name(): From a991befda2da7ed51ae45e63eb55072808f0bb30 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 18 Jan 2021 14:35:37 +0000 Subject: [PATCH 20/52] Added tests --- pbjam/tests/test_jar.py | 109 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 107 
insertions(+), 2 deletions(-) diff --git a/pbjam/tests/test_jar.py b/pbjam/tests/test_jar.py index 227721bf..16863b00 100644 --- a/pbjam/tests/test_jar.py +++ b/pbjam/tests/test_jar.py @@ -1,9 +1,12 @@ """Tests for the jar module""" -from pbjam.jar import normal, to_log10, get_priorpath, get_percentiles +from pbjam.jar import normal, to_log10, get_priorpath, get_percentiles, file_logging, jam, log import pbjam.tests.pbjam_tests as pbt import numpy as np from numpy.testing import assert_almost_equal, assert_array_equal +import logging, os + +logger = logging.getLogger('pbjam.tests') def test_normal(): """Test for the log of a normal distribution""" @@ -79,4 +82,106 @@ def test_get_percentiles(): inp = [[0,0,0,1,1], 1] assert_array_equal(func(*inp), [0., 0., 1.]) - \ No newline at end of file +def test_jam(): + """Tests subclassing `jam` to use the log file record decorator""" + test_message = 'This should be logged in file.' + + class jam_test(jam): + def __init__(self): + self.log_file = file_logging('test_jam.log') + logger.debug('This should not be logged in file.') + with self.log_file: + # Records content in context to `log_file` + logger.debug(test_message) + + @jam.record # records content of `example_method` to `log_file` + def method(self): + logger.debug(test_message) + + jt = jam_test() + jt.method() + + filename = jt.log_file._filename + with open(filename, 'r') as file_in: + lines = file_in.read().splitlines() + messages = [line.split('::')[-1].strip() for line in lines] + assert(all([message == test_message for message in messages])) + + os.remove(filename) + +def test_file_logging(): + """Test `file_logging` context manager.""" + filename = 'test_file_logging.log' + test_level = 'DEBUG' + log_file = file_logging(filename, level=test_level) + + with log_file: + test_message = 'This should be logged in file.' 
+ logger.debug(test_message) + logger.debug('This should not be logged in file') + + with open(filename, 'r') as file_in: + lines = file_in.read().splitlines() + assert(len(lines) == 1) + + record = lines.pop().split('::') + level = record[1].strip() + assert(level == test_level) + + message = record[-1].strip() + assert(message == test_message) + + os.remove(filename) + +def test_log_debug(): + """Tests `log` decorator debug messages""" + test_message = 'Function in progress.' + + @log(logger) + def log_test(): + logger.debug(test_message) + + filename = 'test_log.log' + log_file = file_logging(filename) + + with log_file: + log_test() + + with open(filename, 'r') as file_in: + lines = file_in.read().splitlines() + + messages = [line.split('::')[-1].strip() for line in lines] + + end = log_test.__qualname__ + '.' + assert(messages[0].startswith('Entering') and messages[0].endswith(end)) + assert(messages[-1].startswith('Exiting') and messages[-1].endswith(end)) + assert(test_message in messages) + + os.remove(filename) + +def test_log_info(): + """Tests `log` decorator with no debug info.""" + + test_message = 'Function in progress.' 
+ + @log(logger) + def log_test(): + logger.debug(test_message) + logger.info(test_message) + logger.warning(test_message) + logger.error(test_message) + logger.critical(test_message) + + filename = 'test_log.log' + log_file = file_logging(filename, level='INFO') # level='INFO' same as console_handler + + with log_file: + log_test() + + with open(filename, 'r') as file_in: + lines = file_in.read().splitlines() + + levels = [line.split('::')[0].strip() for line in lines] + assert('DEBUG' not in levels) + + os.remove(filename) From 102669f5d388f8c07eafb8507c5a058c40d318e6 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Wed, 20 Jan 2021 12:40:13 +0000 Subject: [PATCH 21/52] Added function_logger class --- pbjam/jar.py | 42 ++++++++++++++++++++++++++++++------------ 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/pbjam/jar.py b/pbjam/jar.py index d39ec150..c2d0e7a2 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -9,22 +9,39 @@ import numpy as np from scipy.special import erf -import functools, logging -from contextlib import contextmanager +import functools, logging, inspect HANDLER_FMT = logging.Formatter("%(asctime)-15s :: %(levelname)-8s :: %(name)-17s :: %(message)s") logger = logging.getLogger(__name__) logger.debug('Initialised module logger') -def _entering_function(func, logger): - """ Pre function logging. """ - logger.debug("Entering %s.", func.__qualname__) - # TODO: stuff to check before entering function -def _exiting_function(func, logger): - """ Post function logging. """ - # TODO: stuff to check before exiting function - logger.debug("Exiting %s.", func.__qualname__) +class function_logger: + """ Handlers the logging upon entering and exiting functions. """ + def __init__(self, func, logger): + self.func = func + self.signature = inspect.signature(self.func) + self.logger = logger + self.width = 10 # Width of log message prefix + + def _log_bound_args(self, args, kwargs): + """ Logs bound arguments - `args` and `kwargs` passed to func. 
""" + bargs = self.signature.bind(*args, **kwargs) + self.logger.debug(f"{'Bound args':{self.width}} {dict(bargs.arguments)}") + + def entering_function(self, args, kwargs): + """ Log before function execution. """ + self.logger.debug(f"{'Entering':{self.width}} {self.func.__qualname__}") + self.logger.debug(f"{'Signature':{self.width}} {self.signature}") + self._log_bound_args(args, kwargs) + # TODO: stuff to check before entering function + + def exiting_function(self, result): + """ Log after function execution. """ + # TODO: stuff to check before exiting function + self.logger.debug(f"{'Returns':{self.width}} {repr(result)}") + self.logger.debug(f"{'Exiting':{self.width}} {self.func.__qualname__}") + def log(logger): """ @@ -115,9 +132,10 @@ def my_mthd(self): def _log(func): @functools.wraps(func) def wrap(*args, **kwargs): - _entering_function(func, logger) + flog = function_logger(func, logger) + flog.entering_function(args, kwargs) result = func(*args, **kwargs) - _exiting_function(func, logger) + flog.exiting_function(result) return result return wrap From 24f99efe7aeb3756f816b429171d868c4b18fdc2 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Wed, 20 Jan 2021 14:57:16 +0000 Subject: [PATCH 22/52] Fixed bug in test --- pbjam/tests/test_jar.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pbjam/tests/test_jar.py b/pbjam/tests/test_jar.py index 16863b00..aea02ca7 100644 --- a/pbjam/tests/test_jar.py +++ b/pbjam/tests/test_jar.py @@ -152,7 +152,7 @@ def log_test(): messages = [line.split('::')[-1].strip() for line in lines] - end = log_test.__qualname__ + '.' 
+ end = log_test.__qualname__ assert(messages[0].startswith('Entering') and messages[0].endswith(end)) assert(messages[-1].startswith('Exiting') and messages[-1].endswith(end)) assert(test_message in messages) From 3a3c6c0a02aa22f1e9a2cebd012e67207c7f8b33 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Wed, 20 Jan 2021 17:29:41 +0000 Subject: [PATCH 23/52] Removed loging from prior and likelihood --- pbjam/priors.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pbjam/priors.py b/pbjam/priors.py index 23fd2f4b..4f1683e2 100644 --- a/pbjam/priors.py +++ b/pbjam/priors.py @@ -228,7 +228,6 @@ def make_kde(self, bw_fac=1.0): var_type='c'*len(self.par_names), bw=bw) - @log(logger) def prior(self, p): """ Calculates the log prior for the initial guess fit. @@ -259,7 +258,6 @@ def prior(self, p): return lp - @log(logger) def likelihood(self, p): """ Calculate likelihood for the initial guess fit From c8559337f185b6a19fdfee56851756827a2b5b7c Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Thu, 21 Jan 2021 19:23:02 +0000 Subject: [PATCH 24/52] Added logs to many functions and tested session --- pbjam/asy_peakbag.py | 17 ++++++++++------- pbjam/ellone.py | 21 ++++++++++++++------- pbjam/jar.py | 21 ++++++++++++--------- pbjam/mcmc.py | 31 +++++++++++++++++++++---------- pbjam/peakbag.py | 33 ++++++++++++++++++++++++--------- pbjam/plotting.py | 9 +++++---- pbjam/priors.py | 33 ++++++++++++++++++++++----------- pbjam/session.py | 42 +++++++++++++++++++++++++++++++++++------- pbjam/star.py | 21 ++++++++++++++------- 9 files changed, 157 insertions(+), 71 deletions(-) diff --git a/pbjam/asy_peakbag.py b/pbjam/asy_peakbag.py index 37b05be0..6632ac6f 100755 --- a/pbjam/asy_peakbag.py +++ b/pbjam/asy_peakbag.py @@ -12,9 +12,12 @@ import pandas as pd import scipy.stats as scist from .plotting import plotting -from .jar import normal +from .jar import normal, log from collections import OrderedDict -import warnings +import warnings, logging + +logger = 
logging.getLogger(__name__) + class asymp_spec_model(): """Class for spectrum model using asymptotic relation. @@ -35,7 +38,7 @@ class asymp_spec_model(): Number of radial order to fit. """ - + # @log(logger) def __init__(self, f, norders): self.f = np.array([f]).flatten() self.norders = int(norders) @@ -326,7 +329,7 @@ class asymptotic_fit(plotting, asymp_spec_model): science results! """ - + # @log(logger) def __init__(self, st, norders=None): self.pg = st.pg @@ -354,7 +357,8 @@ def __init__(self, st, norders=None): self.path = st.path st.asy_fit = self - + + @log(logger) def __call__(self, method, developer_mode): """ Setup, run and parse the asymptotic relation fit. @@ -400,7 +404,6 @@ def __call__(self, method, developer_mode): return {'modeID': self.modeID, 'summary': self.summary} - def prior(self, p): """ Calculates the log prior @@ -514,7 +517,7 @@ def _get_summary_stats(self, fit): return summary - + @log(logger) def get_modeIDs(self, fit, norders): """ Set mode ID in a dataframe diff --git a/pbjam/ellone.py b/pbjam/ellone.py index c4829784..acfeb6d8 100644 --- a/pbjam/ellone.py +++ b/pbjam/ellone.py @@ -31,10 +31,14 @@ from sklearn.preprocessing import MinMaxScaler from sklearn.utils import shuffle as skshuffle import hdbscan as Hdbscan -import warnings +import warnings, logging from .plotting import plotting import astropy.units as units import lightkurve as lk +from .jar import log + +logger = logging.getLogger(__name__) + class ellone(plotting): """ Basic l=1 detection @@ -71,7 +75,7 @@ class ellone(plotting): instead, in which case the l=2,0 modes may be picked up instead of the l=1. 
""" - + # @log(logger) def __init__(self, pbinst=None, f=None, s=None): if pbinst: @@ -101,7 +105,8 @@ def __init__(self, pbinst=None, f=None, s=None): self.hdblabels = None self.hdbX = None self.hdb_clusterN = None - + + @log(logger) def residual(self,): """ Compute the residual after dividing out l=2,0 @@ -129,7 +134,7 @@ def residual(self,): idx = (flad[0] <= self.f) & (self.f <= flad[-1]) res[idx] /= mod[i,:] return res - + def binning(self, nbin): """ Simply mean-binning @@ -177,7 +182,7 @@ def H0test(self, fbin, sbin, nbin, dnu, reject=0.1): idx = k < reject return idx, k - + @log(logger) def H0_inconsistent(self, dnu, Nmax, rejection_level): """ Find bins inconsistent with noise @@ -220,6 +225,7 @@ def H0_inconsistent(self, dnu, Nmax, rejection_level): return nu, N, pH0s + @log(logger) def clustering_preprocess(self, nu, N, limits = (0, 100000)): """ Preprocess the samples before clustering @@ -268,6 +274,7 @@ def span(self, x): return max(x)-min(x) + @log(logger) def clustering(self, nu, N, Nmax, outlier_limit=0.5, cluster_prob=0.9): """ Perform HDBscan clustering @@ -326,8 +333,7 @@ def clustering(self, nu, N, Nmax, outlier_limit=0.5, cluster_prob=0.9): return nus[1:], nstds[1:] - - + @log(logger) def get_ell1(self, dnu): """ Estimate frequency of l=1 modes (p-modes) @@ -380,6 +386,7 @@ def get_ell1(self, dnu): return nul1s, nul1s_std + @log(logger) def __call__(self, dnu, Nmax = 30, rejection_level = 0.1): """ Perform all the steps to estimate l=1 frequencies diff --git a/pbjam/jar.py b/pbjam/jar.py index c2d0e7a2..b2d0d105 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -7,13 +7,13 @@ from . 
import PACKAGEDIR import os import numpy as np +import pandas as pd from scipy.special import erf import functools, logging, inspect HANDLER_FMT = logging.Formatter("%(asctime)-15s :: %(levelname)-8s :: %(name)-17s :: %(message)s") logger = logging.getLogger(__name__) -logger.debug('Initialised module logger') class function_logger: @@ -23,6 +23,7 @@ def __init__(self, func, logger): self.signature = inspect.signature(self.func) self.logger = logger self.width = 10 # Width of log message prefix + self._print_options = dict(precision=4, threshold=10, linewidth=99) # Numpy print options def _log_bound_args(self, args, kwargs): """ Logs bound arguments - `args` and `kwargs` passed to func. """ @@ -31,16 +32,18 @@ def _log_bound_args(self, args, kwargs): def entering_function(self, args, kwargs): """ Log before function execution. """ - self.logger.debug(f"{'Entering':{self.width}} {self.func.__qualname__}") - self.logger.debug(f"{'Signature':{self.width}} {self.signature}") - self._log_bound_args(args, kwargs) + with np.printoptions(**self._print_options): + self.logger.debug(f"{'Entering':{self.width}} {self.func.__qualname__}") + self.logger.debug(f"{'Signature':{self.width}} {self.signature}") + self._log_bound_args(args, kwargs) # TODO: stuff to check before entering function def exiting_function(self, result): """ Log after function execution. 
""" # TODO: stuff to check before exiting function - self.logger.debug(f"{'Returns':{self.width}} {repr(result)}") - self.logger.debug(f"{'Exiting':{self.width}} {self.func.__qualname__}") + with np.printoptions(**self._print_options): + self.logger.debug(f"{'Returns':{self.width}} {repr(result)}") + self.logger.debug(f"{'Exiting':{self.width}} {self.func.__qualname__}") def log(logger): @@ -151,13 +154,13 @@ def __init__(self, level='NOTSET', **kwargs): class stream_handler(_handler, logging.StreamHandler): def __init__(self, level='INFO', **kwargs): - super().__init__(level=level, **kwargs) + super(stream_handler, self).__init__(level=level, **kwargs) class file_handler(_handler, logging.FileHandler): def __init__(self, filename, level='DEBUG', **kwargs): - super().__init__(filename=filename, level=level, **kwargs) - + super(file_handler, self).__init__(filename=filename, level=level, **kwargs) + class file_logging: """ diff --git a/pbjam/mcmc.py b/pbjam/mcmc.py index 6a7fcd61..3688f787 100644 --- a/pbjam/mcmc.py +++ b/pbjam/mcmc.py @@ -10,7 +10,12 @@ import scipy.stats as st import cpnest.model import pandas as pd -import os +import os, logging + +from .jar import log + +logger = logging.getLogger(__name__) + class mcmc(): """ Class for MCMC sampling using `emcee' @@ -50,7 +55,7 @@ class mcmc(): Acceptance fraction at each step. 
""" - + # @log(logger) def __init__(self, start, likelihood, prior, nwalkers=50): self.start = start @@ -69,6 +74,9 @@ def __init__(self, start, likelihood, prior, nwalkers=50): self.flatlnlike = None self.acceptance = None + def __repr__(self): + return f'' + def logpost(self, p): """ Evaluate the likelihood and prior @@ -116,7 +124,7 @@ def stationarity(self, nfactor=20): converged = np.all(tau * nfactor < self.sampler.iteration) return converged - + @log(logger) def __call__(self, max_iter=20000, spread=1e-4, start_samples=[]): """ Initialize and run the EMCEE afine invariant sampler @@ -165,15 +173,15 @@ def __call__(self, max_iter=20000, spread=1e-4, start_samples=[]): pos, prob, state = self.sampler.run_mcmc(initial_state=pos, nsteps=nsteps) while not self.stationarity(): pos, prob, state = self.sampler.run_mcmc(initial_state=pos, nsteps=nsteps) - print(f'Steps taken: {self.sampler.iteration}') + logger.info(f'Steps taken: {self.sampler.iteration}') if self.sampler.iteration == max_iter: break if self.sampler.iteration < max_iter: - print(f'Chains reached stationary state after {self.sampler.iteration} iterations.') + logger.info(f'Chains reached stationary state after {self.sampler.iteration} iterations.') elif self.sampler.iteration == max_iter: - print(f'Sampler stopped at {max_iter} (maximum). Chains did not necessarily reach a stationary state.') + logger.warning(f'Sampler stopped at {max_iter} (maximum). 
Chains did not necessarily reach a stationary state.') else: - print('Unhandled exception') + logger.error('Unhandled exception') # Fold in low AR chains and run a little bit to update emcee self.fold(pos, spread=spread) @@ -199,7 +207,7 @@ def __call__(self, max_iter=20000, spread=1e-4, start_samples=[]): return self.flatchain - + # @log(logger) def fold(self, pos, accept_lim = 0.2, spread=0.1): """ Fold low acceptance walkers into main distribution @@ -265,7 +273,7 @@ class nested(cpnest.model.Model): Function that will return the log prior when called as prior(params) """ - + # @log(logger) def __init__(self, names, bounds, likelihood, prior, path): self.names=names self.bounds=bounds @@ -276,6 +284,8 @@ def __init__(self, names, bounds, likelihood, prior, path): if not os.path.isdir(self.path): os.mkdir(self.path) + def __repr__(self): + return f'' def log_likelihood(self, param): """ Wrapper for log likelihood """ @@ -286,6 +296,7 @@ def log_prior(self,p): if not self.in_bounds(p): return -np.inf return self.prior(p.values) + @log(logger) def __call__(self, nlive=100, nthreads=1, maxmcmc=100, poolsize=100): """ Runs the nested sampling @@ -316,4 +327,4 @@ def __call__(self, nlive=100, nthreads=1, maxmcmc=100, poolsize=100): self.samples = pd.DataFrame(self.nest.get_posterior_samples())[self.names] self.flatchain = self.samples.values self.acceptance = None - return self.samples \ No newline at end of file + return self.samples diff --git a/pbjam/peakbag.py b/pbjam/peakbag.py index 860a02fb..f35f0ded 100644 --- a/pbjam/peakbag.py +++ b/pbjam/peakbag.py @@ -7,8 +7,12 @@ import numpy as np import pymc3 as pm -import warnings +import warnings, logging from .plotting import plotting +from .jar import log + +logger = logging.getLogger(__name__) + class peakbag(plotting): """ Class for the final peakbagging. @@ -62,7 +66,7 @@ class peakbag(plotting): See asy_peakbag asymptotic_fit for more details. 
""" - + # @log(logger) def __init__(self, starinst, init=True, path=None, verbose=False): self.pg = starinst.pg @@ -79,7 +83,10 @@ def __init__(self, starinst, init=True, path=None, verbose=False): starinst.peakbag = self + def __repr__(self): + return '' + @log(logger) def make_start(self): """ Set the starting model for peakbag @@ -111,6 +118,7 @@ def make_start(self): self.n = np.linspace(0.0, 1.0, len(self.start['l0']))[:, None] + @log(logger) def remove_outsiders(self, l0, l2): """ Drop outliers @@ -130,6 +138,7 @@ def remove_outsiders(self, l0, l2): sel = np.where(np.logical_and(l0 < self.f.max(), l0 > self.f.min())) return l0[sel], l2[sel] + @log(logger) def trim_ladder(self, lw_fac=10, extra=0.01, verbose=False): """ Turns mode frequencies into list of pairs @@ -156,18 +165,23 @@ def trim_ladder(self, lw_fac=10, extra=0.01, verbose=False): w = d02_lw + (extra * 10**self.asy_fit.summary.loc['dnu', 'mean']) bw = self.f[1] - self.f[0] w /= bw - if verbose: - print(f'w = {int(w)}') - print(f'bw = {bw}') + # if verbose: + # print(f'w = {int(w)}') + # print(f'bw = {bw}') + logger.debug(f'w = {int(w)}') + logger.debug(f'bw = {bw}') + ladder_trim_f = np.zeros([len(self.start['l0']), int(w)]) ladder_trim_s = np.zeros([len(self.start['l0']), int(w)]) for idx, freq in enumerate(self.start['l0']): loc_mid_02 = np.argmin(np.abs(self.f - (freq - d02/2.0))) if loc_mid_02 == 0: warnings.warn('Did not find optimal pair location') - if verbose: - print(f'loc_mid_02 = {loc_mid_02}') - print(f'w/2 = {int(w/2)}') + # if verbose: + # print(f'loc_mid_02 = {loc_mid_02}') + # print(f'w/2 = {int(w/2)}') + logger.debug(f'loc_mid_02 = {loc_mid_02}') + logger.debug(f'w/2 = {int(w/2)}') ladder_trim_f[idx, :] = \ self.f[loc_mid_02 - int(w/2): loc_mid_02 - int(w/2) + int(w)] ladder_trim_s[idx, :] = \ @@ -236,6 +250,7 @@ def model(self, l0, l2, width0, width2, height0, height2, back): mod += self.lor(l2, width2, height2) return mod.T + @log(logger) def init_model(self, model_type): """ 
Initialize the pymc3 model for peakbag @@ -339,7 +354,7 @@ def _addPPRatio(self): self.summary.at[idx, 'log_ppr'] = log_ppr[idx] - + @log(logger) def __call__(self, model_type='simple', tune=1500, nthreads=1, maxiter=4, advi=False): """ Perform all the steps in peakbag. diff --git a/pbjam/plotting.py b/pbjam/plotting.py index 45b34520..875e2fcf 100644 --- a/pbjam/plotting.py +++ b/pbjam/plotting.py @@ -16,7 +16,6 @@ from .jar import log logger = logging.getLogger(__name__) # For module-level logging -logger.debug('Initialized module logger.') class plotting: @@ -30,7 +29,7 @@ class plotting: called from. """ - @log(logger) + # @log(logger) def __init__(self): pass @@ -160,6 +159,7 @@ def plot_echelle(self, pg=None, path=None, ID=None, savefig=False): return fig + @log(logger) def plot_corner(self, path=None, ID=None, savefig=False): """ Make corner plot of result. @@ -195,6 +195,7 @@ def plot_corner(self, path=None, ID=None, savefig=False): return fig + @log(logger) def plot_spectrum(self, pg=None, path=None, ID=None, savefig=False): """ Plot the power spectrum @@ -428,8 +429,7 @@ def _make_prior_corner(self, df, numax_rng = 100): return crnr, crnr.get_axes() - - + @log(logger) def plot_prior(self, path=None, ID=None, savefig=False): """ Corner of result in relation to prior sample. @@ -481,6 +481,7 @@ def plot_prior(self, path=None, ID=None, savefig=False): return crnr + @log(logger) def plot_start(self): """ Plot starting point for peakbag diff --git a/pbjam/priors.py b/pbjam/priors.py index 4f1683e2..53f4ac4e 100644 --- a/pbjam/priors.py +++ b/pbjam/priors.py @@ -56,7 +56,7 @@ class kde(plotting): to compute the KDE. 
Default is to use pbjam/data/prior_data.csv """ - @log(logger) + # @log(logger) def __init__(self, starinst=None, prior_file=None): if starinst: @@ -78,6 +78,9 @@ def __init__(self, starinst=None, prior_file=None): self.verbose = False + def __repr__(self): + return f'' + @log(logger) def select_prior_data(self, numax=None, KDEsize = 100): """ Selects useful prior data based on proximity to estimated numax. @@ -154,7 +157,9 @@ def _prior_size_check(self, pdata, numax, KDEsize): idx = np.abs(pdata.numax.values - numax[0]) < nsigma * numax[1] if not flag_warn: - warnings.warn(f'Only {len(pdata[idx])} star(s) near provided numax. ' + + # warnings.warn(f'Only {len(pdata[idx])} star(s) near provided numax. ' + + # f'Trying to expand the range to include ~{KDEsize} stars.') + logger.warning(f'Only {len(pdata[idx])} star(s) near provided numax. ' + f'Trying to expand the range to include ~{KDEsize} stars.') flag_warn = True @@ -169,12 +174,13 @@ def _prior_size_check(self, pdata, numax, KDEsize): raise ValueError('No prior targets found within range of target. This might mean no prior samples exist for stars like this, consider increasing the uncertainty on your numax input.') elif ntgts < KDEsize: - warnings.warn(f'Sample for estimating KDE is less than the requested {KDEsize}.') + # warnings.warn recommend user change their code but logger.warning does not. Which is best here? I think the former - A. Lyttle + # warnings.warn(f'Sample for estimating KDE is less than the requested {KDEsize}.') + warnings.warn(f'Sample size for estimating KDE is {ntgts}, less than the requested {KDEsize}.') # Add user instruction here, e.g. increase numax uncertainty? 
KDEsize = ntgts return pdata.sample(KDEsize, weights=idx, replace=False) - @log(logger) def make_kde(self, bw_fac=1.0): """ Takes the prior data and constructs a KDE function @@ -208,19 +214,23 @@ def make_kde(self, bw_fac=1.0): self.select_prior_data(self._log_obs['numax']) - if self.verbose: - print(f'Selected data set length {len(self.prior_data)}') + # if self.verbose: + # print(f'Selected data set length {len(self.prior_data)}') + logger.debug(f'Selected prior dataset length: {len(self.prior_data)}') if bw_fac != 1: + logger.info('Selecting stars for KDE with user-specified bandwidth.') from statsmodels.nonparametric.bandwidths import select_bandwidth bw = select_bandwidth(self.prior_data[self.par_names].values, bw = 'scott', kernel=None) bw *= bw_fac else: - if self.verbose: - print('Selecting sensible stars for kde') - print(f'Full data set length {len(self.prior_data)}') + # if self.verbose: + # print('Selecting sensible stars for kde') + # print(f'Full data set length {len(self.prior_data)}') + + logger.info('Automatically selecting stars for KDE') bw = 'cv_ml' self.kde = sm.nonparametric.KDEMultivariate( @@ -345,8 +355,9 @@ def kde_sampler(self, nwalkers=50): """ - if self.verbose: - print('Running KDE sampler') + # if self.verbose: + # print('Running KDE sampler') + logger.info('Running KDE sampler') x0 = [self._log_obs['dnu'][0], # log10 dnu self._log_obs['numax'][0], # log10 numax diff --git a/pbjam/session.py b/pbjam/session.py index a10c9f24..a0abf1e2 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -55,7 +55,6 @@ from .jar import references, log, file_logging, jam logger = logging.getLogger(__name__) -logger.debug('Initialised module logger') def _organize_sess_dataframe(vardf): """ Takes input dataframe and tidies it up. 
@@ -537,7 +536,7 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, quarter=None, mission=None, path=None, download_dir=None): - self.log_file = file_logging(os.path.join(path, 'session.log'), level='DEBUG') + self.log_file = file_logging(os.path.join(path or os.getcwd(), 'session.log'), level='DEBUG', loggername='pbjam.session') with self.log_file: # Records everything in context to the log file logger.info('Starting session.') @@ -545,7 +544,7 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, self.references = references() self.references._addRef(['python', 'pandas', 'numpy', 'astropy', 'lightkurve']) - + if isinstance(dictlike, (dict, np.recarray, pd.DataFrame, str)): if isinstance(dictlike, str): vardf = pd.read_csv(dictlike) @@ -553,10 +552,13 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, try: vardf = pd.DataFrame.from_records(dictlike) except TypeError: + # TODO: Shouldn't this raise an exception? print('Unrecognized type in dictlike. 
Must be able to convert to dataframe through pandas.DataFrame.from_records()') + if any([ID, numax, dnu, teff, bp_rp]): - warnings.warn('Dictlike provided as input, ignoring other input fit parameters.') + # warnings.warn('Dictlike provided as input, ignoring other input fit parameters.') + logger.warning('Dictlike provided as input, ignoring other input fit parameters.') _organize_sess_dataframe(vardf) @@ -572,6 +574,12 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, else: raise TypeError('session.__init__ requires either ID or dictlike') + with pd.option_context( + 'display.max_rows', None, 'display.max_columns', None, + 'expand_frame_repr', False, 'max_colwidth', 15 + ): + logger.debug('Input DataFrame:\n' + str(vardf)) + for i in vardf.index: lkwargs = {x: vardf.loc[i, x] for x in ['cadence', 'month', @@ -589,6 +597,7 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, vardf.loc[i, 'timeseries'], vardf.loc[i, 'spectrum']) + logger.debug(f'Adding star with ID {repr(vardf.loc[i, "ID"])}') self.stars.append(star(ID=vardf.loc[i, 'ID'], pg=vardf.loc[i, 'spectrum'], numax=vardf.loc[i, ['numax', 'numax_err']].values, @@ -600,8 +609,24 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, for i, st in enumerate(self.stars): if st.numax[0] > st.f[-1]: warnings.warn("Input numax is greater than Nyquist frequeny for %s" % (st.ID)) - + + def __repr__(self): + """ Repr for the `session` class. Displays up to 3 IDs from stars in the session. """ + n_stars = len(self.stars) + max_IDs = 3 # Max IDs to display + if n_stars == 1: + ID = repr(self.stars[0].ID) + else: + ID = '[' + _ID = [repr(star.ID) for _, star in zip(range(max_IDs), self.stars)] + ID += ', '.join(_ID) + if n_stars > max_IDs: + ID += ' ...' 
+ ID += ']' + return f'' + @jam.record + @log(logger) def __call__(self, bw_fac=1, norders=8, model_type='simple', tune=1500, nthreads=1, verbose=False, make_plots=False, store_chains=False, asy_sampling='emcee', developer_mode=False): @@ -656,9 +681,12 @@ def __call__(self, bw_fac=1, norders=8, model_type='simple', tune=1500, # Crude way to send error messages that occur in star up to Session # without ending the session. Is there a better way? + # Yes: logging using `logger.exception` - logs full traceback! except Exception as ex: - message = "Star {0} produced an exception of type {1} occurred. Arguments:\n{2!r}".format(st.ID, type(ex).__name__, ex.args) - print(message) + # message = "Star {0} produced an exception of type {1} occurred. Arguments:\n{2!r}".format(st.ID, type(ex).__name__, ex.args) + # print(message) + logger.exception(f"{st} failed due to the following exception, continuing to the next star.") + def _load_fits(files, mission): diff --git a/pbjam/star.py b/pbjam/star.py index b92fe2ec..c5a7d16b 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -31,7 +31,6 @@ from .jar import log, file_logging, jam logger = logging.getLogger(__name__) # For module-level logging -logger.debug('Initialized module logger.') class star(plotting, jam): @@ -74,6 +73,11 @@ class star(plotting, jam): prior_file : str, optional Path to the csv file containing the prior data. Default is pbjam/data/prior_data.csv + level : str, optional + Level at which logs will be recorded to a log file called 'star.log' at + `path`. Default is 'DEBUG' (recommended). Choose from 'DEBUG', 'INFO', + 'WARNING', 'ERROR' and 'CRITICAL'. All logs at levels including and + following `logging_level` will be recorded to the file. 
Attributes ---------- @@ -83,15 +87,15 @@ class star(plotting, jam): power spectrum """ - @log(logger) + # @log(logger) def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], - path=None, prior_file=None): + path=None, prior_file=None, level='DEBUG'): self.ID = ID self._set_outpath(path) - self.log_file = file_logging(os.path.join(self.path, 'star.log'), level='DEBUG') + self.log_file = file_logging(os.path.join(self.path, 'star.log'), level=level) with self.log_file: - logger.info(f"Initializing star with ID {self.ID}.") + logger.info(f"Initializing star with ID {repr(self.ID)}.") if numax[0] < 25: warnings.warn('The input numax is less than 25. The prior is not well defined here, so be careful with the result.') @@ -118,6 +122,9 @@ def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], else: self.prior_file = prior_file + def __repr__(self): + return f'' + def _checkTeffBpRp(self, teff, bp_rp): """ Set the Teff and/or bp_rp values @@ -557,7 +564,7 @@ def _queryGaia(ID=None, coords=None, radius=2): logger.debug('Querying Gaia archive for bp-rp values.') if ID is not None: - print('Querying Gaia archive for bp-rp values by target ID.') + logger.info('Querying Gaia archive for bp-rp values by target ID.') adql_query = "select * from gaiadr2.gaia_source where source_id=%s" % (ID) try: job = Gaia.launch_job(adql_query).get_results() @@ -567,7 +574,7 @@ def _queryGaia(ID=None, coords=None, radius=2): return float(job['bp_rp'][0]) elif coords is not None: - print('Querying Gaia archive for bp-rp values by target coordinates.') + logger.info('Querying Gaia archive for bp-rp values by target coordinates.') ra = coords.ra.value dec = coords.dec.value adql_query = f"SELECT DISTANCE(POINT('ICRS', {ra}, {dec}), POINT('ICRS', ra, dec)) AS dist, * FROM gaiaedr3.gaia_source WHERE 1=CONTAINS( POINT('ICRS', {ra}, {dec}), CIRCLE('ICRS', ra, dec,{radius})) ORDER BY dist ASC" From 6afb3e02ada953c975ac235489d058ae527d5090 Mon Sep 17 00:00:00 
2001 From: alexlyttle Date: Sun, 24 Jan 2021 12:22:31 +0000 Subject: [PATCH 25/52] Added pretty printer --- pbjam/jar.py | 67 +++++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 56 insertions(+), 11 deletions(-) diff --git a/pbjam/jar.py b/pbjam/jar.py index b2d0d105..94c2435e 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -10,40 +10,85 @@ import pandas as pd from scipy.special import erf -import functools, logging, inspect +import functools, logging, inspect, sys +from pprint import PrettyPrinter, saferepr HANDLER_FMT = logging.Formatter("%(asctime)-15s :: %(levelname)-8s :: %(name)-17s :: %(message)s") logger = logging.getLogger(__name__) +class pretty_printer(PrettyPrinter): + + def _format_ndarray(self, object, stream, indent, allowance, context, level): + write = stream.write + max_width = self._width - indent - allowance + with np.printoptions(linewidth=max_width): + string = repr(object) + + lines = string.split('\n') + string = ('\n' + indent * ' ').join(lines) + write(string) + + def _pprint_ndarray(self, object, stream, indent, allowance, context, level): + self._format_ndarray(object, stream, indent, allowance, context, level) + + PrettyPrinter._dispatch[np.ndarray.__repr__] = _pprint_ndarray + + def _format_dataframe(self, object, stream, indent, allowance, context, level): + write = stream.write + max_width = self._width - indent - allowance + with pd.option_context('display.width', max_width, 'display.max_columns', None): + string = repr(object) + + lines = string.split('\n') + string = f'\n{indent*" "}'.join(lines) + write(string) + + def _pprint_dataframe(self, object, stream, indent, allowance, context, level): + self._format_dataframe(object, stream, indent, allowance, context, level) + + PrettyPrinter._dispatch[pd.DataFrame.__repr__] = _pprint_dataframe + + +_pp_kwargs = {'width': 120} +if sys.version_info[0] == 3 and sys.version_info[1] >= 8: + # 'sort_dicts' kwarg new to Python 3.8 + _pp_kwargs['sort_dicts'] = False + 
+pprinter = pretty_printer(**_pp_kwargs) + + class function_logger: """ Handlers the logging upon entering and exiting functions. """ def __init__(self, func, logger): self.func = func self.signature = inspect.signature(self.func) self.logger = logger - self.width = 10 # Width of log message prefix - self._print_options = dict(precision=4, threshold=10, linewidth=99) # Numpy print options def _log_bound_args(self, args, kwargs): """ Logs bound arguments - `args` and `kwargs` passed to func. """ bargs = self.signature.bind(*args, **kwargs) - self.logger.debug(f"{'Bound args':{self.width}} {dict(bargs.arguments)}") + # self.logger.debug(f"{'Bound args':{self.width}} {dict(bargs.arguments)}") + bargs_dict = dict(bargs.arguments) + self.logger.debug(f"Bound arguments:\n{pprinter.pformat(bargs_dict)}") def entering_function(self, args, kwargs): """ Log before function execution. """ - with np.printoptions(**self._print_options): - self.logger.debug(f"{'Entering':{self.width}} {self.func.__qualname__}") - self.logger.debug(f"{'Signature':{self.width}} {self.signature}") - self._log_bound_args(args, kwargs) + # self.logger.debug(f"{'Entering':{self.width}} {self.func.__qualname__}") + # self.logger.debug(f"{'Signature':{self.width}} {self.signature}") + self.logger.debug(f"Entering {self.func.__qualname__}") + self.logger.debug(f"Signature:\n{self.func.__name__ + str(self.signature)}") + self._log_bound_args(args, kwargs) # TODO: stuff to check before entering function def exiting_function(self, result): """ Log after function execution. 
""" # TODO: stuff to check before exiting function - with np.printoptions(**self._print_options): - self.logger.debug(f"{'Returns':{self.width}} {repr(result)}") - self.logger.debug(f"{'Exiting':{self.width}} {self.func.__qualname__}") + # self.logger.debug(f"{'Returns':{self.width}} {repr(result)}") + # self.logger.debug(f"{'Exiting':{self.width}} {self.func.__qualname__}") + if result is not None: + self.logger.debug(f"Returns:\n{pprinter.pformat(result)}") + self.logger.debug(f"Exiting {self.func.__qualname__}") def log(logger): From f1f071e8eb2f1a5e87bf1360fb5d0235c031e2dd Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sun, 24 Jan 2021 17:58:22 +0000 Subject: [PATCH 26/52] Fixed bug - changed from nested to cpnest --- pbjam/data/pbjam_references.bib | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pbjam/data/pbjam_references.bib b/pbjam/data/pbjam_references.bib index 29a0b293..23c3776d 100644 --- a/pbjam/data/pbjam_references.bib +++ b/pbjam/data/pbjam_references.bib @@ -1,5 +1,5 @@ -@article{nested, +@article{cpnest, title={johnveitch/cpnest: Minor optimisation}, DOI={10.5281/zenodo.835874}, publisher={Zenodo}, From c0628f39ddaf1552ba72e3783971f3b0c1b0a364 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sun, 24 Jan 2021 18:17:33 +0000 Subject: [PATCH 27/52] Bug fix - replaced depricated pm.stats with arviz --- pbjam/peakbag.py | 7 ++++--- requirements.txt | 1 + 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/pbjam/peakbag.py b/pbjam/peakbag.py index f35f0ded..4be92dd5 100644 --- a/pbjam/peakbag.py +++ b/pbjam/peakbag.py @@ -7,6 +7,7 @@ import numpy as np import pymc3 as pm +import arviz as az import warnings, logging from .plotting import plotting from .jar import log @@ -385,9 +386,9 @@ def __call__(self, model_type='simple', tune=1500, nthreads=1, maxiter=4, # REMOVE THIS WHEN pymc3 v3.8 is a bit older. 
try: rhatfunc = pm.diagnostics.gelman_rubin - warnings.warn('pymc3.diagnostics.gelman_rubin is depcrecated; upgrade pymc3 to v3.8 or newer.', DeprecationWarning) + # warnings.warn('pymc3.diagnostics.gelman_rubin is depcrecated; upgrade pymc3 to v3.8 or newer.', DeprecationWarning) except: - rhatfunc = pm.stats.rhat + rhatfunc = az.rhat if advi: @@ -420,7 +421,7 @@ def __call__(self, model_type='simple', tune=1500, nthreads=1, maxiter=4, try: self.summary = pm.summary(self.traces) except: - self.summary = pm.stats.summary(self.traces) + self.summary = az.summary(self.traces) self.par_names = self.summary.index diff --git a/requirements.txt b/requirements.txt index 17bf663c..be59dd01 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,3 +12,4 @@ hdbscan scikit-learn<=0.22.0 nbsphinx cpnest>=0.9.9 +arviz From d56d895c7c68a6e59f908f5ef0548ad8ce2c4ab3 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sun, 24 Jan 2021 18:46:00 +0000 Subject: [PATCH 28/52] Moved arviz funcs to be in pm_model context --- pbjam/peakbag.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/pbjam/peakbag.py b/pbjam/peakbag.py index 4be92dd5..5ee1b0f7 100644 --- a/pbjam/peakbag.py +++ b/pbjam/peakbag.py @@ -414,14 +414,16 @@ def __call__(self, model_type='simple', tune=1500, nthreads=1, maxiter=4, init=self.init_sampler, target_accept=self.target_accept, progressbar=False) - Rhat_max = np.max([v.max() for k, v in rhatfunc(self.traces).items()]) - niter += 1 + + Rhat_max = np.max([v.max() for k, v in rhatfunc(self.traces).items()]) + niter += 1 # REMOVE THIS WHEN pymc3 v3.8 is a bit older - try: - self.summary = pm.summary(self.traces) - except: - self.summary = az.summary(self.traces) + with self.pm_model: + try: + self.summary = pm.summary(self.traces) + except: + self.summary = az.summary(self.traces) self.par_names = self.summary.index From d0d1af70f5f05ba6fbaac158017bbcaac65e4753 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sun, 24 Jan 2021 18:48:55 
+0000 Subject: [PATCH 29/52] Added mason and moved pretty_printer --- pbjam/jar.py | 55 +++++++++++++++++--------------------------------- pbjam/mason.py | 40 ++++++++++++++++++++++++++++++++++++ 2 files changed, 58 insertions(+), 37 deletions(-) create mode 100644 pbjam/mason.py diff --git a/pbjam/jar.py b/pbjam/jar.py index 94c2435e..7841b53b 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -11,45 +11,11 @@ from scipy.special import erf import functools, logging, inspect, sys -from pprint import PrettyPrinter, saferepr +from .mason import pretty_printer -HANDLER_FMT = logging.Formatter("%(asctime)-15s :: %(levelname)-8s :: %(name)-17s :: %(message)s") +HANDLER_FMT = "%(asctime)-15s :: %(levelname)-8s :: %(name)-17s :: %(message)s" logger = logging.getLogger(__name__) - -class pretty_printer(PrettyPrinter): - - def _format_ndarray(self, object, stream, indent, allowance, context, level): - write = stream.write - max_width = self._width - indent - allowance - with np.printoptions(linewidth=max_width): - string = repr(object) - - lines = string.split('\n') - string = ('\n' + indent * ' ').join(lines) - write(string) - - def _pprint_ndarray(self, object, stream, indent, allowance, context, level): - self._format_ndarray(object, stream, indent, allowance, context, level) - - PrettyPrinter._dispatch[np.ndarray.__repr__] = _pprint_ndarray - - def _format_dataframe(self, object, stream, indent, allowance, context, level): - write = stream.write - max_width = self._width - indent - allowance - with pd.option_context('display.width', max_width, 'display.max_columns', None): - string = repr(object) - - lines = string.split('\n') - string = f'\n{indent*" "}'.join(lines) - write(string) - - def _pprint_dataframe(self, object, stream, indent, allowance, context, level): - self._format_dataframe(object, stream, indent, allowance, context, level) - - PrettyPrinter._dispatch[pd.DataFrame.__repr__] = _pprint_dataframe - - _pp_kwargs = {'width': 120} if sys.version_info[0] == 3 and 
sys.version_info[1] >= 8: # 'sort_dicts' kwarg new to Python 3.8 @@ -60,6 +26,8 @@ def _pprint_dataframe(self, object, stream, indent, allowance, context, level): class function_logger: """ Handlers the logging upon entering and exiting functions. """ + + def __init__(self, func, logger): self.func = func self.signature = inspect.signature(self.func) @@ -190,19 +158,32 @@ def wrap(*args, **kwargs): return _log +class _formatter(logging.Formatter): + + indent = 2 + def format(self, *args, **kwargs): + s = super(_formatter, self).format(*args, **kwargs) + lines = s.split('\n') + return ('\n' + ' '*self.indent).join(lines) + + class _handler(logging.Handler): + def __init__(self, level='NOTSET', **kwargs): super().__init__(**kwargs) - self.setFormatter(HANDLER_FMT) + fmt = _formatter(HANDLER_FMT) + self.setFormatter(fmt) self.setLevel(level) class stream_handler(_handler, logging.StreamHandler): + def __init__(self, level='INFO', **kwargs): super(stream_handler, self).__init__(level=level, **kwargs) class file_handler(_handler, logging.FileHandler): + def __init__(self, filename, level='DEBUG', **kwargs): super(file_handler, self).__init__(filename=filename, level=level, **kwargs) diff --git a/pbjam/mason.py b/pbjam/mason.py new file mode 100644 index 00000000..9e798c91 --- /dev/null +++ b/pbjam/mason.py @@ -0,0 +1,40 @@ +# mason.py +# Named after John Landis Mason - inventor of the Mason jar +# This contains functions and classes which make things look pretty +import numpy as np +import pandas as pd +from pprint import PrettyPrinter + + +class pretty_printer(PrettyPrinter): + _dispatch = {} + + def _format_ndarray(self, object, stream, indent, allowance, context, level): + write = stream.write + max_width = self._width - indent - allowance + with np.printoptions(linewidth=max_width): + string = repr(object) + + lines = string.split('\n') + string = ('\n' + indent * ' ').join(lines) + write(string) + + def _pprint_ndarray(self, object, stream, indent, allowance, 
context, level): + self._format_ndarray(object, stream, indent, allowance, context, level) + + _dispatch[np.ndarray.__repr__] = _pprint_ndarray + + def _format_dataframe(self, object, stream, indent, allowance, context, level): + write = stream.write + max_width = self._width - indent - allowance + with pd.option_context('display.width', max_width, 'display.max_columns', None): + string = repr(object) + + lines = string.split('\n') + string = f'\n{indent*" "}'.join(lines) + write(string) + + def _pprint_dataframe(self, object, stream, indent, allowance, context, level): + self._format_dataframe(object, stream, indent, allowance, context, level) + + _dispatch[pd.DataFrame.__repr__] = _pprint_dataframe From 95dc4aed165fa7c42347a3cdb7d1bac886469d6d Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sun, 24 Jan 2021 18:49:03 +0000 Subject: [PATCH 30/52] Moved comment --- pbjam/peakbag.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pbjam/peakbag.py b/pbjam/peakbag.py index 5ee1b0f7..84a101ee 100644 --- a/pbjam/peakbag.py +++ b/pbjam/peakbag.py @@ -418,8 +418,8 @@ def __call__(self, model_type='simple', tune=1500, nthreads=1, maxiter=4, Rhat_max = np.max([v.max() for k, v in rhatfunc(self.traces).items()]) niter += 1 - # REMOVE THIS WHEN pymc3 v3.8 is a bit older with self.pm_model: + # REMOVE THIS WHEN pymc3 v3.8 is a bit older try: self.summary = pm.summary(self.traces) except: From 24cb48db2373609609c591502cbe25cc45aca14f Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sun, 24 Jan 2021 19:22:31 +0000 Subject: [PATCH 31/52] Accounted for pymc3 future warning --- pbjam/peakbag.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/pbjam/peakbag.py b/pbjam/peakbag.py index 84a101ee..555c7ff1 100644 --- a/pbjam/peakbag.py +++ b/pbjam/peakbag.py @@ -8,7 +8,7 @@ import numpy as np import pymc3 as pm import arviz as az -import warnings, logging +import warnings, logging, inspect from .plotting import plotting from .jar 
import log @@ -404,16 +404,26 @@ def __call__(self, model_type='simple', tune=1500, nthreads=1, maxiter=4, else: Rhat_max = 10 niter = 1 + + sample_kwargs = dict(tune=tune * niter, cores=nthreads, + start=self.start, + init=self.init_sampler, + target_accept=self.target_accept, + progressbar=False) + + # To surpress future warning - check back in future + if 'return_inferencedata' in inspect.getfullargspec(pm.sample).kwonlyargs: + sample_kwargs['return_inferencedata'] = False + while Rhat_max > 1.05: if niter > maxiter: warnings.warn('Did not converge!') break + + sample_kwargs['tune'] = tune * niter + with self.pm_model: - self.traces = pm.sample(tune=tune * niter, cores=nthreads, - start=self.start, - init=self.init_sampler, - target_accept=self.target_accept, - progressbar=False) + self.traces = pm.sample(**sample_kwargs) Rhat_max = np.max([v.max() for k, v in rhatfunc(self.traces).items()]) niter += 1 From 72caaaafcaf7de30b25a17baf228c2f064f5849f Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Sun, 24 Jan 2021 19:43:33 +0000 Subject: [PATCH 32/52] Revert logger.warning to warning.warn to pass test --- pbjam/priors.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pbjam/priors.py b/pbjam/priors.py index 53f4ac4e..edff8a70 100644 --- a/pbjam/priors.py +++ b/pbjam/priors.py @@ -157,10 +157,13 @@ def _prior_size_check(self, pdata, numax, KDEsize): idx = np.abs(pdata.numax.values - numax[0]) < nsigma * numax[1] if not flag_warn: - # warnings.warn(f'Only {len(pdata[idx])} star(s) near provided numax. ' + - # f'Trying to expand the range to include ~{KDEsize} stars.') - logger.warning(f'Only {len(pdata[idx])} star(s) near provided numax. ' + + # If this is a use warning, must give user instructions. + # Otherwise, make this a logger.warning + # Maybe user warning if len(pdata[idx]) == 0? + warnings.warn(f'Only {len(pdata[idx])} star(s) near provided numax. 
' + f'Trying to expand the range to include ~{KDEsize} stars.') + # logger.warning(f'Only {len(pdata[idx])} star(s) near provided numax. ' + + # f'Trying to expand the range to include ~{KDEsize} stars.') flag_warn = True if nsigma >= KDEsize: From b625e60cdfcd6c8cb76428e3c079c8c7516fbbc5 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 25 Jan 2021 11:21:01 +0000 Subject: [PATCH 33/52] Updated Python version --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index bb1e0076..c1d6db51 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,8 +5,8 @@ jobs: - name: "Python 3.6 on Linux" python: "3.6" - - name: "Python 3.7 on Linux" - python: "3.7" + - name: "Python 3.7.4 on Linux" + python: "3.7.4" - name: "Python 3.8 on Linux" python: "3.8" From 51ba9262c2e4e44e5f2570452155f14a41c08f2a Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 25 Jan 2021 12:32:14 +0000 Subject: [PATCH 34/52] Simplified file_logger class --- pbjam/jar.py | 97 ++++++++++++++++++++++++++---------------------- pbjam/session.py | 8 ++-- pbjam/star.py | 15 ++++---- 3 files changed, 64 insertions(+), 56 deletions(-) diff --git a/pbjam/jar.py b/pbjam/jar.py index 7841b53b..ab84307f 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -27,7 +27,6 @@ class function_logger: """ Handlers the logging upon entering and exiting functions. """ - def __init__(self, func, logger): self.func = func self.signature = inspect.signature(self.func) @@ -40,20 +39,16 @@ def _log_bound_args(self, args, kwargs): bargs_dict = dict(bargs.arguments) self.logger.debug(f"Bound arguments:\n{pprinter.pformat(bargs_dict)}") - def entering_function(self, args, kwargs): + def _entering_function(self, args, kwargs): """ Log before function execution. 
""" - # self.logger.debug(f"{'Entering':{self.width}} {self.func.__qualname__}") - # self.logger.debug(f"{'Signature':{self.width}} {self.signature}") self.logger.debug(f"Entering {self.func.__qualname__}") self.logger.debug(f"Signature:\n{self.func.__name__ + str(self.signature)}") self._log_bound_args(args, kwargs) # TODO: stuff to check before entering function - def exiting_function(self, result): + def _exiting_function(self, result): """ Log after function execution. """ # TODO: stuff to check before exiting function - # self.logger.debug(f"{'Returns':{self.width}} {repr(result)}") - # self.logger.debug(f"{'Exiting':{self.width}} {self.func.__qualname__}") if result is not None: self.logger.debug(f"Returns:\n{pprinter.pformat(result)}") self.logger.debug(f"Exiting {self.func.__qualname__}") @@ -112,7 +107,6 @@ def my_func(a, b): class myClass: - @log(logger) def __init__(self): logger.debug('Initializing class.') self.a = 1 @@ -149,9 +143,9 @@ def _log(func): @functools.wraps(func) def wrap(*args, **kwargs): flog = function_logger(func, logger) - flog.entering_function(args, kwargs) + flog._entering_function(args, kwargs) result = func(*args, **kwargs) - flog.exiting_function(result) + flog._exiting_function(result) return result return wrap @@ -188,7 +182,7 @@ def __init__(self, filename, level='DEBUG', **kwargs): super(file_handler, self).__init__(filename=filename, level=level, **kwargs) -class file_logging: +class file_logger: """ Context manager for file logging. It logs everything under the `loggername` logger, by default this is the `'pbjam'` logger (i.e. logs everything from the pbjam package). @@ -220,9 +214,9 @@ class file_logging: Examples -------- ```python - from pbjam.jar import file_logging + from pbjam.jar import file_logger - with file_logging('example.log') as flog: + with file_logger('example.log') as flog: # Do some stuff here and it will be logged to 'example.log' ... 
@@ -261,39 +255,9 @@ def __enter__(self): def __exit__(self, type, value, traceback): self.close() - - -class jam: - """ - Base pbjam class. Currently has a method `record` for recording logs to `log_file`. This can be used as a method - decorator in subclasses, e.g. - - ```python - # pbjam/example.py - import logging - from .jar import jam, file_logging - logger = logging.getLogger(__name__) # here, __name__ == 'pbjam.example' - - class example_class(jam): - def __init__(self): - self.log_file = file_logging('example.log') - - with self.log_file: - # Records content in context to `log_file` - logger.info('Initializing class.') - ... - - @jam.record # records content of `example_method` to `log_file` - def example_method(self): - logger.info('Performing function tasks.') - ... - ``` - - """ - log_file = file_logging('pbjam.log') # Placeholder variable, overwrite in subclass __init__ @staticmethod - def record(func): + def listen(func): """ Decorator for recording logs to `log_file` during function operation, closing the log file upon completion. """ @@ -303,7 +267,50 @@ def wrap(self, *args, **kwargs): result = func(self, *args, **kwargs) self.log_file.close() return result - return wrap + return wrap + + +# class jam: +# """ +# Base pbjam class. Currently has a method `record` for recording logs to `log_file`. This can be used as a method +# decorator in subclasses, e.g. + +# ```python +# # pbjam/example.py +# import logging +# from .jar import jam, file_logger +# logger = logging.getLogger(__name__) # here, __name__ == 'pbjam.example' + +# class example_class(jam): +# def __init__(self): +# self.log_file = file_logger('example.log') + +# with self.log_file: +# # Records content in context to `log_file` +# logger.info('Initializing class.') +# ... + +# @jam.record # records content of `example_method` to `log_file` +# def example_method(self): +# logger.info('Performing function tasks.') +# ... 
+# ``` + +# """ +# log_file = file_logger('pbjam.log') # Placeholder variable, overwrite in subclass __init__ + +# @staticmethod +# def record(func): +# """ +# Decorator for recording logs to `log_file` during function operation, closing the log file upon completion. +# """ +# @functools.wraps(func) +# def wrap(self, *args, **kwargs): +# self.log_file.open() +# result = func(self, *args, **kwargs) +# self.log_file.close() +# return result +# return wrap class references(): diff --git a/pbjam/session.py b/pbjam/session.py index a0abf1e2..7d846c99 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -52,7 +52,7 @@ import os, pickle, warnings, logging from .star import star, _format_name from datetime import datetime -from .jar import references, log, file_logging, jam +from .jar import references, log, file_logger logger = logging.getLogger(__name__) @@ -406,7 +406,7 @@ def _lk_to_pg(ID, tsIn, specIn): -class session(jam): +class session(object): """ Main class used to initiate peakbagging. Use this class to initialize a star class instance for one or more targets. 
@@ -536,7 +536,7 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, quarter=None, mission=None, path=None, download_dir=None): - self.log_file = file_logging(os.path.join(path or os.getcwd(), 'session.log'), level='DEBUG', loggername='pbjam.session') + self.log_file = file_logger(os.path.join(path or os.getcwd(), 'session.log'), level='DEBUG', loggername='pbjam.session') with self.log_file: # Records everything in context to the log file logger.info('Starting session.') @@ -625,7 +625,7 @@ def __repr__(self): ID += ']' return f'' - @jam.record + @file_logger.listen @log(logger) def __call__(self, bw_fac=1, norders=8, model_type='simple', tune=1500, nthreads=1, verbose=False, make_plots=False, store_chains=False, diff --git a/pbjam/star.py b/pbjam/star.py index c5a7d16b..039acf6c 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -28,12 +28,12 @@ import astropy.units as units import logging -from .jar import log, file_logging, jam +from .jar import log, file_logger logger = logging.getLogger(__name__) # For module-level logging -class star(plotting, jam): +class star(plotting): """ Class for each star to be peakbagged Additional attributes are added for each step of the peakbagging process @@ -93,7 +93,8 @@ def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], self.ID = ID self._set_outpath(path) - self.log_file = file_logging(os.path.join(self.path, 'star.log'), level=level) + self.log_file = file_logger(os.path.join(self.path, f'{self.ID}.log'), level=level) + # file_logger(os.path.join(self.path, 'star.log'), level=level) with self.log_file: logger.info(f"Initializing star with ID {repr(self.ID)}.") @@ -235,7 +236,7 @@ def _set_outpath(self, path): # message = "Could not create directory for Star {0} because an exception of type {1} occurred. 
Arguments:\n{2!r}".format(self.ID, type(ex).__name__, ex.args) logger.exception(f"Could not create directory for star {self.ID}.") - @jam.record + @file_logger.listen @log(logger) def run_kde(self, bw_fac=1.0, make_plots=False, store_chains=False): """ Run all steps involving KDE. @@ -280,7 +281,7 @@ def run_kde(self, bw_fac=1.0, make_plots=False, store_chains=False): kde_samps = pd.DataFrame(self.kde.samples, columns=self.kde.par_names) kde_samps.to_csv(self._get_outpath(f'kde_chains_{self.ID}.csv'), index=False) - @jam.record + @file_logger.listen @log(logger) def run_asy_peakbag(self, norders, make_plots=False, store_chains=False, method='emcee', @@ -336,7 +337,7 @@ def run_asy_peakbag(self, norders, make_plots=False, asy_samps = pd.DataFrame(self.asy_fit.samples, columns=self.asy_fit.par_names) asy_samps.to_csv(self._get_outpath(f'asymptotic_fit_chains_{self.ID}.csv'), index=False) - @jam.record + @file_logger.listen @log(logger) def run_peakbag(self, model_type='simple', tune=1500, nthreads=1, make_plots=False, store_chains=False): @@ -383,7 +384,7 @@ def run_peakbag(self, model_type='simple', tune=1500, nthreads=1, peakbag_samps = pd.DataFrame(self.peakbag.samples, columns=self.peakbag.par_names) peakbag_samps.to_csv(self._get_outpath(f'peakbag_chains_{self.ID}.csv'), index=False) - @jam.record + @file_logger.listen def __call__(self, bw_fac=1.0, norders=8, model_type='simple', tune=1500, nthreads=1, make_plots=True, store_chains=False, asy_sampling='emcee', developer_mode=False): From bf7d060290af398f939fd9ccf232ab819852870d Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 25 Jan 2021 12:32:22 +0000 Subject: [PATCH 35/52] Updated test --- pbjam/tests/test_jar.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/pbjam/tests/test_jar.py b/pbjam/tests/test_jar.py index aea02ca7..60bba815 100644 --- a/pbjam/tests/test_jar.py +++ b/pbjam/tests/test_jar.py @@ -1,6 +1,6 @@ """Tests for the jar module""" -from pbjam.jar import 
normal, to_log10, get_priorpath, get_percentiles, file_logging, jam, log +from pbjam.jar import normal, to_log10, get_priorpath, get_percentiles, file_logger, log import pbjam.tests.pbjam_tests as pbt import numpy as np from numpy.testing import assert_almost_equal, assert_array_equal @@ -88,13 +88,13 @@ def test_jam(): class jam_test(jam): def __init__(self): - self.log_file = file_logging('test_jam.log') + self.log_file = file_logger('test_jam.log') logger.debug('This should not be logged in file.') with self.log_file: # Records content in context to `log_file` logger.debug(test_message) - @jam.record # records content of `example_method` to `log_file` + @file_logger.record # records content of `example_method` to `log_file` def method(self): logger.debug(test_message) @@ -109,11 +109,11 @@ def method(self): os.remove(filename) -def test_file_logging(): - """Test `file_logging` context manager.""" - filename = 'test_file_logging.log' +def test_file_logger(): + """Test `file_logger` context manager.""" + filename = 'test_file_logger.log' test_level = 'DEBUG' - log_file = file_logging(filename, level=test_level) + log_file = file_logger(filename, level=test_level) with log_file: test_message = 'This should be logged in file.' 
@@ -142,7 +142,7 @@ def log_test(): logger.debug(test_message) filename = 'test_log.log' - log_file = file_logging(filename) + log_file = file_logger(filename) with log_file: log_test() @@ -173,7 +173,7 @@ def log_test(): logger.critical(test_message) filename = 'test_log.log' - log_file = file_logging(filename, level='INFO') # level='INFO' same as console_handler + log_file = file_logger(filename, level='INFO') # level='INFO' same as console_handler with log_file: log_test() From 651af4a820c3059c83c110903c3c35ea63218480 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 25 Jan 2021 12:41:46 +0000 Subject: [PATCH 36/52] Added session identifier attribute --- pbjam/session.py | 42 ++++++++++++++++++++++++------------------ 1 file changed, 24 insertions(+), 18 deletions(-) diff --git a/pbjam/session.py b/pbjam/session.py index 7d846c99..ae5216e7 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -519,7 +519,12 @@ class session(object): download_dir : str, optional Directory to cache lightkurve downloads. Lightkurve will place the fits files in the default lightkurve cache path in your home directory. - + session_ID : str, optional + Session identifier. Default is `'session'`. This is the name given to + the `log_file` for the session. Give this a unique name when running + multiple sessions with the same `path`, otherwise logs will be appended + to the same file. 
+ Attributes ---------- stars : list @@ -533,10 +538,11 @@ class session(object): def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, timeseries=None, spectrum=None, dictlike=None, use_cached=False, cadence=None, campaign=None, sector=None, month=None, - quarter=None, mission=None, path=None, download_dir=None): - - - self.log_file = file_logger(os.path.join(path or os.getcwd(), 'session.log'), level='DEBUG', loggername='pbjam.session') + quarter=None, mission=None, path=None, download_dir=None, + session_ID=None): + + self.session_ID = session_ID or 'session' + self.log_file = file_logger(os.path.join(path or os.getcwd(), f'{self.session_ID}.log'), level='DEBUG', loggername='pbjam.session') with self.log_file: # Records everything in context to the log file logger.info('Starting session.') @@ -611,19 +617,19 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, warnings.warn("Input numax is greater than Nyquist frequeny for %s" % (st.ID)) def __repr__(self): - """ Repr for the `session` class. Displays up to 3 IDs from stars in the session. """ - n_stars = len(self.stars) - max_IDs = 3 # Max IDs to display - if n_stars == 1: - ID = repr(self.stars[0].ID) - else: - ID = '[' - _ID = [repr(star.ID) for _, star in zip(range(max_IDs), self.stars)] - ID += ', '.join(_ID) - if n_stars > max_IDs: - ID += ' ...' - ID += ']' - return f'' + """ Repr for the `session` class. """ + # n_stars = len(self.stars) + # max_IDs = 3 # Max IDs to display + # if n_stars == 1: + # ID = repr(self.stars[0].ID) + # else: + # ID = '[' + # _ID = [repr(star.ID) for _, star in zip(range(max_IDs), self.stars)] + # ID += ', '.join(_ID) + # if n_stars > max_IDs: + # ID += ' ...' 
+ # ID += ']' + return f'' @file_logger.listen @log(logger) From 4d0502c869483b01264e98b7845334a2fedbb385 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 25 Jan 2021 14:35:59 +0000 Subject: [PATCH 37/52] Updated test --- pbjam/tests/test_jar.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pbjam/tests/test_jar.py b/pbjam/tests/test_jar.py index 60bba815..b1e78c2e 100644 --- a/pbjam/tests/test_jar.py +++ b/pbjam/tests/test_jar.py @@ -86,7 +86,7 @@ def test_jam(): """Tests subclassing `jam` to use the log file record decorator""" test_message = 'This should be logged in file.' - class jam_test(jam): + class jam_test: def __init__(self): self.log_file = file_logger('test_jam.log') logger.debug('This should not be logged in file.') @@ -94,7 +94,7 @@ def __init__(self): # Records content in context to `log_file` logger.debug(test_message) - @file_logger.record # records content of `example_method` to `log_file` + @file_logger.listen # records content of `example_method` to `log_file` def method(self): logger.debug(test_message) From dec294fb8261540c48e6610987bacf6a24a9fb57 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 25 Jan 2021 15:41:42 +0000 Subject: [PATCH 38/52] Renamed to printer --- pbjam/{mason.py => printer.py} | 3 --- 1 file changed, 3 deletions(-) rename pbjam/{mason.py => printer.py} (90%) diff --git a/pbjam/mason.py b/pbjam/printer.py similarity index 90% rename from pbjam/mason.py rename to pbjam/printer.py index 9e798c91..e2cb4d46 100644 --- a/pbjam/mason.py +++ b/pbjam/printer.py @@ -1,6 +1,3 @@ -# mason.py -# Named after John Landis Mason - inventor of the Mason jar -# This contains functions and classes which make things look pretty import numpy as np import pandas as pd from pprint import PrettyPrinter From aad9e8e17e898987b1f6a1bbcd9762b9ad5faf36 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 25 Jan 2021 15:42:16 +0000 Subject: [PATCH 39/52] Privatised and tidied up --- pbjam/__init__.py | 4 +- pbjam/jar.py | 
171 +++++++++++++++++++++++++++++++++++++++++----- pbjam/tests/test_jar.py | 24 +++--- 3 files changed, 113 insertions(+), 86 deletions(-) diff --git a/pbjam/jar.py b/pbjam/jar.py index ab84307f..29cbdb9f 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -11,9 +11,10 @@ from scipy.special import erf import functools, logging, inspect, sys -from .mason import pretty_printer +from .printer import pretty_printer -HANDLER_FMT = "%(asctime)-15s :: %(levelname)-8s :: %(name)-17s :: %(message)s" +HANDLER_FMT = "%(asctime)-23s :: %(levelname)-8s :: %(name)-17s :: %(message)s" +INDENT = 60 # Set to length of logger info before `message` or just indent by 2? logger = logging.getLogger(__name__) _pp_kwargs = {'width': 120} @@ -24,7 +25,7 @@ pprinter = pretty_printer(**_pp_kwargs) -class function_logger: +class _function_logger: """ Handlers the logging upon entering and exiting functions. """ def __init__(self, func, logger): @@ -35,7 +36,6 @@ def __init__(self, func, logger): def _log_bound_args(self, args, kwargs): """ Logs bound arguments - `args` and `kwargs` passed to func. """ bargs = self.signature.bind(*args, **kwargs) - # self.logger.debug(f"{'Bound args':{self.width}} {dict(bargs.arguments)}") bargs_dict = dict(bargs.arguments) self.logger.debug(f"Bound arguments:\n{pprinter.pformat(bargs_dict)}") @@ -56,13 +56,14 @@ def _exiting_function(self, result): def log(logger): """ - Function logging decorator. + Function logging decorator. Logs function metadata upon entering and + exiting.
Parameters ---------- logger: logging.Logger - Specify the logger in which to submit entering and exiting logs, highly recommended to be the module-level - logger (see Examples). + Specify the logger in which to submit entering and exiting logs, highly + recommended to be the module-level logger (see Examples). Examples -------- @@ -142,7 +143,7 @@ def my_mthd(self): def _log(func): @functools.wraps(func) def wrap(*args, **kwargs): - flog = function_logger(func, logger) + flog = _function_logger(func, logger) flog._entering_function(args, kwargs) result = func(*args, **kwargs) flog._exiting_function(result) @@ -154,11 +155,10 @@ def wrap(*args, **kwargs): class _formatter(logging.Formatter): - indent = 2 def format(self, *args, **kwargs): s = super(_formatter, self).format(*args, **kwargs) lines = s.split('\n') - return ('\n' + ' '*self.indent).join(lines) + return ('\n' + ' '*INDENT).join(lines) class _handler(logging.Handler): @@ -170,54 +170,52 @@ def __init__(self, level='NOTSET', **kwargs): self.setLevel(level) -class stream_handler(_handler, logging.StreamHandler): +class _stream_handler(_handler, logging.StreamHandler): def __init__(self, level='INFO', **kwargs): - super(stream_handler, self).__init__(level=level, **kwargs) + super(_stream_handler, self).__init__(level=level, **kwargs) -class file_handler(_handler, logging.FileHandler): +class _file_handler(_handler, logging.FileHandler): def __init__(self, filename, level='DEBUG', **kwargs): - super(file_handler, self).__init__(filename=filename, level=level, **kwargs) - + super(_file_handler, self).__init__(filename=filename, level=level, **kwargs) -class file_logger: + +class log_file: """ - Context manager for file logging. It logs everything under the `loggername` logger, by default this is the `'pbjam'` - logger (i.e. logs everything from the pbjam package). + Context manager for file logging. It logs everything under the `loggername` + logger, by default this is the `'pbjam'` logger (i.e. 
logs everything from + the pbjam package). Parameters ---------- filename : str Filename to save the log - level : str, optional Logging level. Default is 'DEBUG'. - loggername : str, optional Name of logger which will send logs to `filename`. Default is `'pbjam'`. Attributes ---------- - handler : pbjam.jar.file_handler + handler : pbjam.jar._file_handler File handler object. Methods ------- open() : Activates file logging process - close() : Safely closes file logging process Examples -------- ```python - from pbjam.jar import file_logger + from pbjam.jar import log_file - with file_logger('example.log') as flog: - # Do some stuff here and it will be logged to 'example.log' + with log_file('example.log') as flog: + # Do some pbjam stuff here and it will be logged to 'example.log' ... # Do some stuff here and it won't be logged to 'example.log' @@ -226,9 +224,7 @@ class file_logger: # Do some stuff here and it will be logged to 'example.log' ... ``` - """ - def __init__(self, filename, level='DEBUG', loggername='pbjam'): self._filename = filename self._level = level @@ -237,18 +233,36 @@ def __init__(self, filename, level='DEBUG', loggername='pbjam'): self._isopen = False def open(self): + """ If log file is not open, creates a file handler at the log level """ if not self._isopen: - self.handler = file_handler(self._filename, level=self._level) + self.handler = _file_handler(self._filename, level=self._level) self._logger.addHandler(self.handler) self._isopen = True def close(self): + """ If log file is open, safely closes the file handler """ if self._isopen: self._logger.removeHandler(self.handler) self.handler.close() self.handler = None self._isopen = False + def get_level(self): + return self._level + + def set_level(self, level): + """ Set the level of the file handler. + + Parameters + ---------- + level : str + Choose from 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL' or + 'NOTSET'. 
+ """ + self._level = level + if self._isopen: + self.handler.setLevel(self._level) + def __enter__(self): self.open() return self @@ -256,10 +270,66 @@ def __enter__(self): def __exit__(self, type, value, traceback): self.close() + +class file_logger: + """ + Creates a `log_file` at `filename` to which logs under `loggername` at + a given `level` are recorded when the file logger is listening. This + class is intended to be sub-classed (see Examples). + + To listen to a method in a sub-class of `file_logger` (i.e. record all logs + which occur during the method execution) decorate the class method with + `@file_logger.listen`. + + Parameters + ---------- + filename : str + Filename to save the log + level : str, optional + Logging level. Default is 'DEBUG'. + loggername : str, optional + Name of logger which will send logs to `filename`. Default is `'pbjam'`. + + Attributes + ---------- + log_file : pbjam.jar.log_file + + Methods + ------- + listen : + Decorator for recording logs in a sub-class method to `log_file`. + + Examples + -------- + + ```python + # pbjam/example.py + from .jar import file_logger + + class example_class(file_logger): + def __init__(self): + super(example_class, self).__init__('example.log', level='INFO') + + with self.log_file: + # Records content in context to `log_file` + logger.info('Initializing class.') + ... + + @file_logger.listen # records content of `example_method` to `log_file` + def example_method(self): + logger.info('Performing function tasks.') + ... + ``` + """ + + def __init__(self, *args, **kwargs): + self.log_file = log_file(*args, **kwargs) + @staticmethod def listen(func): """ - Decorator for recording logs to `log_file` during function operation, closing the log file upon completion. + Decorator for recording logs to `log_file` during function operation, + closing the log file upon completion.
""" @functools.wraps(func) def wrap(self, *args, **kwargs): @@ -270,49 +340,6 @@ def wrap(self, *args, **kwargs): return wrap -# class jam: -# """ -# Base pbjam class. Currently has a method `record` for recording logs to `log_file`. This can be used as a method -# decorator in subclasses, e.g. - -# ```python -# # pbjam/example.py -# import logging -# from .jar import jam, file_logger -# logger = logging.getLogger(__name__) # here, __name__ == 'pbjam.example' - -# class example_class(jam): -# def __init__(self): -# self.log_file = file_logger('example.log') - -# with self.log_file: -# # Records content in context to `log_file` -# logger.info('Initializing class.') -# ... - -# @jam.record # records content of `example_method` to `log_file` -# def example_method(self): -# logger.info('Performing function tasks.') -# ... -# ``` - -# """ -# log_file = file_logger('pbjam.log') # Placeholder variable, overwrite in subclass __init__ - -# @staticmethod -# def record(func): -# """ -# Decorator for recording logs to `log_file` during function operation, closing the log file upon completion. -# """ -# @functools.wraps(func) -# def wrap(self, *args, **kwargs): -# self.log_file.open() -# result = func(self, *args, **kwargs) -# self.log_file.close() -# return result -# return wrap - - class references(): """ A class for managing references used when running PBjam. 
diff --git a/pbjam/tests/test_jar.py b/pbjam/tests/test_jar.py index b1e78c2e..cd19f38a 100644 --- a/pbjam/tests/test_jar.py +++ b/pbjam/tests/test_jar.py @@ -1,6 +1,6 @@ """Tests for the jar module""" -from pbjam.jar import normal, to_log10, get_priorpath, get_percentiles, file_logger, log +from pbjam.jar import normal, to_log10, get_priorpath, get_percentiles, log_file, file_logger, log import pbjam.tests.pbjam_tests as pbt import numpy as np from numpy.testing import assert_almost_equal, assert_array_equal @@ -82,13 +82,13 @@ def test_get_percentiles(): inp = [[0,0,0,1,1], 1] assert_array_equal(func(*inp), [0., 0., 1.]) -def test_jam(): +def test_file_logger(): """Tests subclassing `jam` to use the log file record decorator""" test_message = 'This should be logged in file.' - class jam_test: + class file_logger_test(file_logger): def __init__(self): - self.log_file = file_logger('test_jam.log') + super(file_logger_test, self).__init__(filename='test_jam.log') logger.debug('This should not be logged in file.') with self.log_file: # Records content in context to `log_file` @@ -98,7 +98,7 @@ def __init__(self): def method(self): logger.debug(test_message) - jt = jam_test() + jt = file_logger_test() jt.method() filename = jt.log_file._filename @@ -109,13 +109,13 @@ def method(self): os.remove(filename) -def test_file_logger(): +def test_log_file(): """Test `file_logger` context manager.""" filename = 'test_file_logger.log' test_level = 'DEBUG' - log_file = file_logger(filename, level=test_level) + flog = log_file(filename, level=test_level) - with log_file: + with flog: test_message = 'This should be logged in file.' 
logger.debug(test_message) logger.debug('This should not be logged in file') @@ -142,9 +142,9 @@ def log_test(): logger.debug(test_message) filename = 'test_log.log' - log_file = file_logger(filename) + flog = log_file(filename) - with log_file: + with flog: log_test() with open(filename, 'r') as file_in: @@ -173,9 +173,9 @@ def log_test(): logger.critical(test_message) filename = 'test_log.log' - log_file = file_logger(filename, level='INFO') # level='INFO' same as console_handler + flog = log_file(filename, level='INFO') # level='INFO' same as console_handler - with log_file: + with flog: log_test() with open(filename, 'r') as file_in: From 8b25522d4cdafa3fe5364170e1d10507f969a66c Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 25 Jan 2021 15:42:27 +0000 Subject: [PATCH 40/52] Subclassed with file_logger --- pbjam/plotting.py | 6 +++--- pbjam/session.py | 13 ++++++++++--- pbjam/star.py | 9 +++++---- 3 files changed, 18 insertions(+), 10 deletions(-) diff --git a/pbjam/plotting.py b/pbjam/plotting.py index 875e2fcf..150b54bd 100644 --- a/pbjam/plotting.py +++ b/pbjam/plotting.py @@ -29,9 +29,9 @@ class plotting: called from. """ - # @log(logger) - def __init__(self): - pass + + def __init__(self, *args, **kwargs): + super(plotting, self).__init__(*args, **kwargs) def _save_my_fig(self, fig, figtype, path, ID): """ Save the figure object diff --git a/pbjam/session.py b/pbjam/session.py index ae5216e7..0a565f1e 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -406,7 +406,7 @@ def _lk_to_pg(ID, tsIn, specIn): -class session(object): +class session(file_logger): """ Main class used to initiate peakbagging. Use this class to initialize a star class instance for one or more targets. @@ -524,6 +524,11 @@ class session(object): the `log_file` for the session. Give this a unique name when running multiple sessions with the same `path`, otherwise logs will be appended to the same file. 
+ level : str, optional + Level at which logs will be recorded to a log file called '{session_ID}.log' at + `path`. Default is 'DEBUG' (recommended). Choose from 'DEBUG', 'INFO', + 'WARNING', 'ERROR' and 'CRITICAL'. All logs at levels including and + following `logging_level` will be recorded to the file. Attributes ---------- stars : list @@ -539,10 +544,12 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, timeseries=None, spectrum=None, dictlike=None, use_cached=False, cadence=None, campaign=None, sector=None, month=None, quarter=None, mission=None, path=None, download_dir=None, - session_ID=None): + session_ID=None, level='DEBUG'): self.session_ID = session_ID or 'session' - self.log_file = file_logger(os.path.join(path or os.getcwd(), f'{self.session_ID}.log'), level='DEBUG', loggername='pbjam.session') + logfilename = os.path.join(path or os.getcwd(), f'{self.session_ID}.log') + super(session, self).__init__(filename=logfilename, level=level, loggername='pbjam.session') + with self.log_file: # Records everything in context to the log file logger.info('Starting session.') diff --git a/pbjam/star.py b/pbjam/star.py index 174a33ce..ecb6d069 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -33,7 +33,7 @@ logger = logging.getLogger(__name__) # For module-level logging -class star(plotting): +class star(plotting, file_logger): """ Class for each star to be peakbagged Additional attributes are added for each step of the peakbagging process @@ -87,14 +87,15 @@ class star(plotting): power spectrum """ - # @log(logger) + def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], path=None, prior_file=None, level='DEBUG'): self.ID = ID self._set_outpath(path) - self.log_file = file_logger(os.path.join(self.path, f'{self.ID}.log'), level=level) - # file_logger(os.path.join(self.path, 'star.log'), level=level) + logfilename = os.path.join(self.path, f'{self.ID}.log') + super(star, self).__init__(filename=logfilename, level=level) + with
self.log_file: logger.info(f"Initializing star with ID {repr(self.ID)}.") From 90dc373195790f0d797a43e1c20fb38bb6a62354 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 25 Jan 2021 15:50:46 +0000 Subject: [PATCH 41/52] Updated repr --- pbjam/asy_peakbag.py | 3 +++ pbjam/peakbag.py | 2 +- pbjam/priors.py | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/pbjam/asy_peakbag.py b/pbjam/asy_peakbag.py index 6632ac6f..c3fc96d4 100755 --- a/pbjam/asy_peakbag.py +++ b/pbjam/asy_peakbag.py @@ -358,6 +358,9 @@ def __init__(self, st, norders=None): st.asy_fit = self + def __repr__(self): + return f'' + @log(logger) def __call__(self, method, developer_mode): """ Setup, run and parse the asymptotic relation fit. diff --git a/pbjam/peakbag.py b/pbjam/peakbag.py index 555c7ff1..cc6224b9 100644 --- a/pbjam/peakbag.py +++ b/pbjam/peakbag.py @@ -85,7 +85,7 @@ def __init__(self, starinst, init=True, path=None, verbose=False): starinst.peakbag = self def __repr__(self): - return '' + return '' @log(logger) def make_start(self): diff --git a/pbjam/priors.py b/pbjam/priors.py index edff8a70..1edb2028 100644 --- a/pbjam/priors.py +++ b/pbjam/priors.py @@ -79,7 +79,7 @@ def __init__(self, starinst=None, prior_file=None): self.verbose = False def __repr__(self): - return f'' + return f'' @log(logger) def select_prior_data(self, numax=None, KDEsize = 100): From 30249b873f4746b36f516a4a504d5df384cd89d7 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 25 Jan 2021 15:57:49 +0000 Subject: [PATCH 42/52] Tidied up and replaced print statements --- pbjam/ellone.py | 2 +- pbjam/jar.py | 6 +++--- pbjam/session.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pbjam/ellone.py b/pbjam/ellone.py index acfeb6d8..f3b40362 100644 --- a/pbjam/ellone.py +++ b/pbjam/ellone.py @@ -75,7 +75,7 @@ class ellone(plotting): instead, in which case the l=2,0 modes may be picked up instead of the l=1. 
""" - # @log(logger) + def __init__(self, pbinst=None, f=None, s=None): if pbinst: diff --git a/pbjam/jar.py b/pbjam/jar.py index 29cbdb9f..5f8d5c24 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -10,7 +10,7 @@ import pandas as pd from scipy.special import erf -import functools, logging, inspect, sys +import functools, logging, inspect, sys, warnings from .printer import pretty_printer HANDLER_FMT = "%(asctime)-23s :: %(levelname)-8s :: %(name)-17s :: %(message)s" @@ -393,12 +393,12 @@ def _findBlockEnd(self, string, idx): a += 1 if (i >= len(string[idx:])-1) and (a != 0): - print('Warning: Reached end of bibtex file with no closing curly bracket. Your .bib file may be formatted incorrectly. The reference list may be garbled.') + warnings.warn('Warning: Reached end of bibtex file with no closing curly bracket. Your .bib file may be formatted incorrectly. The reference list may be garbled.') if a ==0: break if string[idx+i] == '{': - print('Warning: Ended on an opening bracket. Your .bib file may be formatted incorrectly.') + warnings.warn('Warning: Ended on an opening bracket. Your .bib file may be formatted incorrectly.') return idx+i diff --git a/pbjam/session.py b/pbjam/session.py index 0a565f1e..af0b3ab4 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -299,7 +299,7 @@ def _format_col(vardf, col, key): np.array([_arr_to_lk(x, y, vardf.loc[i, 'ID'], key)])) vardf[key] = temp else: - print('Unhandled exception') + logger.critical('Unhandled exception.') def _lc_to_lk(ID, tsIn, specIn, download_dir, use_cached, lkwargs): """ Convert time series column in dataframe to lk.LightCurve object @@ -566,7 +566,7 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, vardf = pd.DataFrame.from_records(dictlike) except TypeError: # TODO: Shouldn't this raise an exception? - print('Unrecognized type in dictlike. Must be able to convert to dataframe through pandas.DataFrame.from_records()') + logger.critical('Unrecognized type in dictlike. 
Must be able to convert to dataframe through pandas.DataFrame.from_records()') if any([ID, numax, dnu, teff, bp_rp]): From 47f4a0c0df45950cbd67f45017d05d9649a91e6c Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Mon, 25 Jan 2021 17:09:20 +0000 Subject: [PATCH 43/52] Reverted python version change in .travis.yml --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index c1d6db51..bb1e0076 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,8 +5,8 @@ jobs: - name: "Python 3.6 on Linux" python: "3.6" - - name: "Python 3.7.4 on Linux" - python: "3.7.4" + - name: "Python 3.7 on Linux" + python: "3.7" - name: "Python 3.8 on Linux" python: "3.8" From 822f10e32bd4c91c4528b19ceb1d1313126724d9 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Tue, 2 Feb 2021 17:11:21 +0000 Subject: [PATCH 44/52] Removed comment --- pbjam/session.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/pbjam/session.py b/pbjam/session.py index af0b3ab4..83a0e750 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -625,17 +625,6 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, def __repr__(self): """ Repr for the `session` class. """ - # n_stars = len(self.stars) - # max_IDs = 3 # Max IDs to display - # if n_stars == 1: - # ID = repr(self.stars[0].ID) - # else: - # ID = '[' - # _ID = [repr(star.ID) for _, star in zip(range(max_IDs), self.stars)] - # ID += ', '.join(_ID) - # if n_stars > max_IDs: - # ID += ' ...' 
- # ID += ']' return f'' @file_logger.listen From a0e22a92b31bbae246956aaf2950394721e4f5aa Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Tue, 2 Feb 2021 17:20:52 +0000 Subject: [PATCH 45/52] Renamed log decorator to debug --- pbjam/asy_peakbag.py | 10 +++++----- pbjam/ellone.py | 14 +++++++------- pbjam/jar.py | 10 +++++----- pbjam/mcmc.py | 12 ++++++------ pbjam/peakbag.py | 14 +++++++------- pbjam/plotting.py | 12 ++++++------ pbjam/priors.py | 12 ++++++------ pbjam/session.py | 4 ++-- pbjam/star.py | 10 +++++----- pbjam/tests/test_jar.py | 12 ++++++------ 10 files changed, 55 insertions(+), 55 deletions(-) diff --git a/pbjam/asy_peakbag.py b/pbjam/asy_peakbag.py index c3fc96d4..6ed279fe 100755 --- a/pbjam/asy_peakbag.py +++ b/pbjam/asy_peakbag.py @@ -12,7 +12,7 @@ import pandas as pd import scipy.stats as scist from .plotting import plotting -from .jar import normal, log +from .jar import normal, debug from collections import OrderedDict import warnings, logging @@ -38,7 +38,7 @@ class asymp_spec_model(): Number of radial order to fit. """ - # @log(logger) + # @debug(logger) def __init__(self, f, norders): self.f = np.array([f]).flatten() self.norders = int(norders) @@ -329,7 +329,7 @@ class asymptotic_fit(plotting, asymp_spec_model): science results! """ - # @log(logger) + # @debug(logger) def __init__(self, st, norders=None): self.pg = st.pg @@ -361,7 +361,7 @@ def __init__(self, st, norders=None): def __repr__(self): return f'' - @log(logger) + @debug(logger) def __call__(self, method, developer_mode): """ Setup, run and parse the asymptotic relation fit. 
@@ -520,7 +520,7 @@ def _get_summary_stats(self, fit): return summary - @log(logger) + @debug(logger) def get_modeIDs(self, fit, norders): """ Set mode ID in a dataframe diff --git a/pbjam/ellone.py b/pbjam/ellone.py index f3b40362..f2d9528e 100644 --- a/pbjam/ellone.py +++ b/pbjam/ellone.py @@ -35,7 +35,7 @@ from .plotting import plotting import astropy.units as units import lightkurve as lk -from .jar import log +from .jar import debug logger = logging.getLogger(__name__) @@ -106,7 +106,7 @@ def __init__(self, pbinst=None, f=None, s=None): self.hdbX = None self.hdb_clusterN = None - @log(logger) + @debug(logger) def residual(self,): """ Compute the residual after dividing out l=2,0 @@ -182,7 +182,7 @@ def H0test(self, fbin, sbin, nbin, dnu, reject=0.1): idx = k < reject return idx, k - @log(logger) + @debug(logger) def H0_inconsistent(self, dnu, Nmax, rejection_level): """ Find bins inconsistent with noise @@ -225,7 +225,7 @@ def H0_inconsistent(self, dnu, Nmax, rejection_level): return nu, N, pH0s - @log(logger) + @debug(logger) def clustering_preprocess(self, nu, N, limits = (0, 100000)): """ Preprocess the samples before clustering @@ -274,7 +274,7 @@ def span(self, x): return max(x)-min(x) - @log(logger) + @debug(logger) def clustering(self, nu, N, Nmax, outlier_limit=0.5, cluster_prob=0.9): """ Perform HDBscan clustering @@ -333,7 +333,7 @@ def clustering(self, nu, N, Nmax, outlier_limit=0.5, cluster_prob=0.9): return nus[1:], nstds[1:] - @log(logger) + @debug(logger) def get_ell1(self, dnu): """ Estimate frequency of l=1 modes (p-modes) @@ -386,7 +386,7 @@ def get_ell1(self, dnu): return nul1s, nul1s_std - @log(logger) + @debug(logger) def __call__(self, dnu, Nmax = 30, rejection_level = 0.1): """ Perform all the steps to estimate l=1 frequencies diff --git a/pbjam/jar.py b/pbjam/jar.py index 5f8d5c24..cd2bd2b6 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -54,7 +54,7 @@ def _exiting_function(self, result): self.logger.debug(f"Exiting 
{self.func.__qualname__}") -def log(logger): +def debug(logger): """ Function logging decorator. Logs function metadata upon entering and sexiting. @@ -71,11 +71,11 @@ def log(logger): ```python import logging - from pbjam.jar import log + from pbjam.jar import debug logger = logging.getLogger(__name__) - @log(logger) + @debug(logger) def my_func(a, b): logger.debug('Function in progress.') return a + b @@ -101,7 +101,7 @@ def my_func(a, b): ```python import logging - from pbjam.jar import log + from pbjam.jar import debug logger = logging.getLogger(__name__) @@ -113,7 +113,7 @@ def __init__(self): self.a = 1 self.b = 2 - @log(logger) + @debug(logger) def my_mthd(self): logger.debug('Method in progress.') return self.a + self.b diff --git a/pbjam/mcmc.py b/pbjam/mcmc.py index 3688f787..a8dc8170 100644 --- a/pbjam/mcmc.py +++ b/pbjam/mcmc.py @@ -12,7 +12,7 @@ import pandas as pd import os, logging -from .jar import log +from .jar import debug logger = logging.getLogger(__name__) @@ -55,7 +55,7 @@ class mcmc(): Acceptance fraction at each step. 
""" - # @log(logger) + # @debug(logger) def __init__(self, start, likelihood, prior, nwalkers=50): self.start = start @@ -124,7 +124,7 @@ def stationarity(self, nfactor=20): converged = np.all(tau * nfactor < self.sampler.iteration) return converged - @log(logger) + @debug(logger) def __call__(self, max_iter=20000, spread=1e-4, start_samples=[]): """ Initialize and run the EMCEE afine invariant sampler @@ -207,7 +207,7 @@ def __call__(self, max_iter=20000, spread=1e-4, start_samples=[]): return self.flatchain - # @log(logger) + # @debug(logger) def fold(self, pos, accept_lim = 0.2, spread=0.1): """ Fold low acceptance walkers into main distribution @@ -273,7 +273,7 @@ class nested(cpnest.model.Model): Function that will return the log prior when called as prior(params) """ - # @log(logger) + # @debug(logger) def __init__(self, names, bounds, likelihood, prior, path): self.names=names self.bounds=bounds @@ -296,7 +296,7 @@ def log_prior(self,p): if not self.in_bounds(p): return -np.inf return self.prior(p.values) - @log(logger) + @debug(logger) def __call__(self, nlive=100, nthreads=1, maxmcmc=100, poolsize=100): """ Runs the nested sampling diff --git a/pbjam/peakbag.py b/pbjam/peakbag.py index cc6224b9..41307151 100644 --- a/pbjam/peakbag.py +++ b/pbjam/peakbag.py @@ -10,7 +10,7 @@ import arviz as az import warnings, logging, inspect from .plotting import plotting -from .jar import log +from .jar import debug logger = logging.getLogger(__name__) @@ -67,7 +67,7 @@ class peakbag(plotting): See asy_peakbag asymptotic_fit for more details. 
""" - # @log(logger) + # @debug(logger) def __init__(self, starinst, init=True, path=None, verbose=False): self.pg = starinst.pg @@ -87,7 +87,7 @@ def __init__(self, starinst, init=True, path=None, verbose=False): def __repr__(self): return '' - @log(logger) + @debug(logger) def make_start(self): """ Set the starting model for peakbag @@ -119,7 +119,7 @@ def make_start(self): self.n = np.linspace(0.0, 1.0, len(self.start['l0']))[:, None] - @log(logger) + @debug(logger) def remove_outsiders(self, l0, l2): """ Drop outliers @@ -139,7 +139,7 @@ def remove_outsiders(self, l0, l2): sel = np.where(np.logical_and(l0 < self.f.max(), l0 > self.f.min())) return l0[sel], l2[sel] - @log(logger) + @debug(logger) def trim_ladder(self, lw_fac=10, extra=0.01, verbose=False): """ Turns mode frequencies into list of pairs @@ -251,7 +251,7 @@ def model(self, l0, l2, width0, width2, height0, height2, back): mod += self.lor(l2, width2, height2) return mod.T - @log(logger) + @debug(logger) def init_model(self, model_type): """ Initialize the pymc3 model for peakbag @@ -355,7 +355,7 @@ def _addPPRatio(self): self.summary.at[idx, 'log_ppr'] = log_ppr[idx] - @log(logger) + @debug(logger) def __call__(self, model_type='simple', tune=1500, nthreads=1, maxiter=4, advi=False): """ Perform all the steps in peakbag. 
diff --git a/pbjam/plotting.py b/pbjam/plotting.py index 150b54bd..e1385e79 100644 --- a/pbjam/plotting.py +++ b/pbjam/plotting.py @@ -13,7 +13,7 @@ import astropy.units as u import pandas as pd -from .jar import log +from .jar import debug logger = logging.getLogger(__name__) # For module-level logging @@ -58,7 +58,7 @@ def _save_my_fig(self, fig, figtype, path, ID): outpath = os.path.join(*[path, type(self).__name__+f'_{figtype}_{str(ID)}.png']) fig.savefig(outpath) - @log(logger) + @debug(logger) def plot_echelle(self, pg=None, path=None, ID=None, savefig=False): """ Make echelle plot @@ -159,7 +159,7 @@ def plot_echelle(self, pg=None, path=None, ID=None, savefig=False): return fig - @log(logger) + @debug(logger) def plot_corner(self, path=None, ID=None, savefig=False): """ Make corner plot of result. @@ -195,7 +195,7 @@ def plot_corner(self, path=None, ID=None, savefig=False): return fig - @log(logger) + @debug(logger) def plot_spectrum(self, pg=None, path=None, ID=None, savefig=False): """ Plot the power spectrum @@ -429,7 +429,7 @@ def _make_prior_corner(self, df, numax_rng = 100): return crnr, crnr.get_axes() - @log(logger) + @debug(logger) def plot_prior(self, path=None, ID=None, savefig=False): """ Corner of result in relation to prior sample. @@ -481,7 +481,7 @@ def plot_prior(self, path=None, ID=None, savefig=False): return crnr - @log(logger) + @debug(logger) def plot_start(self): """ Plot starting point for peakbag diff --git a/pbjam/priors.py b/pbjam/priors.py index 1edb2028..e33fb8cb 100644 --- a/pbjam/priors.py +++ b/pbjam/priors.py @@ -12,7 +12,7 @@ import warnings from .plotting import plotting import statsmodels.api as sm -from .jar import get_priorpath, to_log10, normal, log +from .jar import get_priorpath, to_log10, normal, debug import logging logger = logging.getLogger(__name__) @@ -56,7 +56,7 @@ class kde(plotting): to compute the KDE. 
Default is to use pbjam/data/prior_data.csv """ - # @log(logger) + # @debug(logger) def __init__(self, starinst=None, prior_file=None): if starinst: @@ -81,7 +81,7 @@ def __init__(self, starinst=None, prior_file=None): def __repr__(self): return f'' - @log(logger) + @debug(logger) def select_prior_data(self, numax=None, KDEsize = 100): """ Selects useful prior data based on proximity to estimated numax. @@ -184,7 +184,7 @@ def _prior_size_check(self, pdata, numax, KDEsize): return pdata.sample(KDEsize, weights=idx, replace=False) - @log(logger) + @debug(logger) def make_kde(self, bw_fac=1.0): """ Takes the prior data and constructs a KDE function @@ -299,7 +299,7 @@ def likelihood(self, p): return lnlike - @log(logger) + @debug(logger) def kde_predict(self, n): """ Predict the l=0 mode frequencies from the KDE samples. @@ -332,7 +332,7 @@ def kde_predict(self, n): return freq.mean(axis=1), freq.std(axis=1) - @log(logger) + @debug(logger) def kde_sampler(self, nwalkers=50): """ Samples the posterior distribution with the KDE prior diff --git a/pbjam/session.py b/pbjam/session.py index 83a0e750..a3c58b62 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -52,7 +52,7 @@ import os, pickle, warnings, logging from .star import star, _format_name from datetime import datetime -from .jar import references, log, file_logger +from .jar import references, debug, file_logger logger = logging.getLogger(__name__) @@ -628,7 +628,7 @@ def __repr__(self): return f'' @file_logger.listen - @log(logger) + @debug(logger) def __call__(self, bw_fac=1, norders=8, model_type='simple', tune=1500, nthreads=1, verbose=False, make_plots=False, store_chains=False, asy_sampling='emcee', developer_mode=False): diff --git a/pbjam/star.py b/pbjam/star.py index 174a33ce..ecb6d069 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -28,7 +28,7 @@ import astropy.units as units import logging -from .jar import log, file_logger +from .jar import debug, file_logger logger = logging.getLogger(__name__) 
# For module-level logging @@ -238,7 +238,7 @@ def _set_outpath(self, path): logger.exception(f"Could not create directory for star {self.ID}.") @file_logger.listen - @log(logger) + @debug(logger) def run_kde(self, bw_fac=1.0, make_plots=False, store_chains=False): """ Run all steps involving KDE. @@ -283,7 +283,7 @@ def run_kde(self, bw_fac=1.0, make_plots=False, store_chains=False): kde_samps.to_csv(self._get_outpath(f'kde_chains_{self.ID}.csv'), index=False) @file_logger.listen - @log(logger) + @debug(logger) def run_asy_peakbag(self, norders, make_plots=False, store_chains=False, method='emcee', developer_mode=False): @@ -339,7 +339,7 @@ def run_asy_peakbag(self, norders, make_plots=False, asy_samps.to_csv(self._get_outpath(f'asymptotic_fit_chains_{self.ID}.csv'), index=False) @file_logger.listen - @log(logger) + @debug(logger) def run_peakbag(self, model_type='simple', tune=1500, nthreads=1, make_plots=False, store_chains=False): """ Run all steps involving peakbag. @@ -633,7 +633,7 @@ def _format_name(ID): return fname return ID -@log(logger) +@debug(logger) def get_bp_rp(ID): """ Search online for bp_rp values based on ID. diff --git a/pbjam/tests/test_jar.py b/pbjam/tests/test_jar.py index cd19f38a..cf608ef8 100644 --- a/pbjam/tests/test_jar.py +++ b/pbjam/tests/test_jar.py @@ -1,6 +1,6 @@ """Tests for the jar module""" -from pbjam.jar import normal, to_log10, get_priorpath, get_percentiles, log_file, file_logger, log +from pbjam.jar import normal, to_log10, get_priorpath, get_percentiles, log_file, file_logger, debug import pbjam.tests.pbjam_tests as pbt import numpy as np from numpy.testing import assert_almost_equal, assert_array_equal @@ -133,11 +133,11 @@ def test_log_file(): os.remove(filename) -def test_log_debug(): +def test_debug_logger(): """Tests `log` decorator debug messages""" test_message = 'Function in progress.' 
- @log(logger) + @debug(logger) def log_test(): logger.debug(test_message) @@ -159,12 +159,12 @@ def log_test(): os.remove(filename) -def test_log_info(): - """Tests `log` decorator with no debug info.""" +def test_debug_info(): + """Tests `debug` decorator with INFO level.""" test_message = 'Function in progress.' - @log(logger) + @debug(logger) def log_test(): logger.debug(test_message) logger.info(test_message) From 6a09cf9b0de7d8a99cd8cd556812ee6d12e752a9 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Wed, 3 Feb 2021 10:51:51 +0000 Subject: [PATCH 46/52] Set debugger equal to debug(logger) --- pbjam/asy_peakbag.py | 9 +++++---- pbjam/ellone.py | 13 +++++++------ pbjam/jar.py | 6 ++++-- pbjam/mcmc.py | 11 ++++++----- pbjam/peakbag.py | 13 +++++++------ pbjam/plotting.py | 11 ++++++----- pbjam/priors.py | 13 +++++++------ pbjam/session.py | 3 ++- pbjam/star.py | 9 +++++---- 9 files changed, 49 insertions(+), 39 deletions(-) diff --git a/pbjam/asy_peakbag.py b/pbjam/asy_peakbag.py index 6ed279fe..001817a8 100755 --- a/pbjam/asy_peakbag.py +++ b/pbjam/asy_peakbag.py @@ -17,6 +17,7 @@ import warnings, logging logger = logging.getLogger(__name__) +debugger = debug(logger) class asymp_spec_model(): @@ -38,7 +39,7 @@ class asymp_spec_model(): Number of radial order to fit. """ - # @debug(logger) + # @debugger def __init__(self, f, norders): self.f = np.array([f]).flatten() self.norders = int(norders) @@ -329,7 +330,7 @@ class asymptotic_fit(plotting, asymp_spec_model): science results! """ - # @debug(logger) + # @debugger def __init__(self, st, norders=None): self.pg = st.pg @@ -361,7 +362,7 @@ def __init__(self, st, norders=None): def __repr__(self): return f'' - @debug(logger) + @debugger def __call__(self, method, developer_mode): """ Setup, run and parse the asymptotic relation fit. 
@@ -520,7 +521,7 @@ def _get_summary_stats(self, fit): return summary - @debug(logger) + @debugger def get_modeIDs(self, fit, norders): """ Set mode ID in a dataframe diff --git a/pbjam/ellone.py b/pbjam/ellone.py index f2d9528e..b1f9739a 100644 --- a/pbjam/ellone.py +++ b/pbjam/ellone.py @@ -38,6 +38,7 @@ from .jar import debug logger = logging.getLogger(__name__) +debugger = debug(logger) class ellone(plotting): @@ -106,7 +107,7 @@ def __init__(self, pbinst=None, f=None, s=None): self.hdbX = None self.hdb_clusterN = None - @debug(logger) + @debugger def residual(self,): """ Compute the residual after dividing out l=2,0 @@ -182,7 +183,7 @@ def H0test(self, fbin, sbin, nbin, dnu, reject=0.1): idx = k < reject return idx, k - @debug(logger) + @debugger def H0_inconsistent(self, dnu, Nmax, rejection_level): """ Find bins inconsistent with noise @@ -225,7 +226,7 @@ def H0_inconsistent(self, dnu, Nmax, rejection_level): return nu, N, pH0s - @debug(logger) + @debugger def clustering_preprocess(self, nu, N, limits = (0, 100000)): """ Preprocess the samples before clustering @@ -274,7 +275,7 @@ def span(self, x): return max(x)-min(x) - @debug(logger) + @debugger def clustering(self, nu, N, Nmax, outlier_limit=0.5, cluster_prob=0.9): """ Perform HDBscan clustering @@ -333,7 +334,7 @@ def clustering(self, nu, N, Nmax, outlier_limit=0.5, cluster_prob=0.9): return nus[1:], nstds[1:] - @debug(logger) + @debugger def get_ell1(self, dnu): """ Estimate frequency of l=1 modes (p-modes) @@ -386,7 +387,7 @@ def get_ell1(self, dnu): return nul1s, nul1s_std - @debug(logger) + @debugger def __call__(self, dnu, Nmax = 30, rejection_level = 0.1): """ Perform all the steps to estimate l=1 frequencies diff --git a/pbjam/jar.py b/pbjam/jar.py index cd2bd2b6..c7a2f73d 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -74,8 +74,9 @@ def debug(logger): from pbjam.jar import debug logger = logging.getLogger(__name__) + debugger = debug(logger) - @debug(logger) + @debugger def my_func(a, b): 
logger.debug('Function in progress.') return a + b @@ -104,6 +105,7 @@ def my_func(a, b): from pbjam.jar import debug logger = logging.getLogger(__name__) + debugger = debug(logger) class myClass: @@ -113,7 +115,7 @@ def __init__(self): self.a = 1 self.b = 2 - @debug(logger) + @debugger def my_mthd(self): logger.debug('Method in progress.') return self.a + self.b diff --git a/pbjam/mcmc.py b/pbjam/mcmc.py index a8dc8170..70f80704 100644 --- a/pbjam/mcmc.py +++ b/pbjam/mcmc.py @@ -15,6 +15,7 @@ from .jar import debug logger = logging.getLogger(__name__) +debugger = debug(logger) class mcmc(): @@ -55,7 +56,7 @@ class mcmc(): Acceptance fraction at each step. """ - # @debug(logger) + # @debugger def __init__(self, start, likelihood, prior, nwalkers=50): self.start = start @@ -124,7 +125,7 @@ def stationarity(self, nfactor=20): converged = np.all(tau * nfactor < self.sampler.iteration) return converged - @debug(logger) + @debugger def __call__(self, max_iter=20000, spread=1e-4, start_samples=[]): """ Initialize and run the EMCEE afine invariant sampler @@ -207,7 +208,7 @@ def __call__(self, max_iter=20000, spread=1e-4, start_samples=[]): return self.flatchain - # @debug(logger) + # @debugger def fold(self, pos, accept_lim = 0.2, spread=0.1): """ Fold low acceptance walkers into main distribution @@ -273,7 +274,7 @@ class nested(cpnest.model.Model): Function that will return the log prior when called as prior(params) """ - # @debug(logger) + # @debugger def __init__(self, names, bounds, likelihood, prior, path): self.names=names self.bounds=bounds @@ -296,7 +297,7 @@ def log_prior(self,p): if not self.in_bounds(p): return -np.inf return self.prior(p.values) - @debug(logger) + @debugger def __call__(self, nlive=100, nthreads=1, maxmcmc=100, poolsize=100): """ Runs the nested sampling diff --git a/pbjam/peakbag.py b/pbjam/peakbag.py index 41307151..f98da081 100644 --- a/pbjam/peakbag.py +++ b/pbjam/peakbag.py @@ -13,6 +13,7 @@ from .jar import debug logger = 
logging.getLogger(__name__) +debugger = debug(logger) class peakbag(plotting): @@ -67,7 +68,7 @@ class peakbag(plotting): See asy_peakbag asymptotic_fit for more details. """ - # @debug(logger) + # @debugger def __init__(self, starinst, init=True, path=None, verbose=False): self.pg = starinst.pg @@ -87,7 +88,7 @@ def __init__(self, starinst, init=True, path=None, verbose=False): def __repr__(self): return '' - @debug(logger) + @debugger def make_start(self): """ Set the starting model for peakbag @@ -119,7 +120,7 @@ def make_start(self): self.n = np.linspace(0.0, 1.0, len(self.start['l0']))[:, None] - @debug(logger) + @debugger def remove_outsiders(self, l0, l2): """ Drop outliers @@ -139,7 +140,7 @@ def remove_outsiders(self, l0, l2): sel = np.where(np.logical_and(l0 < self.f.max(), l0 > self.f.min())) return l0[sel], l2[sel] - @debug(logger) + @debugger def trim_ladder(self, lw_fac=10, extra=0.01, verbose=False): """ Turns mode frequencies into list of pairs @@ -251,7 +252,7 @@ def model(self, l0, l2, width0, width2, height0, height2, back): mod += self.lor(l2, width2, height2) return mod.T - @debug(logger) + @debugger def init_model(self, model_type): """ Initialize the pymc3 model for peakbag @@ -355,7 +356,7 @@ def _addPPRatio(self): self.summary.at[idx, 'log_ppr'] = log_ppr[idx] - @debug(logger) + @debugger def __call__(self, model_type='simple', tune=1500, nthreads=1, maxiter=4, advi=False): """ Perform all the steps in peakbag. 
diff --git a/pbjam/plotting.py b/pbjam/plotting.py index e1385e79..9f87d39a 100644 --- a/pbjam/plotting.py +++ b/pbjam/plotting.py @@ -16,6 +16,7 @@ from .jar import debug logger = logging.getLogger(__name__) # For module-level logging +debugger = debug(logger) class plotting: @@ -58,7 +59,7 @@ def _save_my_fig(self, fig, figtype, path, ID): outpath = os.path.join(*[path, type(self).__name__+f'_{figtype}_{str(ID)}.png']) fig.savefig(outpath) - @debug(logger) + @debugger def plot_echelle(self, pg=None, path=None, ID=None, savefig=False): """ Make echelle plot @@ -159,7 +160,7 @@ def plot_echelle(self, pg=None, path=None, ID=None, savefig=False): return fig - @debug(logger) + @debugger def plot_corner(self, path=None, ID=None, savefig=False): """ Make corner plot of result. @@ -195,7 +196,7 @@ def plot_corner(self, path=None, ID=None, savefig=False): return fig - @debug(logger) + @debugger def plot_spectrum(self, pg=None, path=None, ID=None, savefig=False): """ Plot the power spectrum @@ -429,7 +430,7 @@ def _make_prior_corner(self, df, numax_rng = 100): return crnr, crnr.get_axes() - @debug(logger) + @debugger def plot_prior(self, path=None, ID=None, savefig=False): """ Corner of result in relation to prior sample. @@ -481,7 +482,7 @@ def plot_prior(self, path=None, ID=None, savefig=False): return crnr - @debug(logger) + @debugger def plot_start(self): """ Plot starting point for peakbag diff --git a/pbjam/priors.py b/pbjam/priors.py index e33fb8cb..0d6fe847 100644 --- a/pbjam/priors.py +++ b/pbjam/priors.py @@ -13,9 +13,10 @@ from .plotting import plotting import statsmodels.api as sm from .jar import get_priorpath, to_log10, normal, debug - import logging + logger = logging.getLogger(__name__) +debugger = debug(logger) class kde(plotting): @@ -56,7 +57,7 @@ class kde(plotting): to compute the KDE. 
Default is to use pbjam/data/prior_data.csv """ - # @debug(logger) + # @debugger def __init__(self, starinst=None, prior_file=None): if starinst: @@ -81,7 +82,7 @@ def __init__(self, starinst=None, prior_file=None): def __repr__(self): return f'' - @debug(logger) + @debugger def select_prior_data(self, numax=None, KDEsize = 100): """ Selects useful prior data based on proximity to estimated numax. @@ -184,7 +185,7 @@ def _prior_size_check(self, pdata, numax, KDEsize): return pdata.sample(KDEsize, weights=idx, replace=False) - @debug(logger) + @debugger def make_kde(self, bw_fac=1.0): """ Takes the prior data and constructs a KDE function @@ -299,7 +300,7 @@ def likelihood(self, p): return lnlike - @debug(logger) + @debugger def kde_predict(self, n): """ Predict the l=0 mode frequencies from the KDE samples. @@ -332,7 +333,7 @@ def kde_predict(self, n): return freq.mean(axis=1), freq.std(axis=1) - @debug(logger) + @debugger def kde_sampler(self, nwalkers=50): """ Samples the posterior distribution with the KDE prior diff --git a/pbjam/session.py b/pbjam/session.py index a3c58b62..435c0252 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -55,6 +55,7 @@ from .jar import references, debug, file_logger logger = logging.getLogger(__name__) +debugger = debug(logger) def _organize_sess_dataframe(vardf): """ Takes input dataframe and tidies it up. 
@@ -628,7 +629,7 @@ def __repr__(self): return f'' @file_logger.listen - @debug(logger) + @debugger def __call__(self, bw_fac=1, norders=8, model_type='simple', tune=1500, nthreads=1, verbose=False, make_plots=False, store_chains=False, asy_sampling='emcee', developer_mode=False): diff --git a/pbjam/star.py b/pbjam/star.py index ecb6d069..4b00e712 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -31,6 +31,7 @@ from .jar import debug, file_logger logger = logging.getLogger(__name__) # For module-level logging +debugger = debug(logger) class star(plotting, file_logger): @@ -238,7 +239,7 @@ def _set_outpath(self, path): logger.exception(f"Could not create directory for star {self.ID}.") @file_logger.listen - @debug(logger) + @debugger def run_kde(self, bw_fac=1.0, make_plots=False, store_chains=False): """ Run all steps involving KDE. @@ -283,7 +284,7 @@ def run_kde(self, bw_fac=1.0, make_plots=False, store_chains=False): kde_samps.to_csv(self._get_outpath(f'kde_chains_{self.ID}.csv'), index=False) @file_logger.listen - @debug(logger) + @debugger def run_asy_peakbag(self, norders, make_plots=False, store_chains=False, method='emcee', developer_mode=False): @@ -339,7 +340,7 @@ def run_asy_peakbag(self, norders, make_plots=False, asy_samps.to_csv(self._get_outpath(f'asymptotic_fit_chains_{self.ID}.csv'), index=False) @file_logger.listen - @debug(logger) + @debugger def run_peakbag(self, model_type='simple', tune=1500, nthreads=1, make_plots=False, store_chains=False): """ Run all steps involving peakbag. @@ -633,7 +634,7 @@ def _format_name(ID): return fname return ID -@debug(logger) +@debugger def get_bp_rp(ID): """ Search online for bp_rp values based on ID. 
From 8fb77746c5d358ae86dc6e4920866a976198d2da Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Wed, 3 Feb 2021 15:18:28 +0000 Subject: [PATCH 47/52] Removed test for warning --- pbjam/tests/test_priors.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/pbjam/tests/test_priors.py b/pbjam/tests/test_priors.py index f5c63ba7..9cbe37d9 100644 --- a/pbjam/tests/test_priors.py +++ b/pbjam/tests/test_priors.py @@ -55,10 +55,6 @@ def test_prior_size_check(): for sigma in [10, 100]: pdata_cut = func(pdata, to_log10(numax, sigma), KDEsize) assert((len(pdata_cut) > 0) & (len(pdata_cut) <= KDEsize)) - - # These combinations should show warnings - with pytest.warns(UserWarning): - func(pdata, to_log10(300, 1), 500) # These combinations should raise errors with pytest.raises(ValueError): From 96637bfe4777b6f9f3ecba36c66b341d92edfbdd Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Wed, 3 Feb 2021 15:21:31 +0000 Subject: [PATCH 48/52] Changed varname from level to logging_level --- pbjam/session.py | 15 ++++++++------- pbjam/star.py | 14 +++++++------- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/pbjam/session.py b/pbjam/session.py index 435c0252..3dcb2a1c 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -525,11 +525,12 @@ class session(file_logger): the `log_file` for the session. Give this a unique name when running multiple sessions with the same `path`, otherwise logs will be appended to the same file. - level : str, optional - Level at which logs will be recorded to a log file called 'star.log' at - `path`. Default is 'DEBUG' (recommended). Choose from 'DEBUG', 'INFO', - 'WARNING', 'ERROR' and 'CRITICAL'. All logs at levels including and - following `logging_level` will be recorded to the file. + logging_level : str, optional + Level at which logs will be recorded to a log file called + f'{session_ID}.log' at `path`. Default is 'DEBUG' (recommended). Choose + from 'DEBUG', 'INFO', 'WARNING', 'ERROR' and 'CRITICAL'. 
All logs at + levels including and following `logging_level` will be recorded to the + file. Attributes ---------- @@ -545,11 +546,11 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, timeseries=None, spectrum=None, dictlike=None, use_cached=False, cadence=None, campaign=None, sector=None, month=None, quarter=None, mission=None, path=None, download_dir=None, - session_ID=None, level='DEBUG'): + session_ID=None, logging_level='DEBUG'): self.session_ID = session_ID or 'session' logfilename = os.path.join(path or os.getcwd(), f'{self.session_ID}.log') - super(session, self).__init__(filename=logfilename, level=level, loggername='pbjam.session') + super(session, self).__init__(filename=logfilename, level=logging_level, loggername='pbjam.session') with self.log_file: # Records everything in context to the log file diff --git a/pbjam/star.py b/pbjam/star.py index 4b00e712..a65c2d14 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -74,11 +74,11 @@ class star(plotting, file_logger): prior_file : str, optional Path to the csv file containing the prior data. Default is pbjam/data/prior_data.csv - level : str, optional - Level at which logs will be recorded to a log file called 'star.log' at - `path`. Default is 'DEBUG' (recommended). Choose from 'DEBUG', 'INFO', - 'WARNING', 'ERROR' and 'CRITICAL'. All logs at levels including and - following `logging_level` will be recorded to the file. + logging_level : str, optional + Level at which logs will be recorded to a log file called f'{ID}.log' + at `path`. Default is 'DEBUG' (recommended). Choose from 'DEBUG', + 'INFO', 'WARNING', 'ERROR' and 'CRITICAL'. All logs at levels including + and following `logging_level` will be recorded to the file. 
Attributes ---------- @@ -90,12 +90,12 @@ class star(plotting, file_logger): """ def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], - path=None, prior_file=None, level='DEBUG'): + path=None, prior_file=None, logging_level='DEBUG'): self.ID = ID self._set_outpath(path) logfilename = os.path.join(self.path, f'{self.ID}.log') - super(star, self).__init__(filename=logfilename, level=level) + super(star, self).__init__(filename=logfilename, level=logging_level) with self.log_file: logger.info(f"Initializing star with ID {repr(self.ID)}.") From 865b0d2242f956bb0de52e6930938e674fe0ebaf Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Wed, 3 Feb 2021 15:40:55 +0000 Subject: [PATCH 49/52] Added debugger decorators everywhere! --- pbjam/session.py | 23 ++++++++++++++++------- pbjam/star.py | 5 +++++ 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/pbjam/session.py b/pbjam/session.py index 3dcb2a1c..5eecfbd1 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -57,6 +57,7 @@ logger = logging.getLogger(__name__) debugger = debug(logger) +@debugger def _organize_sess_dataframe(vardf): """ Takes input dataframe and tidies it up. @@ -92,7 +93,7 @@ def _organize_sess_dataframe(vardf): if 'spectrum' not in vardf.keys(): _format_col(vardf, None, 'spectrum') - +@debugger def _organize_sess_input(**vardct): """ Takes input and organizes them in a dataframe. @@ -133,6 +134,7 @@ def _organize_sess_input(**vardct): vardf[key+'_err'] = np.array(vardct[key]).reshape((-1, 2))[:, 1].flatten() return vardf +@debugger def _sort_lc(lc): """ Sort a lightcurve in Lightkurve object. @@ -156,6 +158,7 @@ def _sort_lc(lc): return lc +@debugger def _query_lightkurve(ID, download_dir, use_cached, lkwargs): """ Get time series using LightKurve @@ -199,7 +202,7 @@ def _query_lightkurve(ID, download_dir, use_cached, lkwargs): return lc - +@debugger def _arr_to_lk(x, y, name, typ): """ LightKurve object from input. 
@@ -233,7 +236,7 @@ def _arr_to_lk(x, y, name, typ): else: raise KeyError("Don't modify anything but spectrum and timeseries cols") - +@debugger def _format_col(vardf, col, key): """ Add timeseries or spectrum column to dataframe based on input @@ -302,6 +305,7 @@ def _format_col(vardf, col, key): else: logger.critical('Unhandled exception.') +@debugger def _lc_to_lk(ID, tsIn, specIn, download_dir, use_cached, lkwargs): """ Convert time series column in dataframe to lk.LightCurve object @@ -360,7 +364,7 @@ def _lc_to_lk(ID, tsIn, specIn, download_dir, use_cached, lkwargs): return tsOut - +@debugger def _lk_to_pg(ID, tsIn, specIn): """ Convert spectrum column in dataframe to Lightkurve.periodgram objects @@ -406,7 +410,6 @@ def _lk_to_pg(ID, tsIn, specIn): return specOut - class session(file_logger): """ Main class used to initiate peakbagging. @@ -692,7 +695,7 @@ def __call__(self, bw_fac=1, norders=8, model_type='simple', tune=1500, logger.exception(f"{st} failed due to the following exception, continuing to the next star.") - +@debugger def _load_fits(files, mission): """ Read fitsfiles into a Lightkurve object @@ -720,6 +723,7 @@ def _load_fits(files, mission): lc = lccol.PDCSAP_FLUX.stitch() return lc +@debugger def _set_mission(ID, lkwargs): """ Set mission keyword in lkwargs. @@ -745,7 +749,8 @@ def _set_mission(ID, lkwargs): lkwargs['mission'] = 'TESS' else: lkwargs['mission'] = ('Kepler', 'K2', 'TESS') - + +@debugger def _search_and_dump(ID, lkwargs, search_cache): """ Get lightkurve search result online. @@ -785,6 +790,7 @@ def _search_and_dump(ID, lkwargs, search_cache): return resultDict +@debugger def _getMASTidentifier(ID, lkwargs): """ return KIC/TIC/EPIC for given ID. 
@@ -829,6 +835,7 @@ def _getMASTidentifier(ID, lkwargs): ID = ID.replace(' ', '') return ID +@debugger def _perform_search(ID, lkwargs, use_cached=True, download_dir=None, cache_expire=30): """ Find filenames related to target @@ -884,6 +891,7 @@ def _perform_search(ID, lkwargs, use_cached=True, download_dir=None, return resultDict['result'] +@debugger def _check_lc_cache(search, mission, download_dir=None): """ Query cache directory or download fits files. @@ -926,6 +934,7 @@ def _check_lc_cache(search, mission, download_dir=None): return files_in_cache +@debugger def _clean_lc(lc): """ Perform Lightkurve operations on object. diff --git a/pbjam/star.py b/pbjam/star.py index a65c2d14..b7e58779 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -435,6 +435,7 @@ def __call__(self, bw_fac=1.0, norders=8, model_type='simple', tune=1500, self.references._addRef('pandas') +@debugger def _querySimbad(ID): """ Query any ID at Simbad for Gaia DR2 source ID. @@ -474,6 +475,7 @@ def _querySimbad(ID): return line.replace('Gaia DR2 ', '') return None +@debugger def _queryTIC(ID, radius = 20): """ Query TIC for bp-rp value @@ -510,6 +512,7 @@ def _queryTIC(ID, radius = 20): else: return None +@debugger def _queryMAST(ID): """ Query any ID at MAST @@ -539,6 +542,7 @@ def _queryMAST(ID): except: return None +@debugger def _queryGaia(ID=None, coords=None, radius=2): """ Query Gaia archive for bp-rp @@ -590,6 +594,7 @@ def _queryGaia(ID=None, coords=None, radius=2): else: raise ValueError('No ID or coordinates provided when querying the Gaia archive.') +@debugger def _format_name(ID): """ Format input ID From 659b3afced1f039bf99bce349247467df86440d3 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Wed, 3 Feb 2021 15:58:12 +0000 Subject: [PATCH 50/52] Changed warnings to logger warning --- pbjam/asy_peakbag.py | 2 +- pbjam/ellone.py | 2 +- pbjam/jar.py | 4 ++-- pbjam/peakbag.py | 9 ++++----- pbjam/plotting.py | 2 +- pbjam/priors.py | 20 ++++++++------------ pbjam/session.py | 4 ++-- 
pbjam/star.py | 2 +- 8 files changed, 20 insertions(+), 25 deletions(-) diff --git a/pbjam/asy_peakbag.py b/pbjam/asy_peakbag.py index 001817a8..d1f715c0 100755 --- a/pbjam/asy_peakbag.py +++ b/pbjam/asy_peakbag.py @@ -388,7 +388,7 @@ def __call__(self, method, developer_mode): self.developer_mode = developer_mode if method not in ['emcee', 'cpnest']: - warnings.warn(f'Method {method} not found: Using method emcee') + logger.warning(f'Method {method} not found: Using default method emcee') method = 'emcee' if method == 'emcee': diff --git a/pbjam/ellone.py b/pbjam/ellone.py index b1f9739a..1d298794 100644 --- a/pbjam/ellone.py +++ b/pbjam/ellone.py @@ -383,7 +383,7 @@ def get_ell1(self, dnu): nul1s_std[i] = self.cluster_stds[nuidx][maxidx] if (nul0s[i] - nul1s[i])/d01 > 0.2: - warnings.warn('Cluster nu_l1 exceeds UP estimate by more than 20%') + logger.warning('Cluster nu_l1 exceeds UP estimate by more than 20%') return nul1s, nul1s_std diff --git a/pbjam/jar.py b/pbjam/jar.py index c7a2f73d..7df3c111 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -395,12 +395,12 @@ def _findBlockEnd(self, string, idx): a += 1 if (i >= len(string[idx:])-1) and (a != 0): - warnings.warn('Warning: Reached end of bibtex file with no closing curly bracket. Your .bib file may be formatted incorrectly. The reference list may be garbled.') + logger.warning('Reached end of bibtex file with no closing curly bracket. Your .bib file may be formatted incorrectly. The reference list may be garbled.') if a ==0: break if string[idx+i] == '{': - warnings.warn('Warning: Ended on an opening bracket. Your .bib file may be formatted incorrectly.') + logger.warning('Ended on an opening bracket. 
Your .bib file may be formatted incorrectly.') return idx+i diff --git a/pbjam/peakbag.py b/pbjam/peakbag.py index f98da081..fe246fc1 100644 --- a/pbjam/peakbag.py +++ b/pbjam/peakbag.py @@ -178,7 +178,7 @@ def trim_ladder(self, lw_fac=10, extra=0.01, verbose=False): for idx, freq in enumerate(self.start['l0']): loc_mid_02 = np.argmin(np.abs(self.f - (freq - d02/2.0))) if loc_mid_02 == 0: - warnings.warn('Did not find optimal pair location') + logger.warning('Did not find optimal pair location') # if verbose: # print(f'loc_mid_02 = {loc_mid_02}') # print(f'w/2 = {int(w/2)}') @@ -284,7 +284,7 @@ def init_model(self, model_type): if model_type != 'model_gp': if model_type != 'simple': # defaults to simple if bad input - warnings.warn('Model not defined - using simple model') + logger.warning('Model not defined - using simple model') width0 = pm.Lognormal('width0', mu=np.log(self.start['width0']), sigma=width_fac, shape=N) width2 = pm.Lognormal('width2', mu=np.log(self.start['width2']), @@ -294,7 +294,7 @@ def init_model(self, model_type): self.target_accept = 0.9 elif model_type == 'model_gp': - warnings.warn('This model is developmental - use carefully') + logger.warning('This model is developmental - use carefully') # Place a GP over the l=0 mode widths ... m0 = pm.Normal('gradient0', 0, 10) c0 = pm.Normal('intercept0', 0, 10) @@ -387,7 +387,6 @@ def __call__(self, model_type='simple', tune=1500, nthreads=1, maxiter=4, # REMOVE THIS WHEN pymc3 v3.8 is a bit older. 
try: rhatfunc = pm.diagnostics.gelman_rubin - # warnings.warn('pymc3.diagnostics.gelman_rubin is depcrecated; upgrade pymc3 to v3.8 or newer.', DeprecationWarning) except: rhatfunc = az.rhat @@ -418,7 +417,7 @@ def __call__(self, model_type='simple', tune=1500, nthreads=1, maxiter=4, while Rhat_max > 1.05: if niter > maxiter: - warnings.warn('Did not converge!') + logger.warning('Did not converge!') break sample_kwargs['tune'] = tune * niter diff --git a/pbjam/plotting.py b/pbjam/plotting.py index 9f87d39a..b1a026bd 100644 --- a/pbjam/plotting.py +++ b/pbjam/plotting.py @@ -184,7 +184,7 @@ def plot_corner(self, path=None, ID=None, savefig=False): """ if not hasattr(self, 'samples'): - warnings.warn(f"'{self.__class__.__name__}' has no attribute 'samples'. Can't plot a corner plot.") + logger.error(f"'{self.__class__.__name__}' has no attribute 'samples'. Can't plot a corner plot.") return None fig = corner.corner(self.samples, labels=self.par_names, diff --git a/pbjam/priors.py b/pbjam/priors.py index 0d6fe847..100e511b 100644 --- a/pbjam/priors.py +++ b/pbjam/priors.py @@ -158,14 +158,9 @@ def _prior_size_check(self, pdata, numax, KDEsize): idx = np.abs(pdata.numax.values - numax[0]) < nsigma * numax[1] if not flag_warn: - # If this is a use warning, must give user instructions. - # Otherwise, make this a logger.warning - # Maybe user warning if len(pdata[idx]) == 0? - warnings.warn(f'Only {len(pdata[idx])} star(s) near provided numax. ' + - f'Trying to expand the range to include ~{KDEsize} stars.') - # logger.warning(f'Only {len(pdata[idx])} star(s) near provided numax. ' + - # f'Trying to expand the range to include ~{KDEsize} stars.') - flag_warn = True + logger.warning(f'Only {len(pdata[idx])} star(s) near provided numax. 
' + + f'Trying to expand the range to include ~{KDEsize} stars.') + flag_warn = True # So this message only appears once if nsigma >= KDEsize: break @@ -175,12 +170,13 @@ def _prior_size_check(self, pdata, numax, KDEsize): ntgts = len(idx[idx==1]) if ntgts == 0: - raise ValueError('No prior targets found within range of target. This might mean no prior samples exist for stars like this, consider increasing the uncertainty on your numax input.') + raise ValueError('No prior targets found within range of target. This might mean no prior samples exist' + \ + ' for stars like this, consider increasing the uncertainty on your numax input.') elif ntgts < KDEsize: - # warnings.warn recommend user change their code but logger.warning does not. Which is best here? I think the former - A. Lyttle - # warnings.warn(f'Sample for estimating KDE is less than the requested {KDEsize}.') - warnings.warn(f'Sample size for estimating KDE is {ntgts}, less than the requested {KDEsize}.') # Add user instruction here, e.g. increase numax uncertainty? + msg = f'Sample size for estimating prior KDE is {ntgts}, less than the desired {KDEsize} - ' + \ + 'the prior may not comprise similar stars. If your uncertainty on numax is < 1 per cent, it may be too small.' 
+ logger.warning(msg) KDEsize = ntgts return pdata.sample(KDEsize, weights=idx, replace=False) diff --git a/pbjam/session.py b/pbjam/session.py index 5eecfbd1..9e4b774c 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -575,7 +575,6 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, if any([ID, numax, dnu, teff, bp_rp]): - # warnings.warn('Dictlike provided as input, ignoring other input fit parameters.') logger.warning('Dictlike provided as input, ignoring other input fit parameters.') _organize_sess_dataframe(vardf) @@ -626,7 +625,8 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, for i, st in enumerate(self.stars): if st.numax[0] > st.f[-1]: - warnings.warn("Input numax is greater than Nyquist frequeny for %s" % (st.ID)) + # TODO: should this raise an exception? We know this will break later on. + logger.critical("Input numax is greater than Nyquist frequeny for %s" % (st.ID)) def __repr__(self): """ Repr for the `session` class. """ diff --git a/pbjam/star.py b/pbjam/star.py index b7e58779..8d9e4af6 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -101,7 +101,7 @@ def __init__(self, ID, pg, numax, dnu, teff=[None,None], bp_rp=[None,None], logger.info(f"Initializing star with ID {repr(self.ID)}.") if numax[0] < 25: - warnings.warn('The input numax is less than 25. The prior is not well defined here, so be careful with the result.') + logger.warning('The input numax is less than 25. 
The prior is not well defined here, so be careful with the result.') self.numax = numax self.dnu = dnu From e0493d8ccde9dfa6aab675a5bed88757d20b1523 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Wed, 3 Feb 2021 15:59:36 +0000 Subject: [PATCH 51/52] Added TODO messages --- pbjam/mcmc.py | 3 ++- pbjam/session.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/pbjam/mcmc.py b/pbjam/mcmc.py index 70f80704..5f6223d3 100644 --- a/pbjam/mcmc.py +++ b/pbjam/mcmc.py @@ -182,7 +182,8 @@ def __call__(self, max_iter=20000, spread=1e-4, start_samples=[]): elif self.sampler.iteration == max_iter: logger.warning(f'Sampler stopped at {max_iter} (maximum). Chains did not necessarily reach a stationary state.') else: - logger.error('Unhandled exception') + # TODO: handle this exception + logger.critical('Unhandled exception') # Fold in low AR chains and run a little bit to update emcee self.fold(pos, spread=spread) diff --git a/pbjam/session.py b/pbjam/session.py index 9e4b774c..029b9941 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -303,6 +303,7 @@ def _format_col(vardf, col, key): np.array([_arr_to_lk(x, y, vardf.loc[i, 'ID'], key)])) vardf[key] = temp else: + # TODO: handle this exception logger.critical('Unhandled exception.') @debugger From 703d132a92fad626de1a4662a9905512dd7a8089 Mon Sep 17 00:00:00 2001 From: alexlyttle Date: Thu, 4 Feb 2021 22:25:46 +0000 Subject: [PATCH 52/52] Tidy docs --- pbjam/jar.py | 212 +++++++++++++++++++--------------------- pbjam/session.py | 12 +-- pbjam/star.py | 6 +- pbjam/tests/test_jar.py | 8 +- 4 files changed, 116 insertions(+), 122 deletions(-) diff --git a/pbjam/jar.py b/pbjam/jar.py index 7df3c111..a8cb9da2 100644 --- a/pbjam/jar.py +++ b/pbjam/jar.py @@ -14,7 +14,7 @@ from .printer import pretty_printer HANDLER_FMT = "%(asctime)-23s :: %(levelname)-8s :: %(name)-17s :: %(message)s" -INDENT = 60 # Set to length of logger info before `message` or just indent by 2? 
+INDENT = 60 # Set to length of logger info before message or just indent by 2? logger = logging.getLogger(__name__) _pp_kwargs = {'width': 120} @@ -34,7 +34,7 @@ def __init__(self, func, logger): self.logger = logger def _log_bound_args(self, args, kwargs): - """ Logs bound arguments - `args` and `kwargs` passed to func. """ + """ Logs bound arguments - ``args`` and ``kwargs`` passed to func. """ bargs = self.signature.bind(*args, **kwargs) bargs_dict = dict(bargs.arguments) self.logger.debug(f"Bound arguments:\n{pprinter.pformat(bargs_dict)}") @@ -57,7 +57,7 @@ def _exiting_function(self, result): def debug(logger): """ Function logging decorator. Logs function metadata upon entering and - sexiting. + exiting. Parameters ---------- @@ -67,79 +67,79 @@ def debug(logger): Examples -------- - Logging a function called `my_func` defined in a module with name `__name__`, + Logging a function called ``my_func`` defined in a module with name ``__name__``, - ```python - import logging - from pbjam.jar import debug + .. code-block:: python - logger = logging.getLogger(__name__) - debugger = debug(logger) + import logging + from pbjam.jar import debug - @debugger - def my_func(a, b): - logger.debug('Function in progress.') - return a + b + logger = logging.getLogger(__name__) + debugger = debug(logger) - if __name__ == "__main__": - logging.basicConfig() - logger.setLevel('DEBUG') - - result = my_func(1, 2) - logger.debug(f'result = {result}') - ``` + @debugger + def my_func(a, b): + logger.debug('Function in progress.') + return a + b + if __name__ == "__main__": + logging.basicConfig() + logger.setLevel('DEBUG') + + result = my_func(1, 2) + logger.debug(f'result = {result}') + Outputs, - ```python - DEBUG:__main__:Entering my_func - DEBUG:__main__:Function in progress. - DEBUG:__main__:Exiting my_func - DEBUG:__main__:result = 3 - ``` + .. code-block:: text + DEBUG:__main__:Entering my_func + DEBUG:__main__:Function in progress. 
+ DEBUG:__main__:Exiting my_func + DEBUG:__main__:result = 3 + For use within classes, - ```python - import logging - from pbjam.jar import debug + .. code-block:: python - logger = logging.getLogger(__name__) - debugger = debug(logger) + import logging + from pbjam.jar import debug + logger = logging.getLogger(__name__) + debugger = debug(logger) - class myClass: - def __init__(self): - logger.debug('Initializing class.') - self.a = 1 - self.b = 2 + class myClass: - @debugger - def my_mthd(self): - logger.debug('Method in progress.') - return self.a + self.b + def __init__(self): + logger.debug('Initializing class.') + self.a = 1 + self.b = 2 - if __name__ == "__main__": - logging.basicConfig() - logger.setLevel('DEBUG') - - obj = myClass() - result = obj.my_mthd() - logger.debug(f'result = {result}') - ``` + @debugger + def my_mthd(self): + logger.debug('Method in progress.') + return self.a + self.b + if __name__ == "__main__": + logging.basicConfig() + logger.setLevel('DEBUG') + + obj = myClass() + result = obj.my_mthd() + logger.debug(f'result = {result}') + Outputs, - ```python - DEBUG:__main__:Entering myClass.__init__. - DEBUG:__main__:Initializing class. - DEBUG:__main__:Exiting myClass.__init__. - DEBUG:__main__:Entering myClass.my_mthd. - DEBUG:__main__:Method in progress. - DEBUG:__main__:Exiting myClass.my_mthd. - DEBUG:__main__:result = 3 - ``` + .. code-block:: text + + DEBUG:__main__:Entering myClass.__init__. + DEBUG:__main__:Initializing class. + DEBUG:__main__:Exiting myClass.__init__. + DEBUG:__main__:Entering myClass.my_mthd. + DEBUG:__main__:Method in progress. + DEBUG:__main__:Exiting myClass.my_mthd. + DEBUG:__main__:result = 3 """ def _log(func): @@ -186,8 +186,8 @@ def __init__(self, filename, level='DEBUG', **kwargs): class log_file: """ - Context manager for file logging. It logs everything under the `loggername` - logger, by default this is the `'pbjam'` logger (i.e. logs everything from + Context manager for file logging. 
It logs everything under the ``loggername`` + logger, by default this is the ``'pbjam'`` logger (i.e. logs everything from the pbjam package). Parameters @@ -197,35 +197,29 @@ class log_file: level : str, optional Logging level. Default is 'DEBUG'. loggername : str, optional - Name of logger which will send logs to `filename`. Default is `'pbjam'`. + Name of logger which will send logs to ``filename``. Default is ``'pbjam'``. Attributes ---------- handler : pbjam.jar._file_handler File handler object. - Methods - ------- - open() : - Activates file logging process - close() : - Safely closes file logging process - Examples -------- - ```python - from pbjam.jar import log_file + .. code-block:: python - with log_file('example.log') as flog: - # Do some pbjam stuff here and it will be logged to 'example.log' - ... + from pbjam.jar import log_file - # Do some stuff here and it won't be logged to 'example.log' + with log_file('example.log') as flog: + # Do some pbjam stuff here and it will be logged to 'example.log' + ... + + # Do some stuff here and it won't be logged to 'example.log' - with flog: - # Do some stuff here and it will be logged to 'example.log' - ... - ``` + with flog: + # Do some stuff here and it will be logged to 'example.log' + ... + """ def __init__(self, filename, level='DEBUG', loggername='pbjam'): self._filename = filename @@ -253,7 +247,8 @@ def get_level(self): return self._level def set_level(self, level): - """ Set the level of the file handler. + """ + Set the level of the file handler. Parameters ---------- @@ -275,13 +270,13 @@ def __exit__(self, type, value, traceback): class file_logger: """ - Creates a `log_file` at `filename` to which logs under `loggername` at - a given `level` are recorded when the file logger is listening. This + Creates a ``log_file`` at ``filename`` to which logs under ``loggername`` at + a given ``level`` are recorded when the file logger is listening. This class is indended to be sub-classed (see Examples). 
- To listen to a method in a sub-class of `file_logger` (i.e. record all logs + To listen to a method in a sub-class of ``file_logger`` (i.e. record all logs which occur during the method execution) decorate the class method with - `@file_logger.listen`. + ``@file_logger.listen``. Parameters ---------- @@ -290,38 +285,33 @@ class is indended to be sub-classed (see Examples). level : str, optional Logging level. Default is 'DEBUG'. loggername : str, optional - Name of logger which will send logs to `filename`. Default is `'pbjam'`. + Name of logger which will send logs to ``filename``. Default is ``'pbjam'``. Attributes ---------- log_file : pbjam.jar.log_file - Methods - ------- - listen : - Decorator for recording logs in a sub-class method to `log_file`. - Examples -------- - - ```python - # pbjam/example.py - from .jar import file_logger + .. code-block:: python - class example_class(file_logger): - def __init__(self): - super(example_class, self).__init__('example.log', level='INFO') + # pbjam/example.py + from .jar import file_logger + + class example_class(file_logger): + def __init__(self): + super(example_class, self).__init__('example.log', level='INFO') + + with self.log_file: + # Records content in context to log_file + logger.info('Initializing class.') + ... - with self.log_file: - # Records content in context to `log_file` - logger.info('Initializing class.') + @file_logger.listen # records content of example_method to log_file + def example_method(self): + logger.info('Performing function tasks.') ... - - @file_logger.listen # records content of `example_method` to `log_file` - def example_method(self): - logger.info('Performing function tasks.') - ... 
- ``` + """ def __init__(self, *args, **kwargs): @@ -330,7 +320,7 @@ def __init__(self, *args, **kwargs): @staticmethod def listen(func): """ - Decorator for recording logs to `log_file` during function operation, + Decorator for recording logs to ``log_file`` during function operation, closing the log file upon completion. """ @functools.wraps(func) @@ -343,7 +333,8 @@ def wrap(self, *args, **kwargs): class references(): - """ A class for managing references used when running PBjam. + """ + A class for managing references used when running PBjam. This is inherited by session and star. @@ -504,7 +495,8 @@ def get_priorpath(): def get_percentiles(X, nsigma = 2, **kwargs): - """ Get percentiles of an distribution + """ + Get percentiles of an distribution Compute the percentiles corresponding to sigma=1,2,3.. including the median (50th), of an array. @@ -534,7 +526,8 @@ def get_percentiles(X, nsigma = 2, **kwargs): def to_log10(x, xerr): - """ Transform to value to log10 + """ + Transform to value to log10 Takes a value and related uncertainty and converts them to logscale. Approximate. @@ -558,7 +551,8 @@ def to_log10(x, xerr): return [x, xerr] def normal(x, mu, sigma): - """ Evaluate logarithm of normal distribution (not normalized!!) + """ + Evaluate logarithm of normal distribution (not normalized!!) Evaluates the logarithm of a normal distribution at x. diff --git a/pbjam/session.py b/pbjam/session.py index 029b9941..1605336a 100755 --- a/pbjam/session.py +++ b/pbjam/session.py @@ -525,15 +525,15 @@ class session(file_logger): Directory to cache lightkurve downloads. Lightkurve will place the fits files in the default lightkurve cache path in your home directory. session_ID : str, optional - Session identifier. Default is `'session'`. This is the name given to - the `log_file` for the session. Give this a unique name when running - multiple sessions with the same `path`, otherwise logs will be appended + Session identifier. Default is ``'session'``. 
This is the name given to + the ``log_file`` for the session. Give this a unique name when running + multiple sessions with the same ``path``, otherwise logs will be appended to the same file. logging_level : str, optional Level at which logs will be recorded to a log file called - f'{session_ID}.log' at `path`. Default is 'DEBUG' (recommended). Choose + '{session_ID}.log' at ``path``. Default is 'DEBUG' (recommended). Choose from 'DEBUG', 'INFO', 'WARNING', 'ERROR' and 'CRITICAL'. All logs at - levels including and following `logging_level` will be recorded to the + levels including and following ``logging_level`` will be recorded to the file. Attributes @@ -630,7 +630,7 @@ def __init__(self, ID=None, numax=None, dnu=None, teff=None, bp_rp=None, logger.critical("Input numax is greater than Nyquist frequeny for %s" % (st.ID)) def __repr__(self): - """ Repr for the `session` class. """ + """ Repr for the ``session`` class. """ return f'' @file_logger.listen diff --git a/pbjam/star.py b/pbjam/star.py index 8d9e4af6..762cd8f6 100644 --- a/pbjam/star.py +++ b/pbjam/star.py @@ -75,10 +75,10 @@ class star(plotting, file_logger): Path to the csv file containing the prior data. Default is pbjam/data/prior_data.csv logging_level : str, optional - Level at which logs will be recorded to a log file called f'{ID}.log' - at `path`. Default is 'DEBUG' (recommended). Choose from 'DEBUG', + Level at which logs will be recorded to a log file called '{ID}.log' + at ``path``. Default is 'DEBUG' (recommended). Choose from 'DEBUG', 'INFO', 'WARNING', 'ERROR' and 'CRITICAL'. All logs at levels including - and following `logging_level` will be recorded to the file. + and following ``logging_level`` will be recorded to the file. 
Attributes ---------- diff --git a/pbjam/tests/test_jar.py b/pbjam/tests/test_jar.py index cf608ef8..847de2bb 100644 --- a/pbjam/tests/test_jar.py +++ b/pbjam/tests/test_jar.py @@ -83,7 +83,7 @@ def test_get_percentiles(): assert_array_equal(func(*inp), [0., 0., 1.]) def test_file_logger(): - """Tests subclassing `jam` to use the log file record decorator""" + """Tests subclassing ``jam`` to use the log file record decorator""" test_message = 'This should be logged in file.' class file_logger_test(file_logger): @@ -110,7 +110,7 @@ def method(self): os.remove(filename) def test_log_file(): - """Test `file_logger` context manager.""" + """Test ``file_logger`` context manager.""" filename = 'test_file_logger.log' test_level = 'DEBUG' flog = log_file(filename, level=test_level) @@ -134,7 +134,7 @@ def test_log_file(): os.remove(filename) def test_debug_logger(): - """Tests `log` decorator debug messages""" + """Tests ``log`` decorator debug messages""" test_message = 'Function in progress.' @debug(logger) @@ -160,7 +160,7 @@ def log_test(): os.remove(filename) def test_debug_info(): - """Tests `debug` decorator with INFO level.""" + """Tests ``debug`` decorator with INFO level.""" test_message = 'Function in progress.'