From e6036d5dd0c6678305d072978ea1513c76b06100 Mon Sep 17 00:00:00 2001
From: Jussi Vatjus-Anttila
Date: Sat, 27 Jan 2024 19:32:20 +0200
Subject: [PATCH] cleanup log and use f-strings

---
 .pylintrc                 |  4 ++--
 lockable/lockable.py      | 38 ++++++++++++++++++--------------------
 lockable/provider.py      |  6 +-----
 lockable/provider_file.py |  6 +++---
 lockable/provider_http.py | 12 ++++++------
 setup.py                  |  2 +-
 6 files changed, 31 insertions(+), 37 deletions(-)

diff --git a/.pylintrc b/.pylintrc
index 76f197f..f857caf 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -60,7 +60,7 @@ confidence=
 # --enable=similarities". If you want to run only the classes checker, but have
 # no Warning level messages displayed, use "--disable=all --enable=classes
 # --disable=W".
-disable=
+disable=logging-fstring-interpolation
 
 # Enable the message, report, category or checker with the given id(s). You can
 # either give multiple identifier separated by comma (,) or put this option
@@ -110,7 +110,7 @@ never-returning-functions=sys.exit
 
 # Format style used to check logging format string. `old` means using %
 # formatting, `new` is for `{}` formatting,and `fstr` is for f-strings.
-logging-format-style=old
+logging-format-style=new
 
 # Logging modules to check that the string format arguments are in logging
 # function parameter format.
diff --git a/lockable/lockable.py b/lockable/lockable.py
index c2246ff..281f4fc 100644
--- a/lockable/lockable.py
+++ b/lockable/lockable.py
@@ -32,7 +32,7 @@ def invariant(true, message):
 
 class Lockable:
     """
-    Base class for Lockable. It handle low-level functionality.
+    Base class for Lockable. It handles low-level functionality.
     """
 
     def __init__(self,
@@ -44,6 +44,7 @@ def __init__(self,
         MODULE_LOGGER.debug('Initialized lockable')
         self._hostname = hostname
         self._lock_folder = lock_folder
+        MODULE_LOGGER.debug(f"Use lock folder: {self._lock_folder}")
         assert not (isinstance(resource_list, list) and
                     resource_list_file), 'only one of resource_list or ' \
                                          'resource_list_file is accepted, not both'
@@ -98,15 +99,14 @@ def _try_lock(self, requirements, candidate):
         resource_id = candidate.get("id")
         try:
             pid_file = f"{resource_id}.pid"
-            MODULE_LOGGER.debug('Trying lock using: %s', os.path.join(self._lock_folder, pid_file))
-
+            MODULE_LOGGER.debug(f'Trying lock using: {os.path.join(self._lock_folder, pid_file)}')
             _lockable = PidFile(pidname=pid_file, piddir=self._lock_folder)
             _lockable.create()
-            MODULE_LOGGER.info('Allocated: %s, lockfile: %s', resource_id, pid_file)
+            MODULE_LOGGER.info(f'Allocated: {resource_id}, lockfile: {pid_file}')
 
             def release():
                 nonlocal self, resource_id, _lockable
-                MODULE_LOGGER.info('Release resource: %s', resource_id)
+                MODULE_LOGGER.info(f'Release resource: {resource_id}')
                 _lockable.close()
                 del self._allocations[resource_id]
 
@@ -119,8 +119,8 @@ def release():
 
     def _lock_some(self, requirements, candidates, timeout_s, retry_interval):
         """ Contextmanager that lock some candidate that is free and release it finally """
-        MODULE_LOGGER.debug('Total match local resources: %d, timeout: %d',
-                            len(candidates), timeout_s)
+        MODULE_LOGGER.debug(f'Total match local resources: {len(candidates)}, '
+                            f'timeout: {timeout_s}s')
         if not isinstance(requirements, list):
             requirements = [requirements]
         abort_after = timeout_s
@@ -139,10 +139,9 @@ def _lock_some(self, requirements, candidates, timeout_s, retry_interval):
 
                     try:
                         allocation = self._try_lock(req, candidate)
-                        MODULE_LOGGER.debug('resource %s allocated (%s), alloc_id: (%s)',
-                                            allocation.resource_id,
-                                            json.dumps(allocation.resource_info),
-                                            allocation.alloc_id)
+                        MODULE_LOGGER.debug(f'resource {allocation.resource_id} '
+                                            f'allocated ({json.dumps(allocation.resource_info)}), '
+                                            f'alloc_id: ({allocation.alloc_id})')
                         self._allocations[allocation.resource_id] = allocation
                         current_allocations.append(allocation)
                         fulfilled_requirement_indexes.append(index)
@@ -196,7 +195,7 @@ def _lock_many(self, requirements, timeout_s, retry_interval=1) -> [Allocation]:
     @staticmethod
     def _get_requirements(requirements, hostname):
         """ Generate requirements"""
-        MODULE_LOGGER.debug('hostname: %s', hostname)
+        MODULE_LOGGER.debug(f'hostname: {hostname}')
         merged = merge({'hostname': hostname, 'online': True}, requirements)
         allowed_to_del = ["online", "hostname"]
         for key in allowed_to_del:
@@ -218,9 +217,8 @@ def lock(self, requirements: (str or dict), timeout_s: int = DEFAULT_TIMEOUT) ->
         # Refresh resources data
         self._provider.reload()
         begin = datetime.now()
-        MODULE_LOGGER.debug("Use lock folder: %s", self._lock_folder)
-        MODULE_LOGGER.debug("Requirements: %s", json.dumps(predicate))
-        MODULE_LOGGER.debug("Resource list: %s", json.dumps(self.resource_list))
+        MODULE_LOGGER.debug(f"Requirements: {json.dumps(predicate)}")
+        MODULE_LOGGER.debug(f"Resource list: {json.dumps(self.resource_list)}")
         allocation = self._lock(predicate, timeout_s)
         allocation.allocation_queue_time = datetime.now() - begin
         return allocation
@@ -238,9 +236,8 @@ def lock_many(self, requirements: list, timeout_s: int = DEFAULT_TIMEOUT) -> lis
             predicates.append(self._get_requirements(self.parse_requirements(req), self._hostname))
         self._provider.reload()
         begin = datetime.now()
-        MODULE_LOGGER.debug("Use lock folder: %s", self._lock_folder)
-        MODULE_LOGGER.debug("Requirements: %s", json.dumps(predicates))
-        MODULE_LOGGER.debug("Resource list: %s", json.dumps(self.resource_list))
+        MODULE_LOGGER.debug(f"Requirements: {json.dumps(predicates)}")
+        MODULE_LOGGER.debug(f"Resource list: {json.dumps(self.resource_list)}")
         allocations = self._lock_many(predicates, timeout_s)
 
         for allocation in allocations:
@@ -254,9 +251,10 @@ def unlock(self, allocation: Allocation) -> None:
         :return: None
         """
         assert 'id' in allocation.resource_info, 'missing "id" -key'
-        MODULE_LOGGER.info('Release: %s', allocation.resource_id)
+        MODULE_LOGGER.info(f'Release: {allocation.resource_id}')
         resource_id = allocation.resource_id
-        ResourceNotFound.invariant(resource_id in self._allocations, 'resource not locked')
+        ResourceNotFound.invariant(resource_id in self._allocations,
+                                   'resource not locked')
         reservation = self._allocations[resource_id]
         reservation.release(allocation.alloc_id)
 
diff --git a/lockable/provider.py b/lockable/provider.py
index 3013b2b..65f5753 100644
--- a/lockable/provider.py
+++ b/lockable/provider.py
@@ -1,6 +1,5 @@
 """ Provider library """
 from abc import ABC, abstractmethod
-import json
 import typing
 from typing import List
 
@@ -37,9 +36,6 @@ def set_resources_list(self, resources_list: list):
         assert isinstance(resources_list, list), 'resources_list is not an list'
         Provider._validate_json(resources_list)
         self._resources = resources_list
-        MODULE_LOGGER.debug('Resources loaded: ')
-        for resource in self._resources:
-            MODULE_LOGGER.debug(json.dumps(resource))
 
     @staticmethod
     def _validate_json(data: List[dict]):
@@ -51,5 +47,5 @@ def _validate_json(data: List[dict]):
 
         duplicates = filter_(counts.keys(), lambda key: counts[key] > 1)
         if duplicates:
-            MODULE_LOGGER.warning('Duplicates: %s', duplicates)
+            MODULE_LOGGER.warning(f'Duplicates: {duplicates}')
             raise ValueError(f"Invalid json, duplicate ids in {duplicates}")
diff --git a/lockable/provider_file.py b/lockable/provider_file.py
index a36e757..706c660 100644
--- a/lockable/provider_file.py
+++ b/lockable/provider_file.py
@@ -17,14 +17,14 @@ def __init__(self, uri: str):
         ProviderFile constructor
         :param uri: file path
         """
-        MODULE_LOGGER.debug('Creating ProviderFile using %s', uri)
+        MODULE_LOGGER.debug(f'Creating ProviderFile using {uri}')
         self._resource_list_file_mtime = None
         super().__init__(uri)
 
     def reload(self):
         """ Load resources list file"""
         self.reload_resource_list_file()
-        MODULE_LOGGER.warning('Use resources from %s file', self._uri)
+        MODULE_LOGGER.warning(f'Use resources from {self._uri} file')
 
     def reload_resource_list_file(self):
         """ Reload resources from file if file has been modified """
@@ -37,7 +37,7 @@ def reload_resource_list_file(self):
     @staticmethod
     def _read_resources_list_file(filename: str) -> List[dict]:
         """ Read resources json file """
-        MODULE_LOGGER.debug('Read resource list file: %s', filename)
+        MODULE_LOGGER.debug(f'Read resource list file: {filename}')
         with open(filename, encoding="utf-8") as json_file:
             try:
                 data = json.load(json_file)
diff --git a/lockable/provider_http.py b/lockable/provider_http.py
index 0a64494..afc8238 100644
--- a/lockable/provider_http.py
+++ b/lockable/provider_http.py
@@ -17,7 +17,7 @@ class RetryWithLogging(Retry):
     def increment(self, *args, **kwargs):
         try:
             error = kwargs['error']
-            MODULE_LOGGER.warning('retried http resources GET due to %s', error)
+            MODULE_LOGGER.warning(f'retried http resources GET due to {error}')
         except KeyError:
             pass
 
@@ -33,7 +33,7 @@ class ProviderHttp(Provider):
 
     def __init__(self, uri: str):
         """ ProviderHttp constructor """
-        MODULE_LOGGER.debug('Creating ProviderHTTP using %s', uri)
+        MODULE_LOGGER.debug(f'Creating ProviderHTTP using {uri}')
         self._configure_http_strategy(uri)
         super().__init__(uri)
 
@@ -82,14 +82,14 @@ def _get_list(self) -> list:
             # access JSON content
             return response.json()
         except HTTPError as http_err:
-            MODULE_LOGGER.error('HTTP error occurred %s', http_err)
+            MODULE_LOGGER.error(f'HTTP error occurred {http_err}')
             raise ProviderError(http_err.response.reason) from http_err
         except RequestConnectionError as error:
-            MODULE_LOGGER.error('Connection error: %s', error)
+            MODULE_LOGGER.error(f'Connection error: {error}')
             raise ProviderError(error) from error
         except MaxRetryError as error:
-            MODULE_LOGGER.error('Max retries error: %s', error)
+            MODULE_LOGGER.error(f'Max retries error: {error}')
             raise ProviderError(error) from error
         except Exception as error:
-            MODULE_LOGGER.error('Other error occurred: %s', error)
+            MODULE_LOGGER.error(f'Other error occurred: {error}')
             raise ProviderError(error) from error
diff --git a/setup.py b/setup.py
index 9f288e9..315cd46 100644
--- a/setup.py
+++ b/setup.py
@@ -36,9 +36,9 @@
         "Topic :: Software Development :: Quality Assurance",
         "Topic :: Software Development :: Testing",
         "Topic :: Utilities",
+        'Programming Language :: Python :: 3.10',
         'Programming Language :: Python :: 3.9',
         'Programming Language :: Python :: 3.8',
-        'Programming Language :: Python :: 3.7',
         "Programming Language :: Python :: 3 :: Only",
     ],
     packages=find_packages(exclude=['tests']),