Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=
disable=logging-fstring-interpolation

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
Expand Down Expand Up @@ -110,7 +110,7 @@ never-returning-functions=sys.exit

# Format style used to check logging format string. `old` means using %
# formatting, `new` is for `{}` formatting, and `fstr` is for f-strings.
logging-format-style=old
logging-format-style=new

# Logging modules to check that the string format arguments are in logging
# function parameter format.
Expand Down
38 changes: 18 additions & 20 deletions lockable/lockable.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def invariant(true, message):

class Lockable:
"""
Base class for Lockable. It handle low-level functionality.
Base class for Lockable. It handles low-level functionality.
"""

def __init__(self,
Expand All @@ -44,6 +44,7 @@ def __init__(self,
MODULE_LOGGER.debug('Initialized lockable')
self._hostname = hostname
self._lock_folder = lock_folder
MODULE_LOGGER.debug(f"Use lock folder: {self._lock_folder}")
assert not (isinstance(resource_list, list) and
resource_list_file), 'only one of resource_list or ' \
'resource_list_file is accepted, not both'
Expand Down Expand Up @@ -98,15 +99,14 @@ def _try_lock(self, requirements, candidate):
resource_id = candidate.get("id")
try:
pid_file = f"{resource_id}.pid"
MODULE_LOGGER.debug('Trying lock using: %s', os.path.join(self._lock_folder, pid_file))

MODULE_LOGGER.debug(f'Trying lock using: {os.path.join(self._lock_folder, pid_file)}')
_lockable = PidFile(pidname=pid_file, piddir=self._lock_folder)
_lockable.create()
MODULE_LOGGER.info('Allocated: %s, lockfile: %s', resource_id, pid_file)
MODULE_LOGGER.info(f'Allocated: {resource_id}, lockfile: {pid_file}')

def release():
nonlocal self, resource_id, _lockable
MODULE_LOGGER.info('Release resource: %s', resource_id)
MODULE_LOGGER.info(f'Release resource: {resource_id}')
_lockable.close()
del self._allocations[resource_id]

Expand All @@ -119,8 +119,8 @@ def release():

def _lock_some(self, requirements, candidates, timeout_s, retry_interval):
""" Contextmanager that lock some candidate that is free and release it finally """
MODULE_LOGGER.debug('Total match local resources: %d, timeout: %d',
len(candidates), timeout_s)
MODULE_LOGGER.debug(f'Total match local resources: {len(candidates)}, '
f'timeout: {timeout_s}s')
if not isinstance(requirements, list):
requirements = [requirements]
abort_after = timeout_s
Expand All @@ -139,10 +139,9 @@ def _lock_some(self, requirements, candidates, timeout_s, retry_interval):

try:
allocation = self._try_lock(req, candidate)
MODULE_LOGGER.debug('resource %s allocated (%s), alloc_id: (%s)',
allocation.resource_id,
json.dumps(allocation.resource_info),
allocation.alloc_id)
MODULE_LOGGER.debug(f'resource {allocation.resource_id} '
f'allocated ({json.dumps(allocation.resource_info)}), '
f'alloc_id: ({allocation.alloc_id})')
self._allocations[allocation.resource_id] = allocation
current_allocations.append(allocation)
fulfilled_requirement_indexes.append(index)
Expand Down Expand Up @@ -196,7 +195,7 @@ def _lock_many(self, requirements, timeout_s, retry_interval=1) -> [Allocation]:
@staticmethod
def _get_requirements(requirements, hostname):
""" Generate requirements"""
MODULE_LOGGER.debug('hostname: %s', hostname)
MODULE_LOGGER.debug(f'hostname: {hostname}')
merged = merge({'hostname': hostname, 'online': True}, requirements)
allowed_to_del = ["online", "hostname"]
for key in allowed_to_del:
Expand All @@ -218,9 +217,8 @@ def lock(self, requirements: (str or dict), timeout_s: int = DEFAULT_TIMEOUT) ->
# Refresh resources data
self._provider.reload()
begin = datetime.now()
MODULE_LOGGER.debug("Use lock folder: %s", self._lock_folder)
MODULE_LOGGER.debug("Requirements: %s", json.dumps(predicate))
MODULE_LOGGER.debug("Resource list: %s", json.dumps(self.resource_list))
MODULE_LOGGER.debug(f"Requirements: {json.dumps(predicate)}")
MODULE_LOGGER.debug(f"Resource list: {json.dumps(self.resource_list)}")
allocation = self._lock(predicate, timeout_s)
allocation.allocation_queue_time = datetime.now() - begin
return allocation
Expand All @@ -238,9 +236,8 @@ def lock_many(self, requirements: list, timeout_s: int = DEFAULT_TIMEOUT) -> lis
predicates.append(self._get_requirements(self.parse_requirements(req), self._hostname))
self._provider.reload()
begin = datetime.now()
MODULE_LOGGER.debug("Use lock folder: %s", self._lock_folder)
MODULE_LOGGER.debug("Requirements: %s", json.dumps(predicates))
MODULE_LOGGER.debug("Resource list: %s", json.dumps(self.resource_list))
MODULE_LOGGER.debug(f"Requirements: {json.dumps(predicates)}")
MODULE_LOGGER.debug(f"Resource list: {json.dumps(self.resource_list)}")

allocations = self._lock_many(predicates, timeout_s)
for allocation in allocations:
Expand All @@ -254,9 +251,10 @@ def unlock(self, allocation: Allocation) -> None:
:return: None
"""
assert 'id' in allocation.resource_info, 'missing "id" -key'
MODULE_LOGGER.info('Release: %s', allocation.resource_id)
MODULE_LOGGER.info(f'Release: {allocation.resource_id}')
resource_id = allocation.resource_id
ResourceNotFound.invariant(resource_id in self._allocations, 'resource not locked')
ResourceNotFound.invariant(resource_id in self._allocations,
'resource not locked')
reservation = self._allocations[resource_id]
reservation.release(allocation.alloc_id)

Expand Down
6 changes: 1 addition & 5 deletions lockable/provider.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
""" Provider library """
from abc import ABC, abstractmethod
import json
import typing
from typing import List

Expand Down Expand Up @@ -37,9 +36,6 @@ def set_resources_list(self, resources_list: list):
assert isinstance(resources_list, list), 'resources_list is not an list'
Provider._validate_json(resources_list)
self._resources = resources_list
MODULE_LOGGER.debug('Resources loaded: ')
for resource in self._resources:
MODULE_LOGGER.debug(json.dumps(resource))

@staticmethod
def _validate_json(data: List[dict]):
Expand All @@ -51,5 +47,5 @@ def _validate_json(data: List[dict]):

duplicates = filter_(counts.keys(), lambda key: counts[key] > 1)
if duplicates:
MODULE_LOGGER.warning('Duplicates: %s', duplicates)
MODULE_LOGGER.warning(f'Duplicates: {duplicates}')
raise ValueError(f"Invalid json, duplicate ids in {duplicates}")
6 changes: 3 additions & 3 deletions lockable/provider_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,14 +17,14 @@ def __init__(self, uri: str):
ProviderFile constructor
:param uri: file path
"""
MODULE_LOGGER.debug('Creating ProviderFile using %s', uri)
MODULE_LOGGER.debug(f'Creating ProviderFile using {uri}')
self._resource_list_file_mtime = None
super().__init__(uri)

def reload(self):
""" Load resources list file"""
self.reload_resource_list_file()
MODULE_LOGGER.warning('Use resources from %s file', self._uri)
MODULE_LOGGER.warning(f'Use resources from {self._uri} file')

def reload_resource_list_file(self):
""" Reload resources from file if file has been modified """
Expand All @@ -37,7 +37,7 @@ def reload_resource_list_file(self):
@staticmethod
def _read_resources_list_file(filename: str) -> List[dict]:
""" Read resources json file """
MODULE_LOGGER.debug('Read resource list file: %s', filename)
        MODULE_LOGGER.debug(f'Read resource list file: {filename}')
with open(filename, encoding="utf-8") as json_file:
try:
data = json.load(json_file)
Expand Down
12 changes: 6 additions & 6 deletions lockable/provider_http.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ class RetryWithLogging(Retry):
def increment(self, *args, **kwargs):
try:
error = kwargs['error']
MODULE_LOGGER.warning('retried http resources GET due to %s', error)
MODULE_LOGGER.warning(f'retried http resources GET due to {error}')
except KeyError:
pass

Expand All @@ -33,7 +33,7 @@ class ProviderHttp(Provider):

def __init__(self, uri: str):
""" ProviderHttp constructor """
MODULE_LOGGER.debug('Creating ProviderHTTP using %s', uri)
MODULE_LOGGER.debug(f'Creating ProviderHTTP using {uri}')
self._configure_http_strategy(uri)
super().__init__(uri)

Expand Down Expand Up @@ -82,14 +82,14 @@ def _get_list(self) -> list:
# access JSON content
return response.json()
except HTTPError as http_err:
MODULE_LOGGER.error('HTTP error occurred %s', http_err)
MODULE_LOGGER.error(f'HTTP error occurred {http_err}')
raise ProviderError(http_err.response.reason) from http_err
except RequestConnectionError as error:
MODULE_LOGGER.error('Connection error: %s', error)
MODULE_LOGGER.error(f'Connection error: {error}')
raise ProviderError(error) from error
except MaxRetryError as error:
MODULE_LOGGER.error('Max retries error: %s', error)
MODULE_LOGGER.error(f'Max retries error: {error}')
raise ProviderError(error) from error
except Exception as error:
MODULE_LOGGER.error('Other error occurred: %s', error)
MODULE_LOGGER.error(f'Other error occurred: {error}')
raise ProviderError(error) from error
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,9 @@
"Topic :: Software Development :: Quality Assurance",
"Topic :: Software Development :: Testing",
"Topic :: Utilities",
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.7',
"Programming Language :: Python :: 3 :: Only",
],
packages=find_packages(exclude=['tests']),
Expand Down