4 changes: 2 additions & 2 deletions rptools/rpcompletion/rpcompletion.py
@@ -548,8 +548,8 @@ def __read_pathways(
 
     try:
         df = pd.read_csv(infile)
-    except pd.errors.EmptyDataError as e:
-        logger.error(f'File {infile} is empty: {e}')
+    except (pd.errors.EmptyDataError, FileNotFoundError) as e:
+        logger.error(f'File {infile} is empty or does not exist: {e}')
         return {}, {}
 
     check = __check_pathways(df)
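A note on this change: pandas raises `FileNotFoundError` when the path does not exist and `pandas.errors.EmptyDataError` when the file has no columns to parse, so the widened `except` lets `__read_pathways` log either problem and return empty results instead of propagating the exception. A minimal standalone sketch of the same read-with-fallback pattern (the function name and return value here are illustrative, not taken from the repository):

```python
import logging

import pandas as pd

logger = logging.getLogger(__name__)

def read_table_or_empty(infile: str) -> pd.DataFrame:
    """Illustrative only: read a CSV, tolerating empty or missing files."""
    try:
        return pd.read_csv(infile)
    except (pd.errors.EmptyDataError, FileNotFoundError) as e:
        # Missing and empty inputs are handled the same way: log the
        # problem and hand back an empty DataFrame so callers always
        # receive the same shape of result.
        logger.error(f'File {infile} is empty or does not exist: {e}')
        return pd.DataFrame()
```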
20 changes: 20 additions & 0 deletions rptools/rpextractsink/rpextractsink.py
@@ -11,6 +11,7 @@
 from os import path as os_path
 from requests import get as r_get
 from re import search as re_search
+from time import sleep
 
 from rr_cache import rrCache
 from rptools.rpfba import cobra_format
@@ -135,6 +136,25 @@ def get_inchi_from_crossid(
     except Exception as e:
         logger.warning(f'Connection lost from {url_search}')
         return ''
+    logger.debug(f'Page retrieved from MetaNetX for {id}: {page.url}')
+    logger.debug(f'Page content: {page.text}')
+    # If server 'too busy', wait and retry 3 times
+    retries = 3
+    wait_time = 5  # seconds
+    attempt = 0
+    while 'please try again' in page.text.lower() and attempt < retries:
+        logger.debug(f'Server is too busy. Retrying in {wait_time} seconds... (Attempt {attempt + 1} of {retries})')
+        sleep(wait_time)
+        try:
+            page = r_get(f'{url_search}?query={id}')
+        except Exception as e:
+            logger.warning(f'Connection lost from {url_search}')
+            return ''
+        attempt += 1
+    if 'please try again' in page.text.lower():
+        logger.debug('Server is still too busy after multiple attempts. Aborting retrieval.')
+        return ''
+    logger.debug(f'Final page content after retries: {page.text}')
     url_crossid = re_search(r'/chem_info/\w+', page.text).group()
     return get_inchi_from_url(f'{url_mnx}{url_crossid}', logger)
 
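The new block in `get_inchi_from_crossid` is a fixed-interval retry: when the MetaNetX reply contains "please try again", the function waits five seconds, re-issues the request, repeats up to three extra attempts, and finally gives up with an empty string. A self-contained sketch of that pattern, using hypothetical names (`fetch_with_retry`, the generic `url` parameter) that are not part of the PR:

```python
import logging
from time import sleep

from requests import get as r_get, RequestException

logger = logging.getLogger(__name__)

def fetch_with_retry(url: str, retries: int = 3, wait_time: int = 5) -> str:
    """Hypothetical helper mirroring the PR's retry loop: re-request a page
    while the server replies with a 'please try again' message."""
    try:
        page = r_get(url)
    except RequestException:
        logger.warning(f'Connection lost from {url}')
        return ''
    attempt = 0
    while 'please try again' in page.text.lower() and attempt < retries:
        logger.debug(f'Server busy, retrying in {wait_time}s (attempt {attempt + 1} of {retries})')
        sleep(wait_time)
        try:
            page = r_get(url)
        except RequestException:
            logger.warning(f'Connection lost from {url}')
            return ''
        attempt += 1
    if 'please try again' in page.text.lower():
        logger.debug('Server still busy after all retries; giving up.')
        return ''
    return page.text
```

A capped, fixed-interval retry keeps the worst-case delay bounded (about 15 seconds with the defaults above); exponential backoff would be the usual alternative if the remote service tends to stay busy for longer stretches.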