7 changes: 7 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,7 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.5.5
    hooks:
      - id: ruff
        args: [--fix, --exit-non-zero-on-fix]
      - id: ruff-format
9 changes: 9 additions & 0 deletions CHANGELOG.md
@@ -1,4 +1,13 @@
# Changelog
## 0.10.0 (2025-11-18)
- Modernized the repository by migrating to `uv` for dependency management and `Ruff` for code formatting and linting.
- Replaced `setup.cfg` with a modern `pyproject.toml` using `hatchling`.
- Defined all project dependencies in `pyproject.toml`.
- Generated a `uv.lock` file for reproducible environments.
- Added a `.pre-commit-config.yaml` to run `Ruff` automatically.
- Formatted the entire codebase with `Ruff`.
- Updated versioning to use `importlib.metadata`.

## 0.9.0 (2022-07-25)
- Files containing CFs information are now stored in .yaml format (instead of .xlsx)
- `add_aesa_pbs()` still reads .xlsx files, which are generated in the background on execution with the help of the functionality added by the `DataConverter` class
52 changes: 47 additions & 5 deletions pyproject.toml
@@ -1,7 +1,49 @@
[build-system]
requires = [
    "setuptools>=58",
    "wheel",
    "bw2io >= 0.8.6",
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "aesa_pbs"
version = "0.10.0"
authors = [
    { name = "Victor Tulus", email = "vtulus@ethz.ch" },
]
build-backend = "setuptools.build_meta"
description = "Implementation of Absolute Environmental Sustainability Assessment (AESA) methods in brightway"
readme = "README.md"
license = { file = "LICENSE" }
requires-python = ">=3.8"
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: BSD License",
    "Operating System :: Microsoft :: Windows",
    "Topic :: Scientific/Engineering",
    "Topic :: Scientific/Engineering :: Mathematics",
    "Topic :: Scientific/Engineering :: Information Analysis",
]
dependencies = [
    "brightway2",
    "bw2io",
    "bw2data",
    "prettytable",
    "pandas",
    "PyYAML",
    "openpyxl",
]

[project.urls]
"Homepage" = "https://github.com/vtulus/AESAmethods"

[project.optional-dependencies]
test = [
    "pytest",
    "pytest-cov",
]

[tool.ruff]
line-length = 88

[tool.ruff.lint]
select = ["E", "F", "W"]

[tool.ruff.format]
quote-style = "double"
32 changes: 0 additions & 32 deletions setup.cfg

This file was deleted.

5 changes: 4 additions & 1 deletion src/aesa_pbs/__init__.py
@@ -1,3 +1,5 @@
import importlib.metadata

__all__ = [
    "add_aesa_pbs",
    "get_nitrogenous_fertilizers",
@@ -6,11 +8,12 @@
    "DataConverter",
]

__version__ = importlib.metadata.version("aesa_pbs")

from .aesa_pbs import add_aesa_pbs
from .n_direct_fixation import (
    get_nitrogenous_fertilizers,
    remove_nitrogen_fertilizer_exchanges,
    update_nitrogen_fertilizer_exchanges,
)
from .data_converter import DataConverter
from .version import __version__
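A minimal sketch (not part of this diff) of the `importlib.metadata` pattern that replaces `version.py`; the package itself calls `version("aesa_pbs")` directly, and the fallback branch below is only an illustration for running from an uninstalled checkout:

```python
import importlib.metadata

try:
    # Reads the version declared in pyproject.toml from the installed metadata.
    __version__ = importlib.metadata.version("aesa_pbs")
except importlib.metadata.PackageNotFoundError:
    # Illustrative fallback for an uninstalled source checkout; placeholder value.
    __version__ = "0.0.0.dev0"
```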
38 changes: 26 additions & 12 deletions src/aesa_pbs/aesa_pbs.py
@@ -8,13 +8,14 @@

from .biosphere import get_biosphere_database
from .data_converter import DataConverter
from .version import __version__
from . import __version__

DATA_DIR = Path(__file__).resolve().parent / "data"
DATA_EXCELS = Path(DATA_DIR).resolve() / "excels"


# write_methods() does not write metadata other than "description", "unit" and "filename"
# write_methods() does not write metadata other than "description",
# "unit" and "filename"
# store everything in the description using json.dumps()
# later can be retrieved with json.loads()
def add_aesa_pbs(verbose=True):
@@ -189,7 +190,10 @@ def add_aesa_pbs(verbose=True):
"Tg N",
json.dumps(
{
"overview": "direct quantification of industrial and intentional biological fixation of N fertilizer",
"overview": (
"direct quantification of industrial and intentional"
" biological fixation of N fertilizer"
),
"authors": MAINTAINER,
"doi": None,
"current_version": "v" + __version__,
@@ -204,7 +208,10 @@
"% forested land",
json.dumps(
{
"overview": "Unit: area of forested land as % of original forest cover",
"overview": (
"Unit: area of forested land as % of original forest"
" cover"
),
"authors": RYBERG_ET_AL,
"doi": DOI_RYBERG,
"current_version": "v" + __version__,
@@ -219,7 +226,10 @@
"km3",
json.dumps(
{
"overview": "Unit: Maximum amount of consumptive blue water use per year",
"overview": (
"Unit: Maximum amount of consumptive blue water use per"
" year"
),
"authors": RYBERG_ET_AL,
"doi": DOI_RYBERG,
"current_version": "v" + __version__,
@@ -352,9 +362,9 @@ def add_aesa_pbs(verbose=True):
)

# confirm that everything is correctly linked
assert (
len(list(method.unlinked)) == 0
), f"{cat[0]} method contains unlinked flows. Method could not be installed."
assert len(list(method.unlinked)) == 0, (
f"{cat[0]} method contains unlinked flows. Method could not be installed."
)

# write method
method.write_methods(overwrite=True, verbose=verbose)
@@ -375,8 +385,13 @@


# refs
RYBERG_ET_AL = "Ryberg, M. W.; Owsianiak, M.; Richardson, K.; Hauschild, M. Z."
GALAN_ET_AL = "Galán-Martín, Á.; Tulus, V.; Díaz, I.; Pozo, C.; Pérez-Ramírez, J.; Guillén-Gosálbez, G."
RYBERG_ET_AL = (
"Ryberg, M. W.; Owsianiak, M.; Richardson, K.; Hauschild, M. Z."
)
GALAN_ET_AL = (
"Galán-Martín, Á.; Tulus, V.; Díaz, I.; Pozo, C.; Pérez-Ramírez, J.;"
" Guillén-Gosálbez, G."
)
DOI_RYBERG = "https://doi.org/10.1016/j.ecolind.2017.12.065"
DOI_GALAN = "https://doi.org/10.1016/j.oneear.2021.04.001"
MAINTAINER = "Tulus, V."
@@ -396,8 +411,7 @@ def drop_empty_lines(data):

# TODO: substitute the print with logging?
def warning_directly_fixated_n() -> None:
"""Printing a warning regarding a missing database.
"""
"""Printing a warning regarding a missing database."""
message = PrettyTable(["Warning"])
message.add_row(
[
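The comment at the top of this file notes that method metadata is stored as JSON in the `description` field and read back with `json.loads()`. A rough sketch of that round trip, assuming the methods were already installed with `add_aesa_pbs()`; the method lookup below is hypothetical, not part of the PR:

```python
import json

import bw2data as bd

# Hypothetical lookup: pick one installed AESA method key (a tuple of names).
method_key = next(m for m in bd.methods if "AESA" in str(m[0]))

# write_methods() stored the json.dumps() payload in the "description" field.
meta = json.loads(bd.Method(method_key).metadata["description"])
print(meta["overview"], meta["current_version"])
```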
8 changes: 6 additions & 2 deletions src/aesa_pbs/biosphere.py
@@ -1,6 +1,10 @@
import bw2data as bd


def get_biosphere_database():
ERROR = "AESA methods work with ecoinvent biosphere flows only. Install base ecoinvent data."
ERROR = (
"AESA methods work with ecoinvent biosphere flows only. Install base"
" ecoinvent data."
)
assert "biosphere3" in bd.databases, ERROR
return list(bd.Database("biosphere3"))
return list(bd.Database("biosphere3"))
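As a usage sketch (not from this PR), the guard above means method installation fails early unless the ecoinvent biosphere is present; the project name below is a placeholder:

```python
import bw2data as bd

import aesa_pbs

bd.projects.set_current("my_project")  # placeholder project name
if "biosphere3" in bd.databases:
    # Installs the AESA (PBs-LCIA) methods into the current project.
    aesa_pbs.add_aesa_pbs(verbose=True)
else:
    print("Install the base ecoinvent biosphere data first.")
```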
38 changes: 25 additions & 13 deletions src/aesa_pbs/data_converter.py
@@ -74,7 +74,8 @@ def to_yaml(self, outfilepath: str = None, verbose=True) -> None:
Parameters
----------
outfilepath : str, optional
Absolute file path. If None is provided, uses input file name and default directory.
Absolute file path. If None is provided, uses input file name
and default directory.
verbose : bool, optional
Print completion message, by default True
"""
@@ -107,7 +108,8 @@ def to_excel(self, outfilepath: str = None, verbose=True) -> None:
Parameters
----------
outfilepath : str, optional
Absolute file path. If None is provided, uses input file name and default directory.
Absolute file path. If None is provided, uses input file name
and default directory.
verbose : bool, optional
Print completion message, by default True
"""
@@ -138,9 +140,10 @@ def _validate_extension(filepath: str, extension: str) -> None:
extension : str
Desired extension of the file.
"""
assert (
Path(filepath).suffix == extension
), f"Filepath extension ('{Path(filepath).suffix}') is not valid. Must be '{extension}'."
assert Path(filepath).suffix == extension, (
f"Filepath extension ('{Path(filepath).suffix}') is not valid. Must be"
f" '{extension}'."
)


def _sanitize(data: pd.DataFrame, filename: str) -> pd.DataFrame:
@@ -160,9 +163,9 @@ def _sanitize(data: pd.DataFrame, filename: str) -> pd.DataFrame:
pd.DataFrame
Sanitized data without missing values, nor duplicates
"""
assert {"name", "categories", "amount"}.issubset(
data.columns
), "Data must contain 'name', 'categories' and 'amount' column labels."
assert {"name", "categories", "amount"}.issubset(data.columns), (
"Data must contain 'name', 'categories' and 'amount' column labels."
)
data = data[["name", "categories", "amount"]] # the extra columns are dropped

# "amount" column should have only numeric values,
@@ -233,19 +236,28 @@ def _remove_duplicates(data: pd.DataFrame, filename: str) -> pd.DataFrame:
ValueError
If the resulting DataFrame is empty
"""
duplicates = data[
data.duplicated(subset=["name", "categories", "amount"], keep="first")
]
# Create a temporary series with string-ified categories for duplicate check
categories_as_str = data["categories"].astype(str)

# Identify duplicates based on the original columns, but using the stringified
# categories
is_duplicated = data.assign(categories=categories_as_str).duplicated(
subset=["name", "categories", "amount"], keep="first"
)

duplicates = data[is_duplicated]
if duplicates.empty:
clean_data = data
else:
message_duplicate = f"Duplicated flows found in {filename} (see below):\n"
message_duplicate = (
f"Duplicated flows found in {filename} (see below):\n"
)
message_duplicate += duplicates.to_markdown(
index=False, tablefmt="pretty", stralign="left"
)
message_duplicate += "\nNote: All duplicates will be removed.\n"
logging.warning(message_duplicate)
clean_data = data.drop_duplicates(keep="first", ignore_index=True)
clean_data = data[~is_duplicated].reset_index(drop=True)
if clean_data.empty:
raise ValueError(f"Data in {filename} is not valid.")
return clean_data
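A self-contained sketch of the duplicate check introduced above, assuming `categories` holds list-like values that pandas cannot hash directly in `.duplicated()`; the sample rows are made up for illustration:

```python
import pandas as pd

data = pd.DataFrame(
    {
        "name": ["Carbon dioxide", "Carbon dioxide", "Methane"],
        "categories": [["air"], ["air"], ["air"]],
        "amount": [1.0, 1.0, 25.0],
    }
)

# String-ify the unhashable column only for the duplicate test.
categories_as_str = data["categories"].astype(str)
is_duplicated = data.assign(categories=categories_as_str).duplicated(
    subset=["name", "categories", "amount"], keep="first"
)
clean_data = data[~is_duplicated].reset_index(drop=True)
print(clean_data)  # the second "Carbon dioxide" row is dropped
```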
10 changes: 6 additions & 4 deletions src/aesa_pbs/n_direct_fixation.py
@@ -5,7 +5,7 @@ def get_nitrogenous_fertilizers(db_name: str) -> list:
"""Get list of nitrogenous fertilizers

Get all activities classified according to CPC as
"Class 3461: Mineral of chemical fertilizers, nitrogenous",
"Class 3461: Mineral of chemical fertilizers, nitrogenous",
except "market"-type activities

Parameters
@@ -30,7 +30,7 @@ def get_nitrogenous_fertilizers(db_name: str) -> list:


def update_nitrogen_fertilizer_exchanges(activities: list, show_updated=True) -> None:
"""Create exchanges for 'nitrogen fertilizer' in `activities` if they don't exist already.
"""Create new exchanges for 'nitrogen fertilizer' in `activities`.

Does not return anything, but modified `activities` inplace.

@@ -100,7 +100,8 @@ def is_exchange(exc):
)

print(
"Cleaning 'nitrogen fertilizer' exchanges from the activities that produce nitrogen fertilizer..."
"Cleaning 'nitrogen fertilizer' exchanges from the activities that produce"
" nitrogen fertilizer..."
)
cleaned_act = set()
for act in activities:
@@ -110,7 +111,8 @@ def is_exchange(exc):
cleaned_act.add(act)
if show_cleaned:
print(
f"These {len(cleaned_act)} activities have been cleaned: \n{list(cleaned_act)}."
f"These {len(cleaned_act)} activities have been cleaned:"
f" \n{list(cleaned_act)}."
)
else:
print(f"{len(cleaned_act)} activities have been cleaned.")
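For orientation, a hedged usage sketch of the two helpers touched above; the project and database names are placeholders, not part of this PR:

```python
import bw2data as bd

from aesa_pbs import (
    get_nitrogenous_fertilizers,
    update_nitrogen_fertilizer_exchanges,
)

bd.projects.set_current("my_project")  # placeholder project name
# Collect CPC 3461 fertilizer activities from a placeholder database name.
activities = get_nitrogenous_fertilizers("ecoinvent-3.9-cutoff")
# Add the 'nitrogen fertilizer' exchanges in place.
update_nitrogen_fertilizer_exchanges(activities, show_updated=True)
```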
11 changes: 7 additions & 4 deletions src/aesa_pbs/utils/utils.py
@@ -59,7 +59,7 @@ def progressbar(itobj: list, **kwargs):
----------
itobj : list-like
Iterable object

**kwargs
--------
total : int, default len(itobj)
@@ -74,11 +74,13 @@
str-like symbol to be used as "done"-icon
icon_todo : str, default "🔳"
str-like symbol to be used as "to do"-icon

Example
-------
lst = [5, 3, 4]
for i in progressbar(itobj=lst, total=len(lst), prefix="Progress: ", size=5, unit="datapoint"):
for i in progressbar(
itobj=lst, total=len(lst), prefix="Progress: ", size=5, unit="datapoint"
):
# do_something
"""
start_time = datetime.now()
@@ -111,7 +113,8 @@ def show(j):
# some ASCII symbol or emoji alternatives

# |, *, #, %, ▒. ▓, █, ■, ♢, ⚃ ⚄ ⚅
# 🦾, 👍, 🔴, 🟡 🟢 🔵 🟣 ⚫️ ⚪️, 🔸 🔹 🔶 🔷 🔳 🔲 ▪️ ▫️ ◾️ ◽️ ◼️ ◻️ 🟥 🟧 🟨 🟩 🟦 🟪 ⬛️ ⬜️, ▶️
# 🦾, 👍, 🔴, 🟡 🟢 🔵 🟣 ⚫️ ⚪️, 🔸 🔹 🔶 🔷 🔳 🔲 ▪️ ▫️ ◾️ ◽️ ◼️ ◻️ 🟥 🟧 🟨 🟩
# 🟦 🟪 ⬛️ ⬜️, ▶️

file.write(
"%s[%s%s] %i/%i %s\r"
1 change: 0 additions & 1 deletion src/aesa_pbs/version.py

This file was deleted.

Empty file added tests/__init__.py
Empty file.