From edbf8f039620bfcb9de32792bbe30743105e65dd Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Wed, 30 Jul 2025 12:47:13 +0200
Subject: [PATCH 01/14] remove deprecated code - replaced by service module
 which handles table creation orchestration

---
 api/views.py | 258 ++++-----------------------------------------------
 1 file changed, 16 insertions(+), 242 deletions(-)

diff --git a/api/views.py b/api/views.py
index 5dd596ff1..4dbcab8fd 100644
--- a/api/views.py
+++ b/api/views.py
@@ -1,20 +1,20 @@
-# SPDX-FileCopyrightText: 2025 Adel Memariani © Otto-von-Guericke-Universität Magdeburg
-# SPDX-FileCopyrightText: 2025 Adel Memariani © Otto-von-Guericke-Universität Magdeburg
-# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V.
+# SPDX-FileCopyrightText: 2025 Adel Memariani © Otto-von-Guericke-Universität Magdeburg # noqa: E501
+# SPDX-FileCopyrightText: 2025 Adel Memariani © Otto-von-Guericke-Universität Magdeburg # noqa: E501
+# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. # noqa: E501
 # SPDX-FileCopyrightText: 2025 Eike Broda
-# SPDX-FileCopyrightText: 2025 Johann Wagner © Otto-von-Guericke-Universität Magdeburg
-# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg
-# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg
-# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg
-# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V.
-# SPDX-FileCopyrightText: 2025 Christian Hofmann © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 chrwm © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 user © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V.
+# SPDX-FileCopyrightText: 2025 Johann Wagner © Otto-von-Guericke-Universität Magdeburg # noqa: E501
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg # noqa: E501
+# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg # noqa: E501
+# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg # noqa: E501
+# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. # noqa: E501
+# SPDX-FileCopyrightText: 2025 Christian Hofmann © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 chrwm © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 user © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. # noqa: E501
 #
 # SPDX-License-Identifier: AGPL-3.0-or-later
 
@@ -23,7 +23,6 @@
 import json
 import logging
 import re
-from datetime import datetime, timedelta  # noqa
 from decimal import Decimal
 
 import geoalchemy2  # noqa: Although this import seems unused is has to be here
@@ -34,9 +33,7 @@
 from django.contrib.auth.mixins import LoginRequiredMixin
 from django.contrib.postgres.search import TrigramSimilarity
 from django.core.exceptions import ObjectDoesNotExist
-from django.db import DatabaseError, transaction
 from django.db.models import Q
-from django.db.utils import IntegrityError
 from django.http import (
     Http404,
     HttpResponse,
@@ -81,7 +78,6 @@
 from api.validators.column import validate_column_names
 from api.validators.identifier import assert_valid_identifier_name
 from dataedit.models import Embargo
-from dataedit.models import Schema as DBSchema
 from dataedit.models import Table as DBTable
 from dataedit.views import get_tag_keywords_synchronized_metadata, schema_whitelist
 from factsheet.permission_decorator import post_only_if_user_is_owner_of_scenario_bundle
@@ -576,228 +572,6 @@ def validate_column_names(self, column_definitions):
             if len(colname) > MAX_COL_NAME_LENGTH:
                 raise APIError(f"Column name is too long! {err_msg}")
 
-    def oep_create_table_transaction(
-        self,
-        django_schema_object,
-        schema,
-        table,
-        column_definitions,
-        constraint_definitions,
-    ):
-        """
-        This method handles atomic table creation transactions on the OEP. It
-        attempts to create first the django table objects and stored it in
-        dataedit_tables table. Then it attempts to create the OEDB table.
-        If there is an error raised during the first two steps the function
-        will cleanup any table object or table artifacts created during the
-        process. The order of execution matters, it should always first
-        create the django table object.
-
-        Params:
-            django_schema_object: The schema object stored in the django
-                database
-            schema:
-            table
-            column_definitions
-            constraint_definitions
-
-        returns:
-            table_object: The django table objects that was created
-        """
-
-        try:
-            with transaction.atomic():
-                # First create the table object in the django database.
-                table_object = self._create_table_object(django_schema_object, table)
-                # Then attempt to create the OEDB table to check
-                # if creation will succeed - action includes checks
-                # and will raise api errors
-                actions.table_create(
-                    schema, table, column_definitions, constraint_definitions
-                )
-        except DatabaseError as e:
-            # remove any oedb table artifacts left after table creation
-            # transaction failed
-            self.__remove_oedb_table_on_exception_raised_during_creation_transaction(
-                table, schema
-            )
-
-            # also remove any django table object
-            # find the created django table object
-            object_to_delete = DBTable.objects.filter(
-                name=table, schema=django_schema_object
-            )
-            # delete it if it exists
-            if object_to_delete.exists():
-                object_to_delete.delete()
-
-            raise APIError(
-                message="Error during table creation transaction. All table fragments"
-                f"have been removed. For further details see: {e}"
-            )
-
-        # for now only return the django table object
-        # TODO: Check if is necessary to return the response dict returned by the oedb
-        # table creation function
-        return table_object
-
-    def __remove_oedb_table_on_exception_raised_during_creation_transaction(
-        self, table, schema
-    ):
-        """
-        This private method handles removing a table form the OEDB only for the case
-        where an error was raised during table creation. It specifically will delete
-        the OEDB table created by the user and also the edit_ meta(revision) table
-        that is automatically created in the background.
-        """
-        # find the created oedb table
-        if actions.has_table({"table": table, "schema": schema}):
-            # get table and schema names, also for meta(revision) tables
-            schema, table = actions.get_table_name(schema, table)
-            meta_schema = actions.get_meta_schema_name(schema)
-
-            # drop the revision table with edit_ prefix
-            edit_table = actions.get_edit_table_name(schema, table)
-            actions._get_engine().execute(
-                'DROP TABLE "{schema}"."{table}" CASCADE;'.format(
-                    schema=meta_schema, table=edit_table
-                )
-            )
-            # drop the data table
-            actions._get_engine().execute(
-                'DROP TABLE "{schema}"."{table}" CASCADE;'.format(
-                    schema=schema, table=table
-                )
-            )
-
-    @load_cursor()
-    def __create_table(
-        self,
-        request,
-        schema,
-        table,
-        column_definitions,
-        constraint_definitions,
-        metadata=None,
-        embargo_data=None,
-    ):
-        assert_valid_identifier_name(table)
-        self.validate_column_names(column_definitions)
-
-        schema_object, _ = DBSchema.objects.get_or_create(name=schema)
-        context = {
-            "connection_id": actions.get_or_403(request.data, "connection_id"),
-            "cursor_id": actions.get_or_403(request.data, "cursor_id"),
-        }
-        cursor = sessions.load_cursor_from_context(context)
-
-        embargo_error, embargo_payload_check = self._check_embargo_payload_valid(
-            embargo_data
-        )
-        if embargo_error:
-            raise embargo_error
-
-        if embargo_payload_check:
-            table_object = self.oep_create_table_transaction(
-                django_schema_object=schema_object,
-                table=table,
-                schema=schema,
-                column_definitions=column_definitions,
-                constraint_definitions=constraint_definitions,
-            )
-            self._apply_embargo(table_object, embargo_data)
-
-            if metadata:
-                actions.set_table_metadata(
-                    table=table, schema=schema, metadata=metadata, cursor=cursor
-                )
-
-            try:
-                self._assign_table_holder(request.user, schema, table)
-            except ValueError as e:
-                # Ensure the user is assigned as the table holder
-                self._assign_table_holder(request.user, schema, table)
-                raise APIError(
-                    "Table was created without embargo due to an unexpected "
-                    "error during embargo setup."
-                    f"{e}"
-                )
-
-        else:
-            table_object = self.oep_create_table_transaction(
-                django_schema_object=schema_object,
-                table=table,
-                schema=schema,
-                column_definitions=column_definitions,
-                constraint_definitions=constraint_definitions,
-            )
-            self._assign_table_holder(request.user, schema, table)
-
-            if metadata:
-                actions.set_table_metadata(
-                    table=table, schema=schema, metadata=metadata, cursor=cursor
-                )
-
-    def _create_table_object(self, schema_object, table):
-        try:
-            table_object = DBTable.objects.create(name=table, schema=schema_object)
-        except IntegrityError:
-            raise APIError("Table already exists")
-        return table_object
-
-    def _check_embargo_payload_valid(self, embargo_data):
-        if not embargo_data:
-            return None, False
-
-        if not isinstance(embargo_data, dict):
-            error = APIError("The embargo payload must be a dict")
-            return error, False
-
-        embargo_period = embargo_data.get("duration")
-        if embargo_period in ["6_months", "1_year"]:
-            # self._apply_embargo(table_object, embargo_period)
-            return None, True
-        elif embargo_period == "none":
-            return None, False
-        else:
-            error = actions.APIError(
-                f"Could not parse the embargo period format: {embargo_period}. "
-                "Please use {'embargo': {'duration':'6_months'} } or '1_year' to "
-                "set the embargo or use 'none' to remove the embargo."
-            )
-            return error, False
-
-    def _apply_embargo(self, table_object, embargo_period):
-        unpack_embargo_period = embargo_period.get("duration")
-        duration_in_weeks = 26 if unpack_embargo_period == "6_months" else 52
-        embargo, created = Embargo.objects.get_or_create(
-            table=table_object,
-            defaults={
-                "duration": unpack_embargo_period,
-                "date_ended": datetime.now() + timedelta(weeks=duration_in_weeks),
-            },
-        )
-        if not created:
-            if embargo.date_started:
-                embargo.date_ended = embargo.date_started + timedelta(
-                    weeks=duration_in_weeks
-                )
-            else:
-                embargo.date_started = datetime.now()
-                embargo.date_ended = embargo.date_started + timedelta(
-                    weeks=duration_in_weeks
-                )
-        embargo.save()
-
-    def _assign_table_holder(self, user, schema, table):
-        table_object = DBTable.load(schema, table)
-        perm, _ = login_models.UserPermission.objects.get_or_create(
-            table=table_object, holder=user
-        )
-        perm.level = login_models.ADMIN_PERM
-        perm.save()
-        user.save()
-
     @api_exception
     @require_delete_permission
     def delete(self, request, schema, table):

From f0ea251b6e0f5b4e594623cb9fcc3cd10ff8ad7a Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Fri, 1 Aug 2025 09:47:16 +0200
Subject: [PATCH 02/14] #1971: Implement datasets:

- Add model to dataedit app with n:m relation to tables
- Add hybrid api endpoint to read and create datasets
- Use DRF serializer to handle client data from request
- add service to handle dataset creation
- add migration
- add serializer to process table assignment client request data
- add serializer to list dataset resource with data from table model
- Add views to edit / delete datasets, assign tables as resources to a
  dataset
- list dataset resources (tables)
---
 api/serializers.py                  |  40 ++++++++++-
 api/services/dataset_creation.py    |  23 ++++++
 api/views.py                        | 101 +++++++++++++++++++++++++++-
 dataedit/migrations/0037_dataset.py |  37 ++++++++++
 dataedit/models.py                  |  47 +++++++++----
 5 files changed, 231 insertions(+), 17 deletions(-)
 create mode 100644 api/services/dataset_creation.py
 create mode 100644 dataedit/migrations/0037_dataset.py

diff --git a/api/serializers.py b/api/serializers.py
index 203b0d368..452aaf5ff 100644
--- a/api/serializers.py
+++ b/api/serializers.py
@@ -1,5 +1,4 @@
-# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
 #
 # SPDX-License-Identifier: AGPL-3.0-or-later
 
@@ -10,7 +9,7 @@
 from rest_framework import serializers
 
 from dataedit.helper import get_readable_table_name
-from dataedit.models import Table
+from dataedit.models import Dataset, Table
 from modelview.models import Energyframework, Energymodel
 from oeplatform.settings import URL
 
@@ -164,3 +163,38 @@ def validate_dataset(self, value):
                 raise serializers.ValidationError("Dataset names must be unique.")
 
         return value
+
+
+class DatasetReadSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = Dataset
+        fields = ["uuid", "name", "metadata", "created_at"]
+
+
+class DatasetCreateSerializer(serializers.Serializer):
+    name = serializers.CharField()
+    title = serializers.CharField()
+    description = serializers.CharField()
+    at_id = serializers.URLField(required=False)
+
+
+class DatasetAssignTablesSerializer(serializers.Serializer):
+    tables = serializers.ListField(
+        child=serializers.DictField(child=serializers.CharField()), min_length=1
+    )
+
+    def validate_tables(self, value):
+        for item in value:
+            if "schema" not in item or "name" not in item:
"name" not in item: + raise serializers.ValidationError( + "Each table must have 'schema' and 'name'." + ) + return value + + +class DatasetResourceSerializer(serializers.ModelSerializer): + schema = serializers.StringRelatedField() + + class Meta: + model = Table + fields = ["id", "schema", "name", "oemetadata", "human_readable_name"] diff --git a/api/services/dataset_creation.py b/api/services/dataset_creation.py new file mode 100644 index 000000000..89ff7f820 --- /dev/null +++ b/api/services/dataset_creation.py @@ -0,0 +1,23 @@ +# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501 +# +# SPDX-License-Identifier: AGPL-3.0-or-later + +from typing import Any + +from oemetadata.v2.v20.example import OEMETADATA_V20_EXAMPLE +from oemetadata.v2.v20.template import OEMETADATA_V20_TEMPLATE + + +def assemble_dataset_metadata( + validated_data: dict[str, Any], oemetadata: dict = OEMETADATA_V20_TEMPLATE +) -> dict[str, Any]: + # set the context + oemetadata["@context"] = OEMETADATA_V20_EXAMPLE["@context"] + oemetadata["resources"] = [] # Remove resources + + oemetadata["@id"] = validated_data.get("at_id") + oemetadata["name"] = validated_data["name"] + oemetadata["title"] = validated_data["title"] + oemetadata["description"] = validated_data["description"] + + return oemetadata diff --git a/api/views.py b/api/views.py index 4dbcab8fd..68d6063a6 100644 --- a/api/views.py +++ b/api/views.py @@ -42,6 +42,7 @@ JsonResponse, StreamingHttpResponse, ) +from django.shortcuts import get_object_or_404 from django.utils import timezone from django.utils.decorators import method_decorator from django.views.decorators.cache import never_cache @@ -62,11 +63,16 @@ from api.error import APIError from api.helpers.http import ModHttpResponse from api.serializers import ( + DatasetAssignTablesSerializer, + DatasetCreateSerializer, + DatasetReadSerializer, + DatasetResourceSerializer, EnergyframeworkSerializer, EnergymodelSerializer, ScenarioBundleScenarioDatasetSerializer, ScenarioDataTablesSerializer, ) +from api.services.dataset_creation import assemble_dataset_metadata from api.services.embargo import ( EmbargoValidationError, apply_embargo, @@ -77,7 +83,7 @@ from api.utils import get_dataset_configs from api.validators.column import validate_column_names from api.validators.identifier import assert_valid_identifier_name -from dataedit.models import Embargo +from dataedit.models import Dataset, Embargo from dataedit.models import Table as DBTable from dataedit.views import get_tag_keywords_synchronized_metadata, schema_whitelist from factsheet.permission_decorator import post_only_if_user_is_owner_of_scenario_bundle @@ -374,6 +380,99 @@ def post(self, request, schema, table): raise APIError(error) +class DatasetsListCreate(generics.ListCreateAPIView): + queryset = Dataset.objects.all() + + def get_serializer_class(self): + if self.request.method == "POST": + return DatasetCreateSerializer + return DatasetReadSerializer + + def create(self, request, *args, **kwargs): + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + + metadata = assemble_dataset_metadata(serializer.validated_data) + dataset = Dataset.objects.create(metadata=metadata, name=metadata["name"]) + + return Response( + {"id": dataset.pk, "metadata": dataset.metadata}, + status=status.HTTP_201_CREATED, + ) + + +class DatasetsListResources(generics.ListAPIView): + serializer_class = DatasetResourceSerializer + + def get_queryset(self): + dataset_name = self.kwargs["dataset_name"] 
+ dataset = get_object_or_404(Dataset, name=dataset_name) + return dataset.tables.all() + + +class DatasetManager(APIView): + """ + View to retrieve, update, or delete a single dataset's metadata. + URL: /v0/datasets// + """ + + def get(self, request, dataset_name): + dataset = get_object_or_404(Dataset, name=dataset_name) + serializer = DatasetReadSerializer(dataset) + return Response(serializer.data, status=status.HTTP_200_OK) + + def put(self, request, dataset_name): + dataset = get_object_or_404(Dataset, name=dataset_name) + serializer = DatasetCreateSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + + dataset.metadata = assemble_dataset_metadata(serializer.validated_data) + dataset.save() + return Response({"message": "Dataset updated"}, status=status.HTTP_200_OK) + + def delete(self, request, dataset_name): + dataset = get_object_or_404(Dataset, name=dataset_name) + dataset.delete() + return Response( + {"message": "Dataset deleted"}, status=status.HTTP_204_NO_CONTENT + ) + + +class AssignDatasetTables(APIView): + def post(self, request, dataset_name): + serializer = DatasetAssignTablesSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + + table_refs = serializer.validated_data["tables"] + + try: + dataset = Dataset.objects.get(name=dataset_name) + except Dataset.DoesNotExist: + return Response({"error": "Dataset not found"}, status=404) + + missing = [] + added_tables = [] + + for table_ref in table_refs: + try: + table = DBTable.load(table_ref["schema"], table_ref["name"]) + dataset.tables.add(table) + added_tables.append(table.name) + except DBTable.DoesNotExist: + missing.append(table_ref) + + dataset.update_resources_from_tables() + + return Response( + { + "message": f"Added {len(added_tables)} tables.", + "added": added_tables, + "missing": missing, + }, + status=200, + ) + + class Table(APIView): """ Handles the creation of tables and serves information on existing tables diff --git a/dataedit/migrations/0037_dataset.py b/dataedit/migrations/0037_dataset.py new file mode 100644 index 000000000..66eef5e8f --- /dev/null +++ b/dataedit/migrations/0037_dataset.py @@ -0,0 +1,37 @@ +# Generated by Django 5.1.4 on 2025-07-31 12:27 + +import uuid + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("dataedit", "0036_alter_peerreview_oemetadata"), + ] + + operations = [ + migrations.CreateModel( + name="Dataset", + fields=[ + ( + "uuid", + models.UUIDField( + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + ), + ), + ("name", models.CharField(max_length=255, unique=True)), + ("metadata", models.JSONField(default=dict)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ( + "tables", + models.ManyToManyField( + blank=True, related_name="datasets", to="dataedit.table" + ), + ), + ], + ), + ] diff --git a/dataedit/models.py b/dataedit/models.py index e404daf21..5fd709589 100644 --- a/dataedit/models.py +++ b/dataedit/models.py @@ -1,22 +1,23 @@ -# SPDX-FileCopyrightText: 2025 Pierre Francois © Reiner Lemoine Institut -# SPDX-FileCopyrightText: 2025 Pierre Francois © Reiner Lemoine Institut -# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. 
-# SPDX-FileCopyrightText: 2025 Daryna Barabanova © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg
-# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg
+# SPDX-FileCopyrightText: 2025 Pierre Francois © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Pierre Francois © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. # noqa: E501
+# SPDX-FileCopyrightText: 2025 Daryna Barabanova © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg # noqa: E501
+# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg # noqa: E501
 # SPDX-FileCopyrightText: 2025 Tom Heimbrodt
-# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V.
-# SPDX-FileCopyrightText: 2025 Daryna Barabanova © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Stephan Uller © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 user © Reiner Lemoine Institut
+# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. # noqa: E501
+# SPDX-FileCopyrightText: 2025 Daryna Barabanova © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Stephan Uller © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 user © Reiner Lemoine Institut # noqa: E501
 #
 # SPDX-License-Identifier: AGPL-3.0-or-later
 
 import json
 import logging
+import uuid
 from datetime import datetime, timedelta
 from enum import Enum
 
@@ -171,6 +172,26 @@ class Meta:
         unique_together = (("name",),)
 
 
+class Dataset(models.Model):
+    """Represents a dataset in the database.
+
+    Datasets are implemented according to oemetadata specification.
+    """
+
+    uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
+    name = models.CharField(max_length=255, unique=True)
+    metadata = models.JSONField(null=False, default=dict)
+    tables = models.ManyToManyField("Table", related_name="datasets", blank=True)
+    created_at = models.DateTimeField(auto_now_add=True)
+
+    def update_resources_from_tables(self):
+        """
+        Rebuild the `resources` field in OEMetadata based on linked tables.
+        """
+        self.metadata["resources"] = [table.oemetadata for table in self.tables.all()]
+        self.save()
+
+
 class Embargo(models.Model):
     DURATION_CHOICES = [
         ("6_months", "6 Months"),

From 7dbe988f3bc56bf0cc449f3c9bd0cf23e9770092 Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Fri, 1 Aug 2025 09:48:28 +0200
Subject: [PATCH 03/14] #1971: add api paths for new dataset functionalities

---
 api/urls.py | 42 +++++++++++++++++++++++++++++-----------
 1 file changed, 31 insertions(+), 11 deletions(-)

diff --git a/api/urls.py b/api/urls.py
index 2149a8e6f..f66351cfe 100644
--- a/api/urls.py
+++ b/api/urls.py
@@ -1,14 +1,14 @@
-# SPDX-FileCopyrightText: 2025 Adel Memariani © Otto-von-Guericke-Universität Magdeburg
-# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V.
-# SPDX-FileCopyrightText: 2025 Johann Wagner © Otto-von-Guericke-Universität Magdeburg
-# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg
-# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg
-# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V.
-# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut
-# SPDX-FileCopyrightText: 2025 user © Reiner Lemoine Institut
+# SPDX-FileCopyrightText: 2025 Adel Memariani © Otto-von-Guericke-Universität Magdeburg # noqa: E501
+# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. # noqa: E501
+# SPDX-FileCopyrightText: 2025 Johann Wagner © Otto-von-Guericke-Universität Magdeburg # noqa: E501
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg # noqa: E501
+# SPDX-FileCopyrightText: 2025 Martin Glauer © Otto-von-Guericke-Universität Magdeburg # noqa: E501
+# SPDX-FileCopyrightText: 2025 Christian Winger © Öko-Institut e.V. # noqa: E501
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
+# SPDX-FileCopyrightText: 2025 user © Reiner Lemoine Institut # noqa: E501
 #
 # SPDX-License-Identifier: AGPL-3.0-or-later
 
@@ -229,4 +229,24 @@
         views.ManageOekgScenarioDatasets.as_view(),
         name="add-scenario-datasets",
     ),
+    path(
+        "v0/datasets/",
+        views.DatasetsListCreate.as_view(),
+        name="dataset-list-create",
+    ),
+    path(
+        "v0/datasets/<str:dataset_name>/assign-tables/",
+        views.AssignDatasetTables.as_view(),
+        name="dataset-assign-tables",
+    ),
+    path(
+        "v0/datasets/<str:dataset_name>/",
+        views.DatasetManager.as_view(),
+        name="dataset",
+    ),
+    path(
+        "v0/datasets/<str:dataset_name>/resources/",
+        views.DatasetsListResources.as_view(),
+        name="dataset-resources",
+    ),
 ]

From 244f3b1ae82867302e57354a383f2fea4e66e08a Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Fri, 1 Aug 2025 09:51:23 +0200
Subject: [PATCH 04/14] #1971: extend api test suite with tests for all new
 dataset related functionality

---
 api/tests/test_datasets_api.py | 152 +++++++++++++++++++++++++++++++++
 1 file changed, 152 insertions(+)
 create mode 100644 api/tests/test_datasets_api.py

diff --git a/api/tests/test_datasets_api.py b/api/tests/test_datasets_api.py
new file mode 100644
index 000000000..2547f95d0
--- /dev/null
+++ b/api/tests/test_datasets_api.py
@@ -0,0 +1,152 @@
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
+#
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+from rest_framework import status
+from rest_framework.test import APITestCase
+
+from dataedit.models import Dataset, Schema, Table
+
+
+class DatasetAPITests(APITestCase):
+    def test_create_dataset(self):
+        payload = {
+            "name": "test_dataset",
+            "title": "Test Dataset",
+            "description": "This is a test dataset",
+        }
+        response = self.client.post(
+            "/api/v0/datasets/", payload, format="json"
+        )  # fixed
+        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+        self.assertIn("metadata", response.data)
+        self.assertIn("resources", response.data["metadata"])
+        self.assertEqual(response.data["metadata"]["name"], "test_dataset")
"test_dataset") + + def test_list_datasets(self): + Dataset.objects.create(name="ds1", metadata={"name": "ds1"}) + Dataset.objects.create(name="ds2", metadata={"name": "ds2"}) + response = self.client.get("/api/v0/datasets/") # fixed + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(response.data), 2) + + def test_assign_tables_to_dataset(self): + schema = Schema.objects.create(name="test_schema") + Table.objects.create(name="t1", schema=schema, oemetadata={"name": "t1"}) + Table.objects.create(name="t2", schema=schema, oemetadata={"name": "t2"}) + dataset = Dataset.objects.create( + name="test_dataset", metadata={"name": "test_dataset"} + ) + + payload = { + "dataset_name": "test_dataset", + "tables": [ + {"schema": "test_schema", "name": "t1"}, + {"schema": "test_schema", "name": "t2"}, + ], + } + + response = self.client.post( + "/api/v0/datasets/test_dataset/assign-tables/", payload, format="json" + ) + self.assertEqual(response.status_code, 200) + dataset.refresh_from_db() + self.assertEqual(len(dataset.tables.all()), 2) + self.assertEqual(len(dataset.metadata["resources"]), 2) + + def test_list_resources_for_dataset(self): + schema = Schema.objects.create(name="test_schema") + table = Table.objects.create( + name="t1", schema=schema, oemetadata={"name": "t1"} + ) + dataset = Dataset.objects.create( + name="test_dataset", metadata={"name": "test_dataset"} + ) + dataset.tables.add(table) + dataset.update_resources_from_tables() + + response = self.client.get( + f"/api/v0/datasets/{dataset.name}/resources/" + ) # fixed + self.assertEqual(response.status_code, 200) + self.assertEqual(len(response.data), 1) + self.assertEqual(response.data[0]["name"], "t1") + + def test_assign_missing_table(self): + Dataset.objects.create(name="ds_missing", metadata={"name": "ds_missing"}) + + payload = { + "dataset_name": "ds_missing", + "tables": [{"schema": "nonexistent", "name": "missing"}], + } + + response = self.client.post( + "/api/v0/datasets/ds_missing/assign-tables/", payload, format="json" + ) + self.assertEqual(response.status_code, 200) + self.assertIn("missing", response.data) + self.assertEqual(len(response.data["missing"]), 1) + + def test_list_resources_dataset_not_found(self): + response = self.client.get("/api/v0/datasets/nonexistent/resources/") # fixed + self.assertEqual(response.status_code, 404) + + +class DatasetManagerAPITests(APITestCase): + def setUp(self): + self.dataset = Dataset.objects.create( + name="test_dataset", + metadata={ + "name": "test_dataset", + "title": "Test Title", + "description": "Test Description", + "resources": [], + }, + ) + self.detail_url = f"/api/v0/datasets/{self.dataset.name}/" + + def test_get_dataset(self): + response = self.client.get(self.detail_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["name"], "test_dataset") + + def test_update_dataset(self): + updated_data = { + "name": "test_dataset", # must match existing name + "title": "Updated Title", + "description": "Updated Description", + "at_id": "https://example.org/dataset/test_dataset", + } + + response = self.client.put(self.detail_url, updated_data, format="json") + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.dataset.refresh_from_db() + self.assertEqual(self.dataset.metadata["title"], "Updated Title") + self.assertEqual(self.dataset.metadata["description"], "Updated Description") + self.assertEqual( + self.dataset.metadata["@id"], "https://example.org/dataset/test_dataset" + ) + + 
+    def test_delete_dataset(self):
+        response = self.client.delete(self.detail_url)
+        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+        self.assertFalse(Dataset.objects.filter(name="test_dataset").exists())
+
+    def test_get_nonexistent_dataset(self):
+        response = self.client.get("/api/v0/datasets/nonexistent_dataset/")
+        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+    def test_put_nonexistent_dataset(self):
+        payload = {
+            "name": "nonexistent_dataset",
+            "title": "Does Not Exist",
+            "description": "Should return 404",
+        }
+        response = self.client.put(
+            "/api/v0/datasets/nonexistent_dataset/", payload, format="json"
+        )
+        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+    def test_delete_nonexistent_dataset(self):
+        response = self.client.delete("/api/v0/datasets/nonexistent_dataset/")
+        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

From 4b38d37340e4133838cff9b3294e5aceb37d8d81 Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Fri, 1 Aug 2025 09:52:48 +0200
Subject: [PATCH 05/14] #1971: fix missing reuse information

---
 dataedit/migrations/0037_dataset.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/dataedit/migrations/0037_dataset.py b/dataedit/migrations/0037_dataset.py
index 66eef5e8f..be2b20cf6 100644
--- a/dataedit/migrations/0037_dataset.py
+++ b/dataedit/migrations/0037_dataset.py
@@ -1,3 +1,7 @@
+# SPDX-FileCopyrightText: 2025 Jonas Huber © Reiner Lemoine Institut # noqa: E501
+#
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
 # Generated by Django 5.1.4 on 2025-07-31 12:27
 
 import uuid

From 6ee6101f29845f469c603ef18ef5f3850434cb64 Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Fri, 1 Aug 2025 17:53:28 +0200
Subject: [PATCH 06/14] #1971: avoid changes to oemetadata v2 template

---
 api/services/dataset_creation.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/api/services/dataset_creation.py b/api/services/dataset_creation.py
index 89ff7f820..58e22f4e3 100644
--- a/api/services/dataset_creation.py
+++ b/api/services/dataset_creation.py
@@ -2,6 +2,7 @@
 #
 # SPDX-License-Identifier: AGPL-3.0-or-later
 
+from copy import deepcopy
 from typing import Any
 
 from oemetadata.v2.v20.example import OEMETADATA_V20_EXAMPLE
@@ -12,6 +13,7 @@ def assemble_dataset_metadata(
     validated_data: dict[str, Any], oemetadata: dict = OEMETADATA_V20_TEMPLATE
 ) -> dict[str, Any]:
     # set the context
+    oemetadata = deepcopy(oemetadata)
     oemetadata["@context"] = OEMETADATA_V20_EXAMPLE["@context"]
     oemetadata["resources"] = []  # Remove resources
 

From 4515a18afff41c8a96f1f3e391e3891d42d2dea3 Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Mon, 4 Aug 2025 12:16:22 +0200
Subject: [PATCH 07/14] #1971: Add important note on how to handle resource
 (table) metadata

---
 api/views.py | 21 +++++++++++++++++----
 1 file changed, 17 insertions(+), 4 deletions(-)

diff --git a/api/views.py b/api/views.py
index 68d6063a6..ae0747fc9 100644
--- a/api/views.py
+++ b/api/views.py
@@ -321,6 +321,17 @@
 
 
 class Metadata(APIView):
+    """
+    Important note:
+    oemetadata v2 introduces datasets, which are not relevant at the table level;
+    always query metadata["resources"][0]. Keeping the complete oemetadata v2 JSON
+    makes it easy to integrate, as no further changes to validation are required
+    for now. Datasets are handled in models.Dataset & the api views.
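+
+    Example: table-level keywords live at metadata["resources"][0]["keywords"],
+    not at the top (dataset) level of the oemetadata document.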
+ """ + @api_exception @method_decorator(never_cache) def get(self, request, schema, table): @@ -344,7 +352,8 @@ def post(self, request, schema, table): cursor = actions.load_cursor_from_context(request.data) # update/sync keywords with tags before saving metadata - # TODO make this iter over all resources + # oemetadata v2 introduces datasets which are not relevant on a table level + # always query for metadata["resources"][0] keywords = metadata["resources"][0].get("keywords", []) or [] # get_tag_keywords_synchronized_metadata returns the OLD metadata @@ -354,18 +363,19 @@ def post(self, request, schema, table): _metadata = get_tag_keywords_synchronized_metadata( table=table, schema=schema, keywords_new=keywords ) - # TODO make this iter over all resources + # oemetadata v2 introduces datasets which are not relevant on a table level + # always query for metadata["resources"][0] metadata["resources"][0]["keywords"] = _metadata["resources"][0]["keywords"] # Write oemetadata json to dataedit.models.tables - # and to SQL comment on table actions.set_table_metadata( table=table, schema=schema, metadata=metadata, cursor=cursor ) _metadata = get_tag_keywords_synchronized_metadata( table=table, schema=schema, keywords_new=keywords ) - # TODO make this iter over all resources + # oemetadata v2 introduces datasets which are not relevant on a table level + # always query for metadata["resources"][0] metadata["resources"][0]["keywords"] = _metadata["resources"][0]["keywords"] # make sure extra metadata is removed From 1bbf66f881db761325a8eab6ac3d50b0d66394be Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 4 Aug 2025 12:19:36 +0200 Subject: [PATCH 08/14] #1971 Enhance table creation: - Make sure every table got a minimal set of oemetadata resource information in case the user did not provide any info - Adding oemetadata on table creation from latest template if no metadata is provided - Auto set some values: name, topics, fields(name, data type and nullable) --- api/views.py | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/api/views.py b/api/views.py index ae0747fc9..41eabdec8 100644 --- a/api/views.py +++ b/api/views.py @@ -23,6 +23,7 @@ import json import logging import re +from copy import deepcopy from decimal import Decimal import geoalchemy2 # noqa: Although this import seems unused is has to be here @@ -46,6 +47,7 @@ from django.utils import timezone from django.utils.decorators import method_decorator from django.views.decorators.cache import never_cache +from oemetadata.latest.example import OEMETADATA_LATEST_EXAMPLE from oemetadata.latest.template import OEMETADATA_LATEST_TEMPLATE from rest_framework import generics, status from rest_framework.authentication import TokenAuthentication @@ -654,6 +656,39 @@ def put(self, request, schema, table): metadata=metadata, cursor=cursor, ) + else: + # If no metadata is provided, we create a minimal metadata object + metadata = deepcopy(OEMETADATA_LATEST_TEMPLATE) + metadata["@context"] = OEMETADATA_LATEST_EXAMPLE["@context"] + metadata["metaMetadata"] = OEMETADATA_LATEST_EXAMPLE["metaMetadata"] + + # Set basic resource info + resource = { + "name": table, + "topics": [schema], + } + + # Update the first resource - there will only be one resource. + # The dataset section is managed by the database implementation ... 
+ metadata["resources"][0].update(resource) + + # Build schema fields from columns + fields = [] + for col in columns: + field = { + "name": col["name"], + "type": col["data_type"], + "nullable": col.get("is_nullable", True), + # add more field metadata as needed + } + fields.append(field) + + # Replace the fields list entirely + metadata["resources"][0]["schema"]["fields"] = fields + + actions.set_table_metadata( + table=table, schema=schema, metadata=metadata, cursor=None + ) return JsonResponse({}, status=status.HTTP_201_CREATED) From 62cc52ed594299ced1152f405553b1c0950c01fe Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 4 Aug 2025 12:20:59 +0200 Subject: [PATCH 09/14] #1971: Fix Dataset resource update method to correctly handle table resource metadata by only reading out the resources[0] element and add it to the dataset metadata --- dataedit/models.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dataedit/models.py b/dataedit/models.py index 5fd709589..d1fff86d6 100644 --- a/dataedit/models.py +++ b/dataedit/models.py @@ -188,7 +188,9 @@ def update_resources_from_tables(self): """ Rebuild the `resources` field in OEMetadata based on linked tables. """ - self.metadata["resources"] = [table.oemetadata for table in self.tables.all()] + self.metadata["resources"] = [ + table.oemetadata["resources"][0] for table in self.tables.all() + ] self.save() From 55c52a04af6a4cdbdb16035113979cbc922a9cd9 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 4 Aug 2025 12:21:45 +0200 Subject: [PATCH 10/14] Add notice about legacy code which is not part of the dataview response --- dataedit/views.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dataedit/views.py b/dataedit/views.py index aab32a6af..8ff17b649 100644 --- a/dataedit/views.py +++ b/dataedit/views.py @@ -1002,6 +1002,8 @@ def iter_oem_key_order(metadata: dict): meta_widget = MetaDataWidget(ordered_oem_151) revisions = [] + # TODO Legacy code, not fully reviewed yet, currently not used + # seems to continue to track changes in the brackround api_changes = change_requests(schema, table) data = api_changes.get("data") display_message = api_changes.get("display_message") From e7c24b674fbab4c19e100f4e82b2dfdb01c4f568 Mon Sep 17 00:00:00 2001 From: jh-RLI Date: Mon, 4 Aug 2025 12:44:50 +0200 Subject: [PATCH 11/14] #1971: Update tests to match intended oemetadata handling --- api/tests/test_datasets_api.py | 34 +++++++++++++++++++++++++++------- 1 file changed, 27 insertions(+), 7 deletions(-) diff --git a/api/tests/test_datasets_api.py b/api/tests/test_datasets_api.py index 2547f95d0..1fce853ff 100644 --- a/api/tests/test_datasets_api.py +++ b/api/tests/test_datasets_api.py @@ -2,6 +2,9 @@ # # SPDX-License-Identifier: AGPL-3.0-or-later +from copy import deepcopy + +from oemetadata.latest.template import OEMETADATA_LATEST_TEMPLATE from rest_framework import status from rest_framework.test import APITestCase @@ -9,6 +12,17 @@ class DatasetAPITests(APITestCase): + def setUpDatasetMetadata(self, dataset_name: str): + self.metadata = deepcopy(OEMETADATA_LATEST_TEMPLATE) + + self.metadata["name"] = dataset_name + self.metadata["resources"] = [] + + def setUpResourceMetadata(self, table_name: str): + self.metadata = deepcopy(OEMETADATA_LATEST_TEMPLATE) + + self.metadata["resources"][0]["name"] = table_name + def test_create_dataset(self): payload = { "name": "test_dataset", @@ -24,16 +38,20 @@ def test_create_dataset(self): self.assertEqual(response.data["metadata"]["name"], "test_dataset") def test_list_datasets(self): - 
-        Dataset.objects.create(name="ds1", metadata={"name": "ds1"})
-        Dataset.objects.create(name="ds2", metadata={"name": "ds2"})
+        Dataset.objects.create(name="ds1", metadata=self.setUpDatasetMetadata("ds1"))
+        Dataset.objects.create(name="ds2", metadata=self.setUpDatasetMetadata("ds2"))
         response = self.client.get("/api/v0/datasets/")  # fixed
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertEqual(len(response.data), 2)
 
     def test_assign_tables_to_dataset(self):
         schema = Schema.objects.create(name="test_schema")
-        Table.objects.create(name="t1", schema=schema, oemetadata={"name": "t1"})
-        Table.objects.create(name="t2", schema=schema, oemetadata={"name": "t2"})
+        Table.objects.create(
+            name="t1", schema=schema, oemetadata=self.setUpResourceMetadata("t1")
+        )
+        Table.objects.create(
+            name="t2", schema=schema, oemetadata=self.setUpResourceMetadata("t2")
+        )
         dataset = Dataset.objects.create(
             name="test_dataset", metadata={"name": "test_dataset"}
         )
@@ -57,10 +75,10 @@ def test_assign_tables_to_dataset(self):
     def test_list_resources_for_dataset(self):
         schema = Schema.objects.create(name="test_schema")
         table = Table.objects.create(
-            name="t1", schema=schema, oemetadata={"name": "t1"}
+            name="t1", schema=schema, oemetadata=self.setUpResourceMetadata("t1")
         )
         dataset = Dataset.objects.create(
-            name="test_dataset", metadata={"name": "test_dataset"}
+            name="test_dataset", metadata=self.setUpDatasetMetadata("test_dataset")
         )
         dataset.tables.add(table)
         dataset.update_resources_from_tables()
@@ -73,7 +91,9 @@ def test_list_resources_for_dataset(self):
         self.assertEqual(response.data[0]["name"], "t1")
 
     def test_assign_missing_table(self):
-        Dataset.objects.create(name="ds_missing", metadata={"name": "ds_missing"})
+        Dataset.objects.create(
+            name="ds_missing", metadata=self.setUpDatasetMetadata("ds_missing")
+        )
 
         payload = {
             "dataset_name": "ds_missing",

From 0d487bc1e46d1ad5928af3bcd16c74c023733018 Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Mon, 4 Aug 2025 12:51:28 +0200
Subject: [PATCH 12/14] #1971: fix return metadata

---
 api/tests/test_datasets_api.py | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/api/tests/test_datasets_api.py b/api/tests/test_datasets_api.py
index 1fce853ff..4cc457de5 100644
--- a/api/tests/test_datasets_api.py
+++ b/api/tests/test_datasets_api.py
@@ -13,15 +13,19 @@
 
 class DatasetAPITests(APITestCase):
     def setUpDatasetMetadata(self, dataset_name: str):
-        self.metadata = deepcopy(OEMETADATA_LATEST_TEMPLATE)
+        metadata = deepcopy(OEMETADATA_LATEST_TEMPLATE)
 
-        self.metadata["name"] = dataset_name
-        self.metadata["resources"] = []
+        metadata["name"] = dataset_name
+        metadata["resources"] = []
+
+        return metadata
 
     def setUpResourceMetadata(self, table_name: str):
-        self.metadata = deepcopy(OEMETADATA_LATEST_TEMPLATE)
+        metadata = deepcopy(OEMETADATA_LATEST_TEMPLATE)
+
+        metadata["resources"][0]["name"] = table_name
 
-        self.metadata["resources"][0]["name"] = table_name
+        return metadata
 
     def test_create_dataset(self):
         payload = {

From 2011ad0c4512b66f5a7e461e0e9b6fa0170affbc Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Mon, 4 Aug 2025 15:23:34 +0200
Subject: [PATCH 13/14] #1971: update changelog

---
 versions/changelogs/current.md | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/versions/changelogs/current.md b/versions/changelogs/current.md
index ca715d4ba..7a7306a1b 100644
--- a/versions/changelogs/current.md
+++ b/versions/changelogs/current.md
@@ -46,9 +46,15 @@ SPDX-License-Identifier: CC0-1.0
 - Add javaScript modules: main for connecting logic as entrypoint; navigation for switching between fields/tabs; opr_reviewer_logic for checking if review is complete; peer_review for main review logic; state_current_review for getting certain values from review; summary for review summary ([#1965](https://github.com/OpenEnergyPlatform/oeplatform/pull/1965))
 - Change main views function for metadata v2 structure ([#2026](https://github.com/OpenEnergyPlatform/oeplatform/pull/2056))
 
 
-## Features
+## Features
+- Add Dataset rest-api and metadata based concept as specified in oemetadata / frictionless ([#2071](https://github.com/OpenEnergyPlatform/oeplatform/pull/2071))
+
+  - Ressource metadata is stored for each created table.
+  - Dataset objects can be listed, created, edited and existing tables can be assigned as resource
+  - Datasets and assigned Ressources are stored in the django database using a m:n relation with tables to read the oemetadata.
+  - Rest api implementation
 
 ## Bugs
 
 ## Removed

From 8c6153530855f72408454753254ff9f427552bd7 Mon Sep 17 00:00:00 2001
From: jh-RLI
Date: Mon, 4 Aug 2025 15:24:48 +0200
Subject: [PATCH 14/14] fix typo

---
 versions/changelogs/current.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/versions/changelogs/current.md b/versions/changelogs/current.md
index 7a7306a1b..fee179be0 100644
--- a/versions/changelogs/current.md
+++ b/versions/changelogs/current.md
@@ -50,9 +50,9 @@ SPDX-License-Identifier: CC0-1.0
 - Add Dataset rest-api and metadata based concept as specified in oemetadata / frictionless ([#2071](https://github.com/OpenEnergyPlatform/oeplatform/pull/2071))
 
-  - Ressource metadata is stored for each created table.
+  - Resource metadata is stored for each created table.
   - Dataset objects can be listed, created, edited and existing tables can be assigned as resource
-  - Datasets and assigned Ressources are stored in the django database using a m:n relation with tables to read the oemetadata.
+  - Datasets and assigned Resources are stored in the django database using a m:n relation with tables to read the oemetadata.
   - Rest api implementation
 
 ## Bugs
 
 ## Removed
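
Usage sketch for the dataset endpoints introduced in this series - a minimal
example, not part of the patches themselves. It assumes a locally running
instance at http://localhost:8000 serving the URL patterns from api/urls.py;
the schema and table names below are placeholders:

    import requests

    BASE = "http://localhost:8000/api/v0/datasets/"

    # Create a dataset; the server assembles the oemetadata document from
    # name/title/description via assemble_dataset_metadata().
    resp = requests.post(
        BASE,
        json={
            "name": "example_dataset",
            "title": "Example Dataset",
            "description": "Demonstrates the dataset endpoints",
        },
    )
    resp.raise_for_status()

    # Assign existing tables as resources. Unknown tables are reported back
    # under "missing" instead of failing the whole request.
    requests.post(
        BASE + "example_dataset/assign-tables/",
        json={"tables": [{"schema": "model_draft", "name": "example_table"}]},
    )

    # Read the dataset and its resources back.
    print(requests.get(BASE + "example_dataset/").json())
    print(requests.get(BASE + "example_dataset/resources/").json())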