diff --git a/.github/workflows/publish_container_image.yml b/.github/workflows/publish_container_image.yml new file mode 100644 index 0000000..ed88cbf --- /dev/null +++ b/.github/workflows/publish_container_image.yml @@ -0,0 +1,25 @@ +#name: Deploy to GitHub Container Registry +run-name: Releasing next version 🚀 +on: + push: + tags: + - '*' + +jobs: + publish-docker-image: + runs-on: ubuntu-latest + steps: + - name: Checkout sources + uses: actions/checkout@v2 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_CONTAINER_PUSH_USERNAME }} + password: ${{ secrets.DOCKERHUB_CONTAINER_PUSH }} + + - name: Build the container image + run: | + docker build . --tag esdlmapeditoressim/esdlvalidator:latest --tag esdlmapeditoressim/esdlvalidator:${{ github.ref_name}} + docker push esdlmapeditoressim/esdlvalidator:latest + docker push esdlmapeditoressim/esdlvalidator:${{ github.ref_name}} diff --git a/Dockerfile b/Dockerfile index 8c5e738..79fa6e3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3-alpine +FROM python:3.8-alpine ENV ESDLVALIDATOR_DB_LOCATION=/storage/schemas.db ENV HOSTNAME=localhost diff --git a/README.md b/README.md index 9f11895..6aee7b6 100644 --- a/README.md +++ b/README.md @@ -1,20 +1,227 @@ # ESDL-Validator -Service for validating ESDL files against validation schemas + +Service for validating ESDL files against validation schemas + +## Usage + +To start the service locally, run + +```bash +docker-compose up +``` + +Then go to `localhost:3011` to see the available endpoints. +First upload the validation schema `testdata/schemas/schema_Poc.json` using `POST /schema`. +`GET /schema` returns a list of the available schema IDs, names and descriptions. +`GET /schema/{schema_id_or_name}` returns a schema by ID. + +To run the validation use `POST /validationToMessages/` with the ESDL as a string (for instance +`testdata/esdls/Single_pipes.esdl`) and the schema ID.
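For example, a request could look like the following minimal sketch using Python `requests` (it assumes the service is running on `localhost:3011` and that the uploaded schema got id `1`; use the id actually returned by `POST /schema`). It sends the ESDL as the raw request body, which is also how the MapEditor calls the service:

```python
import requests

# Read the example ESDL as plain text (assumed path from testdata/).
with open("testdata/esdls/Single_pipes.esdl") as f:
    esdl_text = f.read()

response = requests.post(
    "http://localhost:3011/validationToMessages/",
    params={"schemas": "1"},  # comma-separated schema ids (hypothetical id "1")
    data=esdl_text,           # raw ESDL string in the request body
)
print(response.json())
```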
+This should return a response with errors and warnings: + +```json +[ + { + "assetID": "6f45c6f8-e8e2-4378-a910-45140337b9dd", + "messages": [ + { + "message": "Only one of supplyTemperature or returnTemperature must be defined for asset 6f45c6f8-e8e2-4378-a910-45140337b9dd (HotSide)", + "severity": "ERROR" + }, + { + "message": "6f45c6f8-e8e2-4378-a910-45140337b9dd (HotSide) does not have a parallel return asset with the name (HotSide_ret)", + "severity": "ERROR" + } + ] + }, + { + "assetID": "e11a2daa-2e56-4635-afb3-1e23c6d08c33", + "messages": [ + { + "message": "e11a2daa-2e56-4635-afb3-1e23c6d08c33's OutPort is unconnected", + "severity": "WARNING" + } + ] + }, + { + "assetID": "c407387a-ebfd-41b7-af1b-9cae7aec842b", + "messages": [ + { + "message": "c407387a-ebfd-41b7-af1b-9cae7aec842b's InPort is unconnected", + "severity": "WARNING" + } + ] + }, + { + "assetID": "e677bac5-48e0-4ac0-b2a9-5a329b00b0d6", + "messages": [ + { + "message": "e677bac5-48e0-4ac0-b2a9-5a329b00b0d6's OutPort is unconnected", + "severity": "WARNING" + } + ] + }, + { + "assetID": "737494af-8ac4-4ebe-a11a-e89f6f2c415a", + "messages": [ + { + "message": "737494af-8ac4-4ebe-a11a-e89f6f2c415a's InPort is unconnected", + "severity": "WARNING" + }, + { + "message": "737494af-8ac4-4ebe-a11a-e89f6f2c415a (Pipe_7374) does not have a parallel return asset with the name (Pipe_7374_ret)", + "severity": "ERROR" + } + ] + }, + { + "assetID": "363103d6-a32e-40a6-b3a9-ae376c944f6b", + "messages": [ + { + "message": "363103d6-a32e-40a6-b3a9-ae376c944f6b (Pipe_3631) does not have a parallel return asset with the name (Pipe_3631_ret)", + "severity": "ERROR" + } + ] + }, + { + "assetID": "83582c37-b692-4432-8430-4e3407ed4c08", + "messages": [ + { + "message": "83582c37-b692-4432-8430-4e3407ed4c08 (Pipe_8358) does not have a parallel return asset with the name (Pipe_8358_ret)", + "severity": "ERROR" + } + ] + }, + { + "assetID": "a5142229-9331-4313-a52b-70c3da15f4a9", + "messages": [ + { + "message": "a5142229-9331-4313-a52b-70c3da15f4a9 (Pipe_a514) does not have a parallel return asset with the name (Pipe_a514_ret)", + "severity": "ERROR" + } + ] + }, + { + "assetID": "0fc640c8-7f6b-45ae-8e40-2966c1377761", + "messages": [ + { + "message": "0fc640c8-7f6b-45ae-8e40-2966c1377761 (Pipe_0fc6) does not have a parallel return asset with the name (Pipe_0fc6_ret)", + "severity": "ERROR" + } + ] + }, + { + "assetID": "a0b0a95a-76c6-4afa-a319-f41297c5be6e", + "messages": [ + { + "message": "a0b0a95a-76c6-4afa-a319-f41297c5be6e (Pipe_a0b0) does not have a parallel return asset with the name (Pipe_a0b0_ret)", + "severity": "ERROR" + } + ] + }, + { + "assetID": "f6779198-7d2a-456b-997b-58b9a02cdb2d", + "messages": [ + { + "message": "f6779198-7d2a-456b-997b-58b9a02cdb2d (Joint_f677) does not have a parallel return asset with the name (Joint_f677_ret)", + "severity": "ERROR" + } + ] + }, + { + "assetID": "7aae2f5f-c3d4-4041-9577-d0a46124df09", + "messages": [ + { + "message": "7aae2f5f-c3d4-4041-9577-d0a46124df09 (Joint_7aae) does not have a parallel return asset with the name (Joint_7aae_ret)", + "severity": "ERROR" + } + ] + }, + { + "assetID": "f5214b94-d097-4fa9-a86a-110a1386f884", + "messages": [ + { + "message": "f5214b94-d097-4fa9-a86a-110a1386f884 (Joint_f521) does not have a parallel return asset with the name (Joint_f521_ret)", + "severity": "ERROR" + } + ] + } +] +``` ## Status -**work in progress** + +**work in progress** + +# Status PoC scheme + +### Checks if: + +- Heatcommodity carriers availability with atleast one supply and 
return temperature +- Pipes are not directly connected to pipes +- No other assets than pipes are connected to joints +- There are no unconnected ports +- Joint contains exactly two ports +- [Producers, Conversion] are connected as producers (outport temperature > inport temperature) +- [Consumer, Heatstorage] are connected as consumers (outport temperature < inport temperature) +- Diameter of pipe is defined +- Charge and discharge rate of Heatstorages is defined +- Heat exchanger capacity is defined +- Cost information exists with children installation, investment, fixed & variable operational + - all checked units are independent of multiplier + - investment cost in EUR/W for producers, conversion, ATES, consumers, EUR/m3 for tank storage, EUR/m for pipes + - installation cost in EUR for all assets except pipes and joints + - fixed operational cost in EUR/W for producers, conversion, ATES, consumers, EUR/m3 for tank storage + - variable operational cost in EUR/Wh for producers, conversion, ATES, consumers +- Names of assets are unique +- Pipes and joints have name pairs for supply and return based on + '_ret' (see the sketch after this list) +- Power of producer, conversion, consumer is defined +- Consumer profile is defined + +### PoC scheme ToDo: + +- [ ] check minimum pipe length [warning] +- [ ] check if there are carriers which are not used [error] +- [x] check connection of assets with more than 2 ports (heat pump and heat exchanger) [error] +- [ ] check area ... +- [x] check on heat pump information (power, COP, ...) [warning] +- [x] check tank storage volume or capacity defined +- [ ] check power consumer is larger than max of profile [warning] +- [x] check power consumer is not 0, because used for scaling. [error] +- [ ] check aggregation count: + - [ ] if >0 then aggregated ==True else [warning] + - [ ] if ==0, same as disabled (asset ignored) [warning] +- [ ] check unit EUR/Wh to also allow EUR/J [warning] +- [ ] check fixed and variable maintenance to also be allowed. [warning] +- [ ] check number of assets in the ESDL. [warning] +- [ ] check if area contains at least one asset [warning] +- [x] check if multiplier of profile is not 0 [error] !! +- [x] check name of carriers, return carrier = supply carrier name + '_ret' [error] !! +- [ ] check carrier on all ports of joint are the same [error] !! +- [ ] improve error/warning messages + - [ ] change asset id to asset name to be used in description + - [ ] improve description of what to change for errors: +- [ ] add scheme to use before duplicator + - use same scheme without + - [ ] the check on how assets are connected + - [ ] the check if also '_ret' exists for pipe and joint + - [ ] the check if all ports are connected + - [ ] the exclusive check if only supply or return temperature of carrier is set.
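As a small illustration of the '_ret' naming rule used by the supply/return checks above (a standalone sketch, not the validator's own code; the asset names are taken from the example output earlier):

```python
def missing_return_assets(names):
    """Return supply asset names that have no matching '<name>_ret' counterpart."""
    name_set = set(names)
    return [n for n in names
            if not n.endswith("_ret") and f"{n}_ret" not in name_set]

print(missing_return_assets(["Pipe_3631", "Pipe_3631_ret", "Joint_f677"]))
# -> ['Joint_f677']  (no parallel return asset named Joint_f677_ret)
```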
# ToDo V1.0 + - [x] Handle 'and', 'or' in checks - working, needs some more thorough testing - [ ] Add xsd validation - [ ] Fix waitress logging - [ ] Better output messages for current checks -- [ ] Add type filter to get function, for instance to be able to select SingleValue of type marginalCosts, Subselect with filter on all assets is now needed (schema_test_2.json) +- [ ] Add type filter to get function, for instance to be able to select SingleValue of type marginalCosts, Subselect + with filter on all assets is now needed (schema_test_2.json) - [ ] More select functions - [ ] More check functions - [ ] More interesting validation rules to test with - [ ] Option to log to file instead of stdout -- [ ] Accept multiple types in get function so a check can be done on multiple entities but not the parent, for example GasHeater and HeatPump +- [ ] Accept multiple types in get function so a check can be done on multiple entities but not the parent, for example + GasHeater and HeatPump - [ ] Versioning? - [ ] More unit tests (currently no tests for the api package) - [ ] Endpoint for getting an overview of registered functions @@ -23,56 +230,72 @@ Service for validating ESDL files against validation schemas - [ ] Tutorial ## Endpoints -Swagger documentation of the endpoints can be viewed by navigating to the root of the service. The services does not contain authentication/authorization, this can be done within your own setup with something like traefik. + +Swagger documentation of the endpoints can be viewed by navigating to the root of the service. The service does not +provide authentication/authorization; this can be handled within your own setup with something like Traefik. ### schema -The schema endpoint can be used to manage the validation schemas. Validation schemas are used to validate an ESDL document. -| Endpoints | Operation | Description | -| ------------- |:-------------| :-----| -| /schema | GET | Get a summary of schemas | -| /schema | POST | Post a new schema | -| /schema/{id} | GET | Get a schema by id | -| /schema/{id} | PUT | Update a schema by id | -| /schema/{id} | DELETE | Delete a schema by id | +The schema endpoint can be used to manage the validation schemas. Validation schemas are used to validate an ESDL +document. + +| Endpoints    | Operation | Description              | +|--------------|:----------|:-------------------------| +| /schema      | GET       | Get a summary of schemas | +| /schema      | POST      | Post a new schema        | +| /schema/{id} | GET       | Get a schema by id       | +| /schema/{id} | PUT       | Update a schema by id    | +| /schema/{id} | DELETE    | Delete a schema by id    | ### validation -validation endpoint expects multipart/form-data since we want to send an ESDL file with extra request parameters such as schema id's, posting json with the ESDL as base64 string will have too much overhead with larger ESDL files. -| Endpoints | Operation | Description | -| ------------- |:-------------| :-----| -| /validation | POST | Validate an ESDL against given schemas | +The validation endpoint expects multipart/form-data since we want to send an ESDL file with extra request parameters such as +schema IDs; posting JSON with the ESDL as a base64 string would have too much overhead with larger ESDL files. + +| Endpoints   | Operation | Description                             | +|-------------|:----------|:---------------------------------------| +| /validation | POST      | Validate an ESDL against given schemas | ### Settings + esdl-validator can be configured using the following environment variables.
-| Variable | Description | default | -| ------------- |:-------------| :-----| -| ESDLVALIDATOR_TITLE | Title of the service, shown in swagger | ESDL-Validator | -| ESDLVALIDATOR_DESCRIPTION | Description of the service, shown in swagger | API for validating ESDL files | -| ESDLVALIDATOR_ENDPOINT_PREFIX | Prefix of the endpoint, for example /api | - | -| ESDLVALIDATOR_DB_LOCATION | location and name of database | schemas.db | -| ESDLVALIDATOR_DEFAULT_CORS | Enable the default CORS, accepting everything | False | -| ESDLVALIDATOR_LOG_LEVEL | Set the log level: CRITICAL, ERROR, WARNING, INFO, DEBUG | INFO | +| Variable                      | Description                                               | default                        | +|-------------------------------|:----------------------------------------------------------|:-------------------------------| +| ESDLVALIDATOR_TITLE           | Title of the service, shown in swagger                    | ESDL-Validator                 | +| ESDLVALIDATOR_DESCRIPTION     | Description of the service, shown in swagger              | API for validating ESDL files  | +| ESDLVALIDATOR_ENDPOINT_PREFIX | Prefix of the endpoint, for example /api                  | -                              | +| ESDLVALIDATOR_DB_LOCATION     | Location and name of the database                         | schemas.db                     | +| ESDLVALIDATOR_DEFAULT_CORS    | Enable the default CORS, accepting everything             | False                          | +| ESDLVALIDATOR_LOG_LEVEL       | Set the log level: CRITICAL, ERROR, WARNING, INFO, DEBUG  | INFO                           | ## validation schema + ToDo: information on how a validation schema is constructed ## Local development -Setup a development environment using virtual environment and install the dependencies. For Visual Studio Code a default settings.json can be found under ```.vscode/settings.json.default``` paste these settings into a new file ```.vscode/settings.json```. Make sure the ```python.pythonPath``` is pointing to python in your virtual env. The default settings file excludes some unwanted files and folders, styling and discovery and settings for unit tests. + +Set up a development environment using a virtual environment and install the dependencies. For Visual Studio Code a default +settings.json can be found under ```.vscode/settings.json.default```; paste these settings into a new +file ```.vscode/settings.json```. Make sure the ```python.pythonPath``` is pointing to python in your virtual env. The +default settings file excludes some unwanted files and folders, and contains styling, test discovery and unit test settings. ### Virtual environment + Install virtualenv if not installed yet + ``` python3 -m pip install --user virtualenv ``` Create a virtual environment + ``` python3 -m venv env ``` Enable the virtual environment with one of the following commands + ``` source env/bin/activate (Linux) env\Scripts\activate.ps1 (Windows Powershell) @@ -80,31 +303,41 @@ env\Scripts\activate.bat (Windows CMD) ``` ### Install project dependencies + ``` pip3 install -r requirements.txt ``` ### Testing + Use the 'Test' tab in vscode or execute one of the following commands from the root folder + ``` pytest python3 -m unittest discover ./ ``` ### Run ESDL-validator in develop/debug mode + To run the service in debug mode, use the built-in Flask development server. + ``` python3 app.py ``` ### Run ESDL-validator using waitress + An example of how to start the service using waitress.
+ ``` waitress-serve --listen="*:8080" --call "esdlvalidator.api.manage:create_app" ``` ### Update static ESDL metamodel code -To update the ESDL code to work with the latest version of the ESDL ecore model, update esdl.ecore to the latest version and run + +To update the ESDL code to work with the latest version of the ESDL ecore model, update esdl.ecore to the latest version +and run + ``` pip3 install pyecoregen pyecoregen -e esdl.ecore -o ./esdlvalidator/core/esdl @@ -112,14 +345,29 @@ pyecoregen -e esdl.ecore -o ./esdlvalidator/core/esdl ## Docker +To test locally on Docker Desktop: + +``` +docker-compose up --build +``` + +Then go to `localhost:3011` and use `POST /schema` to add a validation schema; `GET /schema` will give a list of loaded +schemas. +To test the validator use `POST /validationToMessages` with the ESDL as plain text and the schema ID. + +### OLD + Build example + ``` docker build -t esdl-validator . ``` -The docker image is by default configured to create/read the database file from /storage/schemas.db, this can be updated by setting ESDLVALIDATOR_DB_LOCATION +The docker image is by default configured to create/read the database file from /storage/schemas.db; this can be changed +by setting ESDLVALIDATOR_DB_LOCATION Run example for esdl-validator with logging set to DEBUG and the database file stored and read outside of the container. + ``` docker run -p 8080:80 -v C:\temp:/storage -e ESDLVALIDATOR_LOG_LEVEL=DEBUG esdl-validator ``` @@ -127,13 +375,24 @@ docker run -p 8080:80 -v C:\temp:/storage -e ESDLVALIDATOR_LOG_LEVEL=DEBUG esdl- The service should now be accessible on ```localhost:8080``` ## Validation + ToDo ### Functions -There are 2 types of functions: select and check. Select functions are used to generate a 'dataset' which can be used in a check functions. In one validation schema multiple selects can be defined to select data from the esdl, filter out data or generate new data such for example calculating an average. Check functions test every entry in the given dataset and return a result (for every entry) which will be returned by the service, check function can generate warnings or errors based on how the check is configured in the validation schema. New functions can be added easily, by adding ```@FunctionFactory.register``` to the class, giving it a name and extending the appropriate function type the function will be discovered automatically by the FunctionFactory. + +There are 2 types of functions: select and check. Select functions are used to generate a 'dataset' which can be used in +a check function. In one validation schema multiple selects can be defined to select data from the ESDL, filter out +data or generate new data, for example by calculating an average. Check functions test every entry in the given dataset +and return a result (for every entry) which will be returned by the service; check functions can generate warnings or +errors based on how the check is configured in the validation schema. New functions can be added easily: by +adding ```@FunctionFactory.register``` to the class, giving it a name and extending the appropriate function type, the +function will be discovered automatically by the FunctionFactory.
To use a function simply reference the function by +it's name in the validation schema i.e ```"function": "not_null"``` #### Select + ToDo: Instructions on the select function and how to add new ones #### Check + ToDo: Instructions on the check function and how to add new ones diff --git a/dev.docker-compose.yml b/dev.docker-compose.yml deleted file mode 100644 index aa54fee..0000000 --- a/dev.docker-compose.yml +++ /dev/null @@ -1,18 +0,0 @@ -version: '3' - -networks: - esdl_val_net: - -services: - mongo: - image: mongo:latest - ports: - - "27017:27017" - volumes: - - mongo-storage:/data/db - networks: - - esdl_val_net - -volumes: - validator-storage: - mongo-storage: \ No newline at end of file diff --git a/docker-compose-poc.yml b/docker-compose-poc.yml new file mode 100644 index 0000000..710740d --- /dev/null +++ b/docker-compose-poc.yml @@ -0,0 +1,25 @@ +version: '3.8' + +networks: + mapeditor-net: + driver: bridge + name: mapeditor-net + external: true + +services: + esdl-validator: +# image: mvrijlandt/nwn_esdl_validator:0.0.4 + build: ./ + ports: + - "5000:5000" + environment: + - MONGODB_HOST=mongo + - MONGODB_PORT=27017 + - PYTHONUNBUFFERED=1 + volumes: + - validator-storage:/storage + networks: + - mapeditor-net + +volumes: + validator-storage: \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 9f844a9..5c1c62b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -4,8 +4,9 @@ networks: esdl_val_net: services: - esdlValidator: - image: ci.tno.nl/warmingup/chess-preprocessor:latest + esdl_validator: +# build: . + image: esdlmapeditoressim/esdlvalidator:0.0.7 ports: - "3011:5000" environment: @@ -15,13 +16,21 @@ services: - validator-storage:/storage networks: - esdl_val_net + depends_on: + mongo: + condition: service_healthy mongo: - image: mongo:latest + image: mvertes/alpine-mongo ports: - "27017:27017" volumes: - mongo-storage:/data/db + healthcheck: + test: echo 'db.runCommand("ping").ok' | mongo mongo:27017/test --quiet + interval: 10s + timeout: 10s + retries: 5 networks: - esdl_val_net diff --git a/esdlvalidator/api/controller/validationToMessages.py b/esdlvalidator/api/controller/validationToMessages.py index 6c28d9e..35dbd1e 100644 --- a/esdlvalidator/api/controller/validationToMessages.py +++ b/esdlvalidator/api/controller/validationToMessages.py @@ -1,5 +1,6 @@ import json import uuid +import logging from datetime import datetime as dt from math import cos, sin, atan2, sqrt, radians, degrees @@ -13,6 +14,8 @@ from esdlvalidator.validation.functions import utils parser = app.api.parser() +parser.add_argument("data", type=str, required=True) +parser.add_argument("schemas", type=str, required=True) @app.ns_validation_to_msgs.route('/') @@ -26,8 +29,14 @@ class ValidationToMessagesController(Resource): @app.api.expect(parser, validate=True) def post(self): """Validate an ESDL file against one or more validation schemas""" + if request.data: + # 'Contains the incoming request data as string in case it came with a mimetype Flask does not handle' + # > Happens with requests from the mapeditor + file = request.data.decode('utf-8') + else: + # with openapi + file = request.args['data'] - file = request.data.decode('utf-8') if "schemas" not in request.args: return "Bad Request: Required 'schemas' parameter missing", 400 schema_list = [id for id in request.args['schemas'].split(',')] diff --git a/esdlvalidator/api/setup.py b/esdlvalidator/api/setup.py index 5291a2d..0fe4fd0 100644 --- a/esdlvalidator/api/setup.py +++ 
b/esdlvalidator/api/setup.py @@ -31,7 +31,11 @@ def setup_logger(logLevel: str): werkzeug.setLevel(logLevel) waitress.setLevel(logLevel) - logging.basicConfig(level=logLevel, format="%(asctime)s | %(name)s | %(levelname)s | %(message)s", datefmt="%Y-%m-%dT%H:%M:%S%z") + logging.basicConfig( + level=logLevel, + format="%(asctime)s | %(name)s | %(levelname)s | %(message)s", + datefmt="%Y-%m-%dT%H:%M:%S%z" + ) class AppConfig: @@ -47,7 +51,8 @@ def __init__(self): # Setup flask/restx, namespaces self.apiBlueprint = Blueprint("api", __name__) - self.api = Api(self.apiBlueprint, version=self.settings.version, title=self.settings.title, description=self.settings.description) + self.api = Api(self.apiBlueprint, version=self.settings.version, title=self.settings.title, + description=self.settings.description) self.ns_validation = self.api.namespace("validation", "ESDL validation endpoint") self.ns_validation_to_notes = self.api.namespace("validationToNotes", "ESDL-aas validation endpoint") self.ns_validation_to_msgs = self.api.namespace("validationToMessages", "ESDL-aas validation endpoint to return JSON") diff --git a/esdlvalidator/validation/functions/__init__.py b/esdlvalidator/validation/functions/__init__.py index 6cbd8e3..ff84605 100644 --- a/esdlvalidator/validation/functions/__init__.py +++ b/esdlvalidator/validation/functions/__init__.py @@ -1,6 +1,33 @@ from pathlib import Path from autodiscover import AutoDiscover +import esdlvalidator.validation.functions.check_child_attribute +import esdlvalidator.validation.functions.check_child_attribute_not_null +import esdlvalidator.validation.functions.check_in_range +import esdlvalidator.validation.functions.check_multi_cond +import esdlvalidator.validation.functions.check_multi_cond_xor +import esdlvalidator.validation.functions.check_name_condition +import esdlvalidator.validation.functions.check_not_connected_to +import esdlvalidator.validation.functions.check_not_null +import esdlvalidator.validation.functions.check_value_range +import esdlvalidator.validation.functions.check_number_ports +import esdlvalidator.validation.functions.check_only_connected_to +import esdlvalidator.validation.functions.check_port_property_combination +import esdlvalidator.validation.functions.check_unconnected_port + +import esdlvalidator.validation.functions.function + +import esdlvalidator.validation.functions.select_avg +import esdlvalidator.validation.functions.select_but +import esdlvalidator.validation.functions.select_filter_has_property +import esdlvalidator.validation.functions.select_get +import esdlvalidator.validation.functions.select_get_exclude +import esdlvalidator.validation.functions.select_get_names_list +import esdlvalidator.validation.functions.select_get_references +import esdlvalidator.validation.functions.select_sum +import esdlvalidator.validation.functions.utils + path = Path('./esdlvalidator/validation/functions') autodiscover = AutoDiscover(path) autodiscover() + diff --git a/esdlvalidator/validation/functions/check_child_attribute.py b/esdlvalidator/validation/functions/check_child_attribute.py index ab8e98b..1f76d4e 100644 --- a/esdlvalidator/validation/functions/check_child_attribute.py +++ b/esdlvalidator/validation/functions/check_child_attribute.py @@ -28,6 +28,8 @@ def execute(self): components = [] if hasattr(self.value, self.args["component"]): components = getattr(self.value, self.args["component"]) + if components is None: + components = [] if len(components) == 0: result = "{} has no components of type 
{}".format(self.value.id, self.args["component"]) diff --git a/esdlvalidator/validation/functions/check_child_attribute_not_null.py b/esdlvalidator/validation/functions/check_child_attribute_not_null.py new file mode 100644 index 0000000..e942c52 --- /dev/null +++ b/esdlvalidator/validation/functions/check_child_attribute_not_null.py @@ -0,0 +1,188 @@ +import json +from esdlvalidator.validation.functions import utils +from esdlvalidator.validation.functions.function import FunctionFactory, FunctionCheck, FunctionDefinition, \ + ArgDefinition, FunctionType, CheckResult + + +@FunctionFactory.register(FunctionType.CHECK, "check_child_attribute_not_null") +class ContainsNotConnectedTo(FunctionCheck): + + def get_function_definition(self): + return FunctionDefinition( + "check_child_attribute_not_null", + "Check if asset component has attribute", + [ + ArgDefinition("component", "The child component containing the attribute", True), + ArgDefinition("attribute", "The attributes that needs to checked", True), + ArgDefinition("check_type", "Checking for existence or values: exists/value/both", True), + ArgDefinition("unit_type", "Which unit type to check", False), + ArgDefinition("unit", "Unit to compare it to", False), + ArgDefinition("resultMsgJSON", "Display output in JSON format", False) + ] + ) + + def before_execute(self): + pass + + def execute(self): + msg = {"offending_asset": self.value.id} + exist_check = False # only error messaging for relevant checks + value_check = False + unit_check = False + + if self.args["check_type"] == "exists" or self.args["check_type"] == "both": + exist_check = True + if self.args["check_type"] == "unit" or self.args["check_type"] == "both": + unit_check = True + if self.args["check_type"] == "value": + value_check = True + + if isinstance(self.args["attribute"], list) and value_check: + result = ("Not allowed to check multiple attributes if also values and units of attributes are " + "checked") + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + + components = None + if hasattr(self.value, self.args["component"]): + components = getattr(self.value, self.args["component"]) + + if components is None: + if exist_check: + result = "{} has no components of type {} with the attributes {}".format(self.value.id, self.args[ + "component"], self.args["attribute"]) + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + else: + CheckResult(True) #no checking of values and units required if no cost information exists + + + + if isinstance(self.args["attribute"], list): + attr_list = [] + for attribute in self.args["attribute"]: + attr = None + if hasattr(components, attribute): + attr = getattr(components, attribute) + if attr is None: + attr_list.append(attribute) + if len(attr_list)==0: + return CheckResult(True) + else: + result = "{} has no components of type {} with the attributes {}".format(self.value.id, + self.args["component"], + attr_list) + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + else: + attr = None + if type(components).__name__ == "EOrderedSet" and len(components)==1: + components = components[0] + if hasattr(components, self.args["attribute"]): + attr = getattr(components, self.args["attribute"]) + 
if attr is None: + if exist_check: + result = "{} has no components of type {} with the attributes {}".format(self.value.id, + self.args["component"], + self.args["attribute"]) + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + else: + return CheckResult(True) #no checking of values and units required if no cost information + # attribute exists + + if value_check: + if isinstance(attr, float): + value = attr + else: + value = getattr(attr, "value") + if value == 0.0: + result = "{} has a component of type {} with the attributes {} at 0".format(self.value.id, + self.args["component"], + self.args["attribute"]) + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + + if unit_check: + value = getattr(attr, "value") + qau = getattr(attr, 'profileQuantityAndUnit') + + if isinstance(self.args["unit_type"], list): + if len(self.args["unit_type"]) == len(self.args["unit"]): + resultcheck = self.check_units(self.args["attribute"], qau) + if not resultcheck.ok: + return resultcheck + else: + result = "Bad Schema: Number of unit_types don't match number of units" + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + + return self.check_includes("0.0", attr, value, self.value) + + return CheckResult(True) + + + def check_includes(self, include, prop, value, originalValue): + msg = {"offending_asset": self.value.id} + for includeValue in include: + if isinstance(value, list): + ret = [] + for v in value: + ret.append(self.check_includes(include, prop, v, originalValue)) + for r in ret: + if not r.ok: + return r + elif str(includeValue).lower() == str(value).lower(): + result = self.__create_message("{0} cannot be null".format(prop), originalValue) + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + + return CheckResult(True) + + def check_units(self, prop, qau): + correct = 0 + msg = {"offending_asset": self.value.id} + for i in range(0, len(self.args["unit_type"])): + if hasattr(qau, self.args["unit_type"][i]): + unittype = getattr(qau, self.args["unit_type"][i]) + unit = self.args["unit"][i] + if str(unittype.name).lower() == str(unit).lower(): + correct += 1 + if correct != len(self.args["unit_type"]): + result = self.__create_message("{0} should contain {1} of type {2}".format(prop, self.args["unit_type"], + self.args["unit"]), self.value) + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + + return CheckResult(True) + + + def __create_message(self, msg, value): + if utils.has_attribute(value, "id"): + msg += " for entity {0}".format(utils.get_attribute(value, "id")) + return msg + diff --git a/esdlvalidator/validation/functions/check_in_range.py b/esdlvalidator/validation/functions/check_in_range.py index bed2432..d397211 100644 --- a/esdlvalidator/validation/functions/check_in_range.py +++ b/esdlvalidator/validation/functions/check_in_range.py @@ -41,7 +41,7 @@ def execute(self): msg["message"] = result return CheckResult(False, msg) else: - CheckResult(False, result) + return CheckResult(False, result) def 
set_values(self): self.property = utils.get_attribute(self.args, "property") diff --git a/esdlvalidator/validation/functions/check_multi_cond.py b/esdlvalidator/validation/functions/check_multi_cond.py index 2318129..51ab437 100644 --- a/esdlvalidator/validation/functions/check_multi_cond.py +++ b/esdlvalidator/validation/functions/check_multi_cond.py @@ -49,8 +49,8 @@ def execute(self): return CheckResult(False, msg) else: return CheckResult(False, result) - - fail = fail and (str(utils.get_attribute(self.value, p)) == str(v)) + value = utils.get_attribute(self.value, p) + fail = fail and ((isinstance(v, str) and str(v).lower() == str(value).lower()) or (v == value)) if fail: result = "One of {} must be defined".format(" or ".join(self.args['properties'])) diff --git a/esdlvalidator/validation/functions/check_multi_cond_xor.py b/esdlvalidator/validation/functions/check_multi_cond_xor.py new file mode 100644 index 0000000..3bce5fb --- /dev/null +++ b/esdlvalidator/validation/functions/check_multi_cond_xor.py @@ -0,0 +1,68 @@ +import json +from esdlvalidator.validation.functions import utils +from esdlvalidator.validation.functions.function import FunctionFactory, FunctionCheck, FunctionDefinition, \ + ArgDefinition, FunctionType, CheckResult + + +@FunctionFactory.register(FunctionType.CHECK, "multi_cond_xor") +class ContainsMultiConditionCheck(FunctionCheck): + + def get_function_definition(self): + return FunctionDefinition( + "multi_cond_xor", + "Check if a combination of properties of an asset are in violation (XOR-condition)", + [ + ArgDefinition("properties", "The properties that need to checked", True), + ArgDefinition("violations", "The property values to check against", True), + ArgDefinition("resultMsgJSON", "Display output in JSON format", False) + ] + ) + + def before_execute(self): + pass + + def execute(self): + msg = {"offending_asset": self.value.id} + if "properties" not in self.args or "violations" not in self.args: + result = "Bad Schema: Either properties or violations missing from schema for this check" + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + if len(self.args["properties"]) != len(self.args["violations"]): + result = "Bad Schema: Number of properties don't match number of violations" + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + + fails = 0 + for i in range(0, len(self.args["properties"])): + p = self.args["properties"][i] + v = self.args["violations"][i] + if not utils.has_attribute(self.value, p): + result = "property {0} not found".format(p) + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + # print(str(utils.get_attribute(self.value, p)), p, v) + if utils.get_attribute(self.value, p) == v: + fails += 1 + + fail = False + if fails != len(self.args["properties"])-1: + fail = True + + if fail: + result = "Only one of {} must be defined for asset {} ({})".format(" or ".join(self.args['properties']), + self.value.id, self.value.name) + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + return CheckResult(True) diff --git a/esdlvalidator/validation/functions/check_name_condition.py 
b/esdlvalidator/validation/functions/check_name_condition.py new file mode 100644 index 0000000..90df3e0 --- /dev/null +++ b/esdlvalidator/validation/functions/check_name_condition.py @@ -0,0 +1,62 @@ +import json + +from esdlvalidator.validation.functions.function import FunctionFactory, FunctionCheck, FunctionDefinition, \ + ArgDefinition, FunctionType, CheckResult + + +@FunctionFactory.register(FunctionType.CHECK, "name_condition") +class NameCondition(FunctionCheck): + + def get_function_definition(self): + return FunctionDefinition( + "name_condition", + "Asset names agrees with conditions", + [ + ArgDefinition("condition", "The condition for which the names need to be checked", True), + ArgDefinition("resultMsgJSON", "Display output in JSON format", False) + ] + ) + + def before_execute(self): + pass + + def execute(self): + names_list = self.datasets.get("names_dict")["name"] + + msg = {"offending_asset": self.value.id} + if self.args["condition"] == "uniqueness": + count = names_list.count(self.value.name) + if count != 1: + result = "{} has a non-unique name ({}), it occurs {} times in this ESDL".format(self.value.id, + self.value.name, count) + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + elif self.args["condition"] == "supply_return": + if self.value.name.endswith('_ret'): + check_name = self.value.name[:-4] + if check_name not in names_list: + result = ("{} does not have a parallel supply asset with the name ({})").format( + self.value.id, check_name) + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + else: + check_name = self.value.name + '_ret' + if check_name not in names_list: + result = ("{} ({}) does not have a parallel return asset with the name ({})").format( + self.value.id, self.value.name, check_name) + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + else: + CheckResult(False, "No proper condition was selected") + + + return CheckResult(True) diff --git a/esdlvalidator/validation/functions/check_not_null.py b/esdlvalidator/validation/functions/check_not_null.py index c80b72c..2ad1497 100644 --- a/esdlvalidator/validation/functions/check_not_null.py +++ b/esdlvalidator/validation/functions/check_not_null.py @@ -1,3 +1,5 @@ +from esdl import esdl + from esdlvalidator.validation.functions import utils from esdlvalidator.validation.functions.function import FunctionFactory, FunctionCheck, FunctionDefinition, ArgDefinition, FunctionType, CheckResult @@ -27,7 +29,10 @@ def execute(self): # Some esdl entity values have a default of undefined or none when not set include.extend(["undefined", "none"]) value = self.value - msg = {"offending_asset": self.value.id} + if isinstance(value, esdl.Port): + msg = {"offending_asset": self.value.eContainer().id} + else: + msg = {"offending_asset": self.value.id} if hasProp: if not utils.has_attribute(value, prop): @@ -48,19 +53,20 @@ def execute(self): else: return CheckResult(False, result) - return self.check_includes(include, prop, value, self.value) + return self.check_includes(include, prop, value, self.value, msg) - def check_includes(self, include, prop, value, originalValue): - msg = {"offending_asset": self.value.id} + def check_includes(self, include, prop, value, 
originalValue, msg): + # msg = {"offending_asset": self.value.id} for includeValue in include: if isinstance(value, list): ret = [] for v in value: - ret.append(self.check_includes(include, prop, v, originalValue)) + ret.append(self.check_includes(include, prop, v, originalValue, msg)) for r in ret: if not r.ok: return r - elif str(includeValue).lower() == str(value).lower(): + elif (isinstance(includeValue, str) and str(includeValue).lower() == str(value).lower()) or ( + includeValue == value): result = self.__create_message("{0} cannot be null".format(prop), originalValue) if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: msg["message"] = result diff --git a/esdlvalidator/validation/functions/check_number_ports.py b/esdlvalidator/validation/functions/check_number_ports.py new file mode 100644 index 0000000..53d4530 --- /dev/null +++ b/esdlvalidator/validation/functions/check_number_ports.py @@ -0,0 +1,40 @@ +import json + +from esdlvalidator.validation.functions.function import FunctionFactory, FunctionCheck, FunctionDefinition, \ + ArgDefinition, FunctionType, CheckResult + + +@FunctionFactory.register(FunctionType.CHECK, "number_ports") +class ContainsXNumberPorts(FunctionCheck): + + def get_function_definition(self): + return FunctionDefinition( + "number_ports", + "Check the number of ports available", + [ + ArgDefinition("number", "Display output in JSON format", True), + ArgDefinition("resultMsgJSON", "Display output in JSON format", False) + ] + ) + + def before_execute(self): + pass + + def execute(self): + msg = {"offending_asset": self.value.id} + if len(self.value.port) < self.args["number"]: + result = f"{self.value.id} has less than {self.args['number']} ports" + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + elif len(self.value.port) > self.args["number"]: + result = f"{self.value.id} has more than {self.args['number']} ports" + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + + return CheckResult(True) diff --git a/esdlvalidator/validation/functions/check_port_property_combination.py b/esdlvalidator/validation/functions/check_port_property_combination.py new file mode 100644 index 0000000..ba111c7 --- /dev/null +++ b/esdlvalidator/validation/functions/check_port_property_combination.py @@ -0,0 +1,103 @@ +import json + +import esdl + +from esdlvalidator.validation.functions import utils +from esdlvalidator.validation.functions.function import FunctionFactory, FunctionCheck, FunctionDefinition, \ + ArgDefinition, FunctionType, CheckResult + + +@FunctionFactory.register(FunctionType.CHECK, "port_property_combination") +class ContainsPortPropertyCombination(FunctionCheck): + + def get_function_definition(self): + return FunctionDefinition( + "port_property_combination", + "Check if an asset is only connected to", + [ + ArgDefinition("port_type_larger", "The porttype of which the property must be larger than the " + "port of port_type_smaller",True), + ArgDefinition("port_type_smaller", "The porttype of which the property must be smaller than the " + "port of port_type_larger", True), + ArgDefinition("property_larger", "The property which must be larger on port_type_larger", True), + ArgDefinition("property_smaller", "The property which must be smaller on port_type_smaller", True), + ArgDefinition("port_name_larger", "A string 
that must be part of the port name for the port with the " + "larger property", False), + ArgDefinition("port_name_smaller", "A string that must be part of the port name for the port with " + "the smaller property", False), + ArgDefinition("resultMsgJSON", "Display output in JSON format", False) + ] + ) + + def before_execute(self): + pass + + def execute(self): + msg = {"offending_asset": self.value.id} + result = "" + connected_ports = 0 + carriers_exists = 0 + attributes_exists = 0 + + port_type_larger = getattr(esdl, self.args['port_type_larger']) + port_type_smaller = getattr(esdl, self.args['port_type_smaller']) + + if len(self.value.port) !=0: + value_larger = 0 + value_smaller = 0 + for port in self.value.port: + if len(port.connectedTo) != 0: + connected_ports += 1 + if isinstance(port, port_type_larger): + new_obj = port + bool_name = True + if 'port_name_larger' in self.args: + if self.args['port_name_larger'] not in port.name.lower(): + bool_name = False + if bool_name == True: + for arg in self.args["property_larger"]: + new_obj = utils.get_attribute(new_obj, arg) + if arg == self.args["property_larger"][-1]: + value_larger = new_obj + attributes_exists += 1 + if arg == None: + continue + elif isinstance(port, port_type_smaller): + new_obj = port + bool_name = True + if 'port_name_smaller' in self.args: + if self.args['port_name_smaller'] not in port.name.lower(): + bool_name = False + if bool_name == True: + for arg in self.args["property_smaller"]: + new_obj = utils.get_attribute(new_obj, arg) + if arg == self.args["property_smaller"][-1]: + value_smaller = new_obj + attributes_exists += 1 + if arg == None: + continue + if connected_ports != 1: + if attributes_exists != 2: + result = (f"{self.value.id} (name: {self.value.name}) does not have the carriers properly assigned, set carriers again") + elif (value_larger == 0 or value_smaller == 0) or value_larger < value_smaller : + port_name_smaller_value = self.args['port_name_smaller'] if 'port_name_smaller' in self.args else '' + port_name_larger_value = self.args['port_name_larger'] if 'port_name_larger' in self.args else '' + result = (f"{self.value.id} (name: {self.value.name}) is connected to the wrong ports or carriers " + f"have not been added properly. 
The {self.args['property_smaller'][-1]} of " + f"{self.args['port_type_smaller']} {port_name_smaller_value} should be smaller than the " + f"{self.args['property_larger'][-1]} of {self.args['port_type_larger']} " + f"{port_name_larger_value}") + # as carriers only have return or supply temperature, if wrong carrier, then temperature 0 + else: + if value_larger == 0 and value_smaller == 0: + result = ("{} (name: {}) is connected to the wrong ports or carriers have not been added " + "properly").format(self.value.id, self.value.name) + + if len(result) > 0: + if 'resultMsgJSON' in self.args and self.args['resultMsgJSON']: + msg["message"] = result + return CheckResult(False, msg) + else: + return CheckResult(False, result) + + return CheckResult(True) diff --git a/esdlvalidator/validation/functions/check_value_range.py b/esdlvalidator/validation/functions/check_value_range.py new file mode 100644 index 0000000..2e0cdd4 --- /dev/null +++ b/esdlvalidator/validation/functions/check_value_range.py @@ -0,0 +1,41 @@ +from esdlvalidator.core.esdl import utils as esdlUtils +from esdlvalidator.validation.functions import utils +from esdlvalidator.validation.functions.function import FunctionFactory, FunctionCheck, FunctionDefinition, \ + ArgDefinition, FunctionType, CheckResult + + +@FunctionFactory.register(FunctionType.CHECK, "check_value_range") +class ContainsNumberAssets(FunctionCheck): + + def get_function_definition(self): + return FunctionDefinition( + "check_value_range", + "Check the number of assets", + [ + ArgDefinition("min_value", "The min number of assets that should be present", True), + ArgDefinition("max_value", "The max number of assets that should be present", True), + ArgDefinition("message", "Part of message to be added", False), + ArgDefinition("resultMsgJSON", "Display output in JSON format", False) + ] + ) + + def before_execute(self): + pass + + def execute(self): + msg = {} + + dataset2 = self.datasets.get("resource") + area_obj = esdlUtils.get_entities_from_esdl_resource_by_type(dataset2, "Area") + msg = {"offending_asset": area_obj[0].id} + + self.min = utils.get_attribute(self.args, "min_value") + self.max = utils.get_attribute(self.args, "max_value") + if self.value < self.min: + msg["message"] = utils.get_attribute(self.args, "message") + ", not enough assets present" + return CheckResult(False, msg) + elif self.value > self.max: + msg["message"] = utils.get_attribute(self.args, "message") + ", too many assets present" + return CheckResult(False, msg) + + return CheckResult(True) diff --git a/esdlvalidator/validation/functions/select_get.py b/esdlvalidator/validation/functions/select_get.py index 7b75e55..66b4e9c 100644 --- a/esdlvalidator/validation/functions/select_get.py +++ b/esdlvalidator/validation/functions/select_get.py @@ -18,7 +18,19 @@ def get_function_definition(self): def execute(self): dataset = self.datasets.get("resource") - getType = utils.get_attribute(self.args, "type") - entities = esdlUtils.get_entities_from_esdl_resource_by_type(dataset, getType) + if isinstance(self.args['type'], str): + # if self.args['type'] == "dataset": + # entities = dataset + # else: + getType = utils.get_attribute(self.args, "type") + entities = esdlUtils.get_entities_from_esdl_resource_by_type(dataset, getType) + elif isinstance(self.args['type'], list): + entities = [] + for arg in self.args['type']: + getType = utils.get_attribute({'type': arg}, "type") + if len(entities)==0: + entities = esdlUtils.get_entities_from_esdl_resource_by_type(dataset, getType) + else: + entities += esdlUtils.get_entities_from_esdl_resource_by_type(dataset, getType) return entities diff --git
a/esdlvalidator/validation/functions/select_get_exclude.py b/esdlvalidator/validation/functions/select_get_exclude.py new file mode 100644 index 0000000..9387c40 --- /dev/null +++ b/esdlvalidator/validation/functions/select_get_exclude.py @@ -0,0 +1,35 @@ +from esdlvalidator.core.esdl import utils as esdlUtils + +from esdlvalidator.validation.functions import utils +from esdlvalidator.validation.functions.function import FunctionFactory, FunctionSelect, FunctionDefinition, ArgDefinition, FunctionType + + +@FunctionFactory.register(FunctionType.SELECT, "get_exclude") +class SelectGetExclude(FunctionSelect): + + def get_function_definition(self): + return FunctionDefinition( + "get_exclude", + "Get a list of entities from loaded ESDL data", + [ + ArgDefinition("type", "type name of entities to retrieve, this can be a superclass. i.e. Port returns InPort, OutPort...", True), + ArgDefinition("exclude_types", "type name of entities to exclude, this can be a superclass. i.e. Port returns InPort, OutPort...", True) + ] + ) + + def execute(self): + dataset = self.datasets.get("resource") + esdlClasses = esdlUtils.get_esdl_class_from_string(self.args["exclude_types"]) + entities = [] + if isinstance(self.args['type'], str): + getType = utils.get_attribute(self.args, "type") + conditions = esdlUtils.get_entities_from_esdl_resource_by_type(dataset, getType) + for condition in conditions: + count_class = 0 + for esdlClass in esdlClasses: + if isinstance(condition, esdlClass): + count_class +=1 + if count_class == 0: + entities.append(condition) + + return entities diff --git a/esdlvalidator/validation/functions/select_get_names_list.py b/esdlvalidator/validation/functions/select_get_names_list.py new file mode 100644 index 0000000..a813466 --- /dev/null +++ b/esdlvalidator/validation/functions/select_get_names_list.py @@ -0,0 +1,45 @@ +from esdlvalidator.core.esdl import utils as esdlUtils + +from esdlvalidator.validation.functions import utils +from esdlvalidator.validation.functions.function import FunctionFactory, FunctionSelect, FunctionDefinition, ArgDefinition, FunctionType +# from esdlvalidator.validation.functions.select_get import SelectGet + + +@FunctionFactory.register(FunctionType.SELECT, "get_nameslist") +class SelectGetNamesList(FunctionSelect): + + def get_function_definition(self): + return FunctionDefinition( + "get_nameslist", + "Get a list of entities from loaded ESDL data and list of all names", + [ + ArgDefinition("type", "type name of entities to retrieve, this can be a superclass. i.e. 
Port returns InPort, OutPort...", True), + ArgDefinition("property","name of property that needs to be saved in list", True) + + ] + ) + + def execute(self): + dataset = self.datasets.get("resource") + if isinstance(self.args['type'], str): + getType = utils.get_attribute(self.args, "type") + entities = esdlUtils.get_entities_from_esdl_resource_by_type(dataset, getType) + elif isinstance(self.args['type'], list): + entities = [] + for arg in self.args['type']: + getType = utils.get_attribute({'type': arg}, "type") + if len(entities)==0: + entities = esdlUtils.get_entities_from_esdl_resource_by_type(dataset, getType) + else: + entities += esdlUtils.get_entities_from_esdl_resource_by_type(dataset, getType) + + names_list = {} + if isinstance(self.args["property"], list): + for property in self.args["property"]: + names_list[property] = [] + for entity in entities: + names_list[property].append(utils.get_attribute(entity, property)) + + self.datasets['names_dict'] = names_list + + return entities diff --git a/esdlvalidator/validation/functions/select_sum.py b/esdlvalidator/validation/functions/select_sum.py index 460608f..cb22310 100644 --- a/esdlvalidator/validation/functions/select_sum.py +++ b/esdlvalidator/validation/functions/select_sum.py @@ -11,17 +11,26 @@ def get_function_definition(self): "Sum all numbers for a dataset", [ ArgDefinition("property", "The name of the propery containing the value to calculate the average for", True), - ArgDefinition("dataset", "Which dataset to use, this should be matching an alias given to a previous select query", True) + ArgDefinition("dataset", "Which dataset to use, this should be matching an alias given to a previous " + "select query", True), + ArgDefinition("condition", "when condition met, the asset should be count", False), ] ) def execute(self): prop = utils.get_attribute(self.args, "property") dataset = utils.get_attribute(self.args, "dataset") + condition = utils.get_attribute(self.args, "condition") count = 0 - for entry in self.datasets.get(dataset): - value = getattr(entry, prop) - count += value + if not condition: + for entry in self.datasets.get(dataset): + value = getattr(entry, prop) + count += value + else: + for entry in self.datasets.get(dataset): + value = getattr(entry, prop) + if value.name in condition: + count += 1 return count diff --git a/esdlvalidator/validation/functions/tests/test_function_select.py b/esdlvalidator/validation/functions/tests/test_function_select.py index 650a417..7a2b0a0 100644 --- a/esdlvalidator/validation/functions/tests/test_function_select.py +++ b/esdlvalidator/validation/functions/tests/test_function_select.py @@ -53,5 +53,5 @@ def test_select_filter_has_property(self): self.assertEqual(len(filtered.result), 2, "There should be 2 filtered") def get_test_datasets(self): - esh = utils.get_esh_from_file("testdata/ameland_energie_2015.esdl") + esh = utils.get_esh_from_file("testdata/Ameland_energie_2015.esdl") return {"resource": esh.resource} diff --git a/esdlvalidator/validation/functions/utils.py b/esdlvalidator/validation/functions/utils.py index b0c4596..e00f26f 100644 --- a/esdlvalidator/validation/functions/utils.py +++ b/esdlvalidator/validation/functions/utils.py @@ -6,7 +6,7 @@ def has_attribute(obj, name: str) -> bool: # give a default "nothing_found" since None can be the actual returned value result = get_attribute(obj, name, "nothing_found") - return False if result is "nothing_found" else True + return False if result == "nothing_found" else True def get_attribute(obj, name: str, 
default=None) -> bool: @@ -15,7 +15,8 @@ def get_attribute(obj, name: str, default=None) -> bool: Args: obj: List, class or dictionary to get a property value from name (str): The property to get - default (Object): Optional default value that will return when property not found, defaults to None + default (Object): Optional default value that will return when property not found, + defaults to None Returns: property value: The property value found for given name, default if not found diff --git a/esdlvalidator/validation/tests/__init__.py b/esdlvalidator/validation/tests/__init__.py index a8fc00d..112d0eb 100644 --- a/esdlvalidator/validation/tests/__init__.py +++ b/esdlvalidator/validation/tests/__init__.py @@ -2,7 +2,9 @@ from esdlvalidator.core.esdl import utils from esdlvalidator.validation.file_repository import FileSchemaRepository -with open('./testdata/validation_test.db', 'w') as file: +from pathlib import Path + +with open(Path.joinpath(Path(__file__).parents[3], "testdata/validation_test.db"), 'w') as file: file.write("") @@ -14,16 +16,35 @@ def get_test_schema_data(file): def get_test_schema_id(schemaData): - repo = FileSchemaRepository("./testdata/validation_test.db") + # repo = FileSchemaRepository("./testdata/validation_test.db") + repo = FileSchemaRepository(Path.joinpath(Path(__file__).parents[3], "testdata/validation_test.db")) schemaID = repo.insert(schemaData) return repo.get_by_id(schemaID) def get_test_dataset_ameland(): - esh = utils.get_esh_from_file("testdata/ameland_energie_2015.esdl") + esh = utils.get_esh_from_file(Path.joinpath(Path(__file__).parents[3], "testdata/esdls/ameland_energie_2015.esdl")) return esh.resource def get_test_dataset_hybrid(): - esh = utils.get_esh_from_file("testdata/hybrid_hp_with_pv_storage.esdl") + esh = utils.get_esh_from_file(Path.joinpath(Path(__file__).parents[3], "testdata/esdls/hybrid_hp_with_pv_storage.esdl")) + return esh.resource + +def get_test_dataset_3B_bad(): + esh = utils.get_esh_from_file(Path.joinpath(Path(__file__).parents[3], "testdata/esdls/3B_bad.esdl")) + return esh.resource + +def get_test_dataset_PoC(): + esh = utils.get_esh_from_file(Path.joinpath(Path(__file__).parents[3], "testdata/esdls/PoC_validator with return network.esdl")) + # esh = utils.get_esh_from_file(r"C:\Users\janssenfpjh\Downloads\New Energy System with return network (1).esdl") + # esh = utils.get_esh_from_file(r"C:\Users\janssenfpjh\Downloads\lkj with return network.esdl") + # esh = utils.get_esh_from_file(r"C:\Users\janssenfpjh\Downloads\WBr Moerdijk 2clean_carrier_duplicate.esdl") + # esh = utils.get_esh_from_file(r"C:\Users\janssenfpjh\Downloads\WBr Moerdijk 2clean.esdl") + # esh = utils.get_esh_from_file(r"C:\Users\janssenfpjh\Downloads\WBr Moerdijk 2clean2.esdl") + return esh.resource + +def get_test_dataset_single_pipes(): + esh = utils.get_esh_from_file( + Path.joinpath(Path(__file__).parents[3], "testdata/esdls/Single_pipes.esdl")) return esh.resource diff --git a/esdlvalidator/validation/tests/test_validator.py b/esdlvalidator/validation/tests/test_validator.py index 6b79f41..7768c35 100644 --- a/esdlvalidator/validation/tests/test_validator.py +++ b/esdlvalidator/validation/tests/test_validator.py @@ -1,8 +1,11 @@ import unittest -from esdlvalidator.validation.tests import get_test_schema_data, get_test_schema_id, get_test_dataset_ameland, get_test_dataset_hybrid +from esdlvalidator.validation.tests import (get_test_schema_data, get_test_schema_id, get_test_dataset_ameland, + get_test_dataset_hybrid, get_test_dataset_3B_bad, 
get_test_dataset_PoC, get_test_dataset_single_pipes) from esdlvalidator.validation.validator import EsdlValidator +from pathlib import Path + class TestValidator(unittest.TestCase): """Tests for the validator""" @@ -10,10 +13,17 @@ class TestValidator(unittest.TestCase): @classmethod def setUpClass(cls): super(TestValidator, cls).setUpClass() - cls.schemaOne = get_test_schema_id(get_test_schema_data("testdata/schema_test_1.json")) - cls.schemaTwo = get_test_schema_id(get_test_schema_data("testdata/schema_test_2.json")) + cls.schemaOne = get_test_schema_id(get_test_schema_data(Path.joinpath(Path(__file__).parents[3], + "testdata/schemas/schema_test_1.json"))) + cls.schemaTwo = get_test_schema_id(get_test_schema_data(Path.joinpath(Path(__file__).parents[3], + "testdata/schemas/schema_test_2.json"))) + cls.schemaPOC = get_test_schema_id(get_test_schema_data(Path.joinpath(Path(__file__).parents[3], + "testdata/schemas/schema_PoC.json"))) cls.esdlAmeland = get_test_dataset_ameland() cls.esdlHybrid = get_test_dataset_hybrid() + cls.esdl3B = get_test_dataset_3B_bad() + cls.esdlPOC = get_test_dataset_PoC() + cls.esdlSingle = get_test_dataset_single_pipes() def test_validate_schema_1(self): """test running the validator for test schema 1 and ameland test esdl""" @@ -28,7 +38,7 @@ def test_validate_schema_1(self): # assert self.assertEqual(validationAreaScope.checked, 8, "there should be 8 checked") self.assertEqual(len(validationAreaScope.warnings), 1, "there should be 1 warning") - self.assertEqual(validationAreaScope.warnings[0], "Area does not have a scope: value equals undefined for entity BU00600007", "Warning should say: Area does not have a scope: value equals undefined for entity BU00600007") + self.assertEqual(validationAreaScope.warnings[0], "Area does not have a scope: scope cannot be null for entity BU00600007", "Warning should say: Area does not have a scope: scope cannot be null for entity BU00600007") def test_validate_schema_2(self): """test running the validator on test schema 2 on dynamic test esdl with a real world scenario, multiple validations including and + or""" @@ -46,8 +56,8 @@ def test_validate_schema_2(self): # assert self.assertEqual(validationProducer.checked, 3, "there should be 3 checked since there are only 3 producers") - self.assertEqual(len(validationProducer.errors), 2, "there should be 2 errors since 1 producer validates ok") - self.assertEqual(validationProducer.errors[0], "Consumer missing power and marginal costs or no energy profile connected: None", "Warning should say: Consumer missing power and marginal costs or no energy profile connected: None") + self.assertEqual(len(validationProducer.errors), 1, "there should be 1 errors since 2 producer validates ok") + self.assertEqual(validationProducer.errors[0], "Consumer missing power and marginal costs or no energy profile connected: port.profile cannot be null for entity 5983a0f8-6f87-47b5-ba31-5e245c370dab", "Warning should say: Consumer missing power and marginal costs or no energy profile connected: port.profile cannot be null for entity 5983a0f8-6f87-47b5-ba31-5e245c370dab") self.assertEqual(validationStorage.checked, 1, "there should be 1 checked storage") self.assertEqual(len(validationStorage.errors), 0, "there should be 0 errors, storage should be correct") @@ -72,3 +82,72 @@ def test_validate_multiple_schemas(self): # assert self.assertEqual(len(result.schemas), 2, "there should be 2 schemas in the result") + + def test_validate_poc_scheme(self): + """test running the validator on test schema poc on 
dynamic test esdl""" + + # prepare + validator = EsdlValidator() + + # execute, validate against 1 schema for both single and duplicated network + result = validator.validate(self.esdlPOC, [self.schemaPOC]) + result2 = validator.validate(self.esdlSingle, [self.schemaPOC]) + + self.assertEqual(len(result.schemas), 1, "there should be 1 schemas in the result") + + for validation in result.schemas[0].validations: + if validation.name == 'connected_as_consumer': + self.assertEqual(len(validation.errors),2, 'there should be two assets not properly connected as ' + 'consumers') + elif validation.name == "storage_chargerate_undefined": + self.assertEqual(len(validation.warnings), 2) + elif validation.name == "storage_dischargerate_undefined": + self.assertEqual(len(validation.warnings), 1) + elif validation.name == "heat_exchanger_capacity": + self.assertEqual(len(validation.errors), 1) + elif validation.name == "costinformation_exists": + self.assertEqual(len(validation.warnings), 8) + elif validation.name == "investmentcost_per_power_undefined": + self.assertEqual(len(validation.errors), 2) + elif validation.name == "fixed_opex_undefined": + self.assertEqual(len(validation.errors), 1) + elif validation.name == "var_opex_undefined": + self.assertEqual(len(validation.errors), 1) + elif validation.name == "non_unique_names": + self.assertEqual(len(validation.errors), 2) + elif validation.name == "supply_return_combination": + self.assertEqual(len(validation.errors), 6) + elif validation.name == "investmentcost_per_power_undefined": + self.assertEqual(len(validation.errors), 1) + elif validation.name == "asset_power_undefined": + self.assertEqual(len(validation.errors), 7) + elif validation.name == "heatpump_cop_undefined": + self.assertEqual(len(validation.errors), 1) + elif validation.name == "tank_storage_capacity_undefined": + self.assertEqual(len(validation.errors), 1) + elif validation.name == "consumer_profile_undefined": + self.assertEqual(len(validation.errors), 1) + elif validation.name == "consumer_profile_not_null": + self.assertEqual(len(validation.errors), 1) + elif validation.name == "connected_prim_sec": + self.assertEqual(len(validation.errors), 1) + elif validation.name == "connected_prim_sec_return": + self.assertEqual(len(validation.errors), 1) + else: + if 'errors' in validation.__dict__.keys(): + self.assertEqual(len(validation.errors),0) + elif 'warnings' in validation.__dict__.keys(): + self.assertEqual(len(validation.warnings), 0) + + for validation in result2.schemas[0].validations: + if validation.name == 'heat_carriers': + self.assertEqual(len(validation.errors),1) + elif validation.name == "unconnected_port": + self.assertEqual(len(validation.warnings), 4) + elif validation.name == "supply_return_combination": + self.assertEqual(len(validation.errors), 10) + else: + if 'errors' in validation.__dict__.keys(): + self.assertEqual(len(validation.errors),0) + elif 'warnings' in validation.__dict__.keys(): + self.assertEqual(len(validation.warnings), 0) \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index e017f51..adf4a38 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,9 +3,8 @@ pyecoregen>=0.4.3 lxml>=4.6.2 numpy>=1.19.4 flask>=1.1.2 -flask-cors==1.10.3 -flask-restx==0.5.0 -Flask-RESTful>0.3.8 +flask-cors==4.0.0 +flask-restx==1.3.0 waitress>=1.4.4 tinydb>=4.3.0 autodiscover>=0.0.3 @@ -16,7 +15,7 @@ pymongo>=3.11.3 pytz>=2021.1 toml>=0.10.2 Jinja2>=2.11.3 -Werkzeug>=1.0.1 +Werkzeug==2.1.2 itsdangerous>=1.1.0 click>=7.1.2 
setuptools>=53.0.0 diff --git a/testdata/3B_bad.esdl b/testdata/esdls/3B_bad.esdl similarity index 100% rename from testdata/3B_bad.esdl rename to testdata/esdls/3B_bad.esdl diff --git a/testdata/Ameland_energie_2015.esdl b/testdata/esdls/Ameland_energie_2015.esdl similarity index 100% rename from testdata/Ameland_energie_2015.esdl rename to testdata/esdls/Ameland_energie_2015.esdl diff --git a/testdata/esdls/PoC_validator with return network.esdl b/testdata/esdls/PoC_validator with return network.esdl new file mode 100644 index 0000000..a1bdf7c --- /dev/null +++ b/testdata/esdls/PoC_validator with return network.esdl @@ -0,0 +1,386 @@ [386 added lines of ESDL XML content not rendered here] diff --git a/testdata/esdls/Single_pipes.esdl b/testdata/esdls/Single_pipes.esdl new file mode 100644 index 0000000..61bfae8 --- /dev/null +++ b/testdata/esdls/Single_pipes.esdl @@ -0,0 +1,195 @@ [195 added lines of ESDL XML content not rendered here] diff --git a/testdata/hybrid_hp_with_pv_storage.esdl b/testdata/esdls/hybrid_hp_with_pv_storage.esdl similarity index 100% rename from testdata/hybrid_hp_with_pv_storage.esdl rename to testdata/esdls/hybrid_hp_with_pv_storage.esdl diff --git a/testdata/3B.esdl b/testdata/esdls/old/3B.esdl similarity index 100% rename from testdata/3B.esdl rename to testdata/esdls/old/3B.esdl diff --git a/testdata/HundredPipes.esdl b/testdata/esdls/old/HundredPipes.esdl similarity index 100% rename from testdata/HundredPipes.esdl rename to testdata/esdls/old/HundredPipes.esdl diff --git a/testdata/esdl_with_notes.esdl b/testdata/esdls/old/esdl_with_notes.esdl similarity index 100% rename from testdata/esdl_with_notes.esdl rename to testdata/esdls/old/esdl_with_notes.esdl diff --git a/testdata/validated_3B_bad.esdl b/testdata/esdls/old/validated_3B_bad.esdl similarity index 100% rename from testdata/validated_3B_bad.esdl rename to testdata/esdls/old/validated_3B_bad.esdl diff --git a/testdata/results.json b/testdata/schemas/old/results.json similarity index 100% rename from testdata/results.json rename to testdata/schemas/old/results.json diff --git a/testdata/schemas/old/schema_PoC_test.json b/testdata/schemas/old/schema_PoC_test.json new file mode 100644 index 0000000..6e3e93f --- /dev/null +++ b/testdata/schemas/old/schema_PoC_test.json @@ -0,0 +1,733 @@ +{ + "name": "PoC validation schema", + "description": "CHESS validation schema contains checks for CHESS", + "validations": [{ + "name": "number_of_assets", + "description": "Find if number
of assets is within limits", + "type": "warning", + "message": "Many assets present (ENABLED) in the file, the optimisation/simulation might take a while", + "selects": [{ + "function": "get_exclude", + "alias": "dataset", + "args": { + "type": "EnergyAsset", + "exclude_types": "Joint" + } + }, { + "function": "sum", + "alias": "count_enabled", + "args": { + "property" : "state", + "dataset" : "dataset", + "condition" : "ENABLED" + } + }],"check": { + "function": "check_value_range", + "dataset": "count_enabled", + "args": { + "min_value": 0, + "max_value": 5, + "message": "Many assets present (ENABLED) in the file, the optimisation/simulation might take a while", + "resultMsgJSON": true + } + } + },{ + "name": "number_of_assets_sizing", + "description": "Find if number of assets is within limits", + "type": "error", + "message": "Too many assets to size (OPTIONAL) in the file besides pipes, this is not supported in the PoC", + "selects": [{ + "function": "get_exclude", + "alias": "dataset", + "args": { + "type": "EnergyAsset", + "exclude_types": ["Joint", "Pipe"] + } + }, { + "function": "sum", + "alias": "count_optional", + "args": { + "property" : "state", + "dataset" : "dataset", + "condition" : "OPTIONAL" + } + }],"check": { + "function": "check_value_range", + "dataset": "count_optional", + "args": { + "min_value": 0, + "max_value": 100, + "message": "Too many assets to size (OPTIONAL) in the file besides pipes, this is not supported in the PoC", + "resultMsgJSON": true + } + } + },{ + "name": "number_of_pipe_sizing_error", + "description": "Find if number of sizing assets is within limits", + "type": "error", + "message": "Too many pipes to size (OPTIONAL) in the file, this is not supported in the PoC", + "selects": [{ + "function": "get", + "alias": "dataset", + "args": { + "type": "Pipe" + } + }, { + "function": "sum", + "alias": "count_optional", + "args": { + "property" : "state", + "dataset" : "dataset", + "condition" : "OPTIONAL" + } + } + ],"check": { + "function": "check_value_range", + "dataset": "count_optional", + "args": { + "min_value": 0, + "max_value": 100, + "message": "Too many pipes to size (OPTIONAL) in the file, this is not supported in the PoC", + "resultMsgJSON": true + } + } + + },{ + "name": "number_of_pipe_sizing_warning", + "description": "Find if number of sizing assets is within limits", + "type": "warning", + "message": "Many pipes to size (OPTIONAL) in the file, the optimisation might take a while", + "selects": [{ + "function": "get", + "alias": "dataset", + "args": { + "type": "Pipe" + } + }, { + "function": "sum", + "alias": "count_optional", + "args": { + "property" : "state", + "dataset" : "dataset", + "condition" : "OPTIONAL" + } + } + ],"check": { + "function": "check_value_range", + "dataset": "count_optional", + "args": { + "min_value": 0, + "max_value": 50, + "message": "Many pipes to size (OPTIONAL) in the file, the optimisation might take a while", + "resultMsgJSON": true + } + } + + },{ + "name": "heat_carriers", + "description": "Find heat carriers are available", + "type": "error", + "message": "One of supply and return temperature should be defined", + "selects": [{ + "function": "get", + "alias": "heatcarriers", + "args": { + "type": "HeatCommodity" + } + } + ], + "check": { + "function": "multi_cond_xor", + "dataset": "heatcarriers", + "args": { + "properties": ["supplyTemperature", "returnTemperature"], + "violations": [0.0, 0.0], + "resultMsgJSON": true + } + } + },{ + "name": "pipe_connected_to_pipe", + "description": "Find if no 
Pipes are connected to other Pipes", + "type": "error", + "message": "Pipe connected to another pipe", + "selects": [{ + "function": "get", + "alias": "pipes", + "args": { + "type": "Pipe" + } + } + ], + "check": { + "function": "not_connected_to", + "dataset": "pipes", + "args": { + "assetType": "Pipe", + "resultMsgJSON": true + } + } + }, { + "name": "asset_connected_to_joint", + "description": "Find if no assets other than pipes are connected to Joints", + "type": "error", + "message": "Asset connected to joint", + "selects": [{ + "function": "get", + "alias": "energy_assets", + "args": { + "type": ["Producer", "Consumer", "Storage", "Conversion", "Joint"] + } + } + ], + "check": { + "function": "not_connected_to", + "dataset": "energy_assets", + "args": { + "assetType": "Joint", + "resultMsgJSON": true + } + } + }, { + "name": "unconnected_port", + "description": "Find if any port is left unconnected", + "type": "warning", + "message": "Unconnected port", + "selects": [{ + "function": "get", + "alias": "assets", + "args": { + "type": "EnergyAsset" + } + } + ], + "check": { + "function": "unconnected_port", + "dataset": "assets", + "args": { + "resultMsgJSON": true + } + } + }, { + "name": "number_ports", + "description": "Find if joint does not have exactly 2 ports", + "type": "warning", + "message": "Number of ports incorrect", + "selects": [{ + "function": "get", + "alias": "assets", + "args": { + "type": "Joint" + } + } + ], + "check": { + "function": "number_ports", + "dataset": "assets", + "args": { + "resultMsgJSON": true + } + } + },{ + "name": "connected_as_producer", + "description": "Find if assets are connected as producers (Outport on supply network)", + "type": "error", + "message": "Asset is not connected as producer", + "selects": [{ + "function": "get", + "alias": "assets", + "args": { + "type": ["Producer", "GasHeater"] + } + } + ], + "check": { + "function": "port_property_combination", + "dataset": "assets", + "args": { + "component": "carrier", + "port_type_larger": "OutPort", + "port_type_smaller": "InPort", + "resultMsgJSON": true + } + } + },{ + "name": "connected_as_consumer", + "description": "Find if assets are connected as consumer (Inport on supply network)", + "type": "error", + "message": "Asset is not connected as consumer", + "selects": [{ + "function": "get", + "alias": "assets", + "args": { + "type": ["Consumer", "HeatStorage"] + } + } + ], + "check": { + "function": "port_property_combination", + "dataset": "assets", + "args": { + "component": "carrier", + "port_type_larger": "InPort", + "port_type_smaller": "OutPort", + "resultMsgJSON": true + } + } + },{ + "name": "connected_prim_sec", + "description": "Find if assets are connected properly with secondary and primary", + "type": "error", + "message": "Asset is not connected properly with primary and secondary", + "selects": [{ + "function": "get", + "alias": "assets", + "args": { + "type": ["HeatExchange", "GenericConversion", "HeatPump"] + } + } + ], + "check": { + "function": "port_property_combination", + "dataset": "assets", + "args": { + "component": "carrier", + "port_type_larger": "OutPort", + "port_type_smaller": "InPort", + "resultMsgJSON": true + } + } + },{ + "name": "diameter_undefined", + "description": "Find if both innerDiameter and diameter are not defined", + "type": "error", + "message": "Diameter not defined", + "selects": [{ + "function": "get", + "alias": "pipes", + "args": { + "type": "Pipe" + } + } + ], + "check": { + "function": "multi_cond", + "dataset": "pipes", + "args": { 
+ "properties": ["diameter", "innerDiameter"], + "violations": ["VALUE_SPECIFIED", 0.0], + "resultMsgJSON": true + } + } + },{ + "name": "storage_chargerate_undefined", + "description": "An asset of type storage must have a charge attribute", + "type": "warning", + "message": "Storage charge rate undefined", + "selects": [{ + "function": "get", + "alias": "storage", + "args": { + "type": "HeatStorage" + } + } + ], + "check": { + "function": "not_null", + "dataset": "storage", + "args": { + "property": "maxChargeRate", + "counts_as_null": [0.0], + "resultMsgJSON": true + } + } + },{ + "name": "storage_dischargerate_undefined", + "description": "An asset of type storage must have a discharge attribute", + "type": "warning", + "message": "Storage discharge rate undefined", + "selects": [{ + "function": "get", + "alias": "storage", + "args": { + "type": "HeatStorage" + } + } + ], + "check": { + "function": "not_null", + "dataset": "storage", + "args": { + "property": "maxDischargeRate", + "counts_as_null": [0.0], + "resultMsgJSON": true + } + } + },{ + "name": "heat_exchanger_capacity", + "description": "An asset of type heat exchanger must have a maximum capacity attribute", + "type": "error", + "message": "Heat exchanger maximum capacity undefined", + "selects": [{ + "function": "get", + "alias": "hex", + "args": { + "type": "HeatExchange" + } + } + ], + "check": { + "function": "multi_cond", + "dataset": "hex", + "args": { + "properties": ["capacity", "heatTransferCoefficient"], + "violations": [0.0, 0.0], + "resultMsgJSON": true + } + } + },{ + "name": "costinformation_exists", + "description": "The asset must have a specified type of costinformation structure ", + "type": "warning", + "message": "The selection of cost information attributes not complete", + "selects": [{ + "function": "get", + "alias": "producers_conversions", + "args": { + "type": ["Producer", "Conversion", "ATES", "Consumer"] + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "producers_conversions", + "args": { + "component": "costInformation", + "attribute": ["installationCosts", "investmentCosts", + "fixedOperationalCosts", "variableOperationalCosts"], + "check_type": "exists", + "resultMsgJSON": true + } + } + },{ + "name": "investmentcost_per_power_undefined", + "description": "The asset must have a specified type of investmentcost structure [EUR/W]", + "type": "error", + "message": "producer costinformation - investmentcosts incomplete or incorrect", + "selects": [{ + "function": "get", + "alias": "producers_conversions", + "args": { + "type": ["Producer", "Conversion", "ATES", "Consumer"] + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "producers_conversions", + "args": { + "component": "costInformation", + "attribute": "investmentCosts", + "check_type": "unit", + "unit_type": ["unit", "perUnit"], + "unit": ["EURO", "WATT"], + "resultMsgJSON": true + } + } + },{ + "name": "investmentcost_per_volume_undefined", + "description": "The asset must have a specified type of investmentcost structure [EUR/m3]", + "type": "error", + "message": "tank storage costinformation - investmentcosts incomplete or incorrect", + "selects": [{ + "function": "get_exclude", + "alias": "tank_storage", + "args": { + "type": "HeatStorage", + "exclude_types": "ATES" + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "tank_storage", + "args": { + "component": "costInformation", + "attribute": "investmentCosts", + "check_type": 
"unit", + "unit_type": ["unit", "perUnit"], + "unit": ["EURO", "CUBIC_METRE"], + "resultMsgJSON": true + } + } + },{ + "name": "investmentcost_per_length_undefined", + "description": "The asset must have a specified type of investmentcost structure [EUR/m]", + "type": "error", + "message": "pipe costinformation - investmentcosts incomplete or incorrect", + "selects": [{ + "function": "get", + "alias": "pipes", + "args": { + "type": "Pipe" + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "pipes", + "args": { + "component": "costInformation", + "attribute": "investmentCosts", + "check_type": "unit", + "unit_type": ["unit", "perUnit"], + "unit": ["EURO", "METRE"], + "resultMsgJSON": true + } + } + },{ + "name": "installation_cost_undefined", + "description": "The asset must have a specified type of installation structure [EUR]", + "type": "error", + "message": " costinformation - installation incomplete or incorrect", + "selects": [{ + "function": "get_exclude", + "alias": "energy_assets", + "args": { + "type": "EnergyAsset", + "exclude_types": ["Pipe", "Joint"] + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "energy_assets", + "args": { + "component": "costInformation", + "attribute": "installationCosts", + "check_type": "unit", + "unit_type": ["unit"], + "unit": ["EURO"], + "resultMsgJSON": true + } + } + },{ + "name": "fixed_opex_undefined", + "description": "The asset must have a specified type of fixed opex structure [EUR/W]", + "type": "error", + "message": " costinformation - fixed operational incomplete or incorrect", + "selects": [{ + "function": "get", + "alias": "producers_conversions", + "args": { + "type": ["Producer", "Conversion", "ATES", "Consumer"] + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "producers_conversions", + "args": { + "component": "costInformation", + "attribute": "fixedOperationalCosts", + "check_type": "unit", + "unit_type": ["unit", "perUnit"], + "unit": ["EURO", "WATT"], + "resultMsgJSON": true + } + } + },{ + "name": "fixedopex_per_volume_undefined", + "description": "The asset must have a specified type of fixed operational structure [EUR/m3]", + "type": "error", + "message": "Tank costinformation - fixed operational incomplete or incorrect", + "selects": [{ + "function": "get_exclude", + "alias": "tank_storage", + "args": { + "type": "HeatStorage", + "exclude_types": "ATES" + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "tank_storage", + "args": { + "component": "costInformation", + "attribute": "investmentCosts", + "check_type": "unit", + "unit_type": ["unit", "perUnit"], + "unit": ["EURO", "CUBIC_METRE"], + "resultMsgJSON": true + } + } + },{ + "name": "var_opex_undefined", + "description": "The asset must have a specified type of variable opex structure [EUR/Wh]", + "type": "error", + "message": " costinformation - variable operational incomplete or incorrect", + "selects": [{ + "function": "get", + "alias": "producers_conversions", + "args": { + "type": ["Producer", "Conversion", "ATES", "Consumer"] + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "producers_conversions", + "args": { + "component": "costInformation", + "attribute": "variableOperationalCosts", + "check_type": "unit", + "unit_type": ["unit", "perUnit"], + "unit": ["EURO", "WATTHOUR"], + "resultMsgJSON": true + } + } + },{ + "name": "non_unique_names", + "description": "The asset must have a 
unique name", + "type": "error", + "message": " The name of this asset is not unique", + "selects": [{ + "function": "get_nameslist", + "alias": "energy_assets", + "args": { + "type": ["EnergyAsset"], + "property": ["name"] + } + } + ], + "check": { + "function": "name_condition", + "dataset": "energy_assets", + "args": { + "condition": "uniqueness", + "resultMsgJSON": true + } + } + },{ + "name": "supply_return_combination", + "description": "Pipes, joints and heat commodities must have a name with +_ret for return line", + "type": "error", + "message": "Pipes, joints and heat commodities must have a name with +_ret for return line", + "selects": [{ + "function": "get_nameslist", + "alias": "pipes_joints", + "args": { + "type": ["Pipe", "Joint", "HeatCommodity"], + "property": ["name"] + } + } + ], + "check": { + "function": "name_condition", + "dataset": "pipes_joints", + "args": { + "condition": "supply_return", + "resultMsgJSON": true + } + } + },{ + "name": "asset_power_undefined", + "description": "An asset must have a power attribute (asset.power)", + "type": "error", + "message": "Asset missing power", + "selects": [{ + "function": "get", + "alias": "producers", + "args": { + "type": ["Producer", "Conversion", "Consumer"] + } + } + ], + "check": { + "function": "not_null", + "dataset": "producers", + "args": { + "property": "power", + "counts_as_null": [0.0], + "resultMsgJSON": true + } + } + }, { + "name": "tank_storage_capacity_undefined", + "description": "A tank storage must have a capacity or volume attribute set", + "type": "error", + "message": "Asset missing capacity or volume", + "selects": [{ + "function": "get_exclude", + "alias": "tank", + "args": { + "type": "HeatStorage", + "exclude_types": "ATES" + + } + } + ], + "check": { + "function": "multi_cond", + "dataset": "tank", + "args": { + "properties": ["volume", "capacity"], + "violations": [0.0, 0.0], + "resultMsgJSON": true + } + } + }, + { + "name": "consumer_profile_undefined", + "description": "An asset of type consumer must have a profile defined", + "type": "error", + "message": "Consumer missing profile", + "selects": [ + { + "function": "get_references", + "alias": "consumerInPorts", + "args": { + "assetType": "Consumer", + "referenceType": "InPort" + } + } + ], + "check": { + "function": "not_null", + "dataset": "consumerInPorts", + "args": { + "property": "profile", + "counts_as_null": [ + "EOrderedSet()" + ], + "resultMsgJSON": true + } + } + },{ + "name": "consumer_profile_not_null", + "description": "An asset of type consumer must have a profile defined", + "type": "error", + "message": "Consumer missing profile", + "selects": [{ + "function": "get_references", + "alias": "consumerInPorts", + "args": { + "assetType": "Consumer", + "referenceType": "InPort" + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "consumerInPorts", + "args": { + "component": "profile", + "attribute": "multiplier", + "check_type": "value", + "resultMsgJSON": true + } + } + } + ], + "id": "6139f62803a5f590c2367933" +} diff --git a/testdata/schema_chess.json b/testdata/schemas/old/schema_chess.json similarity index 100% rename from testdata/schema_chess.json rename to testdata/schemas/old/schema_chess.json diff --git a/testdata/schemas/schema_PoC.json b/testdata/schemas/schema_PoC.json new file mode 100644 index 0000000..2ac8ec2 --- /dev/null +++ b/testdata/schemas/schema_PoC.json @@ -0,0 +1,717 @@ +{ + "name": "PoC validation schema", + "description": "PoC validation schema contains checks for 
OMOTES MILP problem", + "validations": [{ + "name": "heat_carriers", + "description": "Find heat carriers are available", + "type": "error", + "message": "One of supply and return temperature should be defined", + "selects": [{ + "function": "get", + "alias": "heatcarriers", + "args": { + "type": "HeatCommodity" + } + } + ], + "check": { + "function": "multi_cond_xor", + "dataset": "heatcarriers", + "args": { + "properties": ["supplyTemperature", "returnTemperature"], + "violations": [0.0, 0.0], + "resultMsgJSON": true + } + } + },{ + "name": "pipe_connected_to_pipe", + "description": "Find if no Pipes are connected to other Pipes", + "type": "error", + "message": "Pipe connected to another pipe", + "selects": [{ + "function": "get", + "alias": "pipes", + "args": { + "type": "Pipe" + } + } + ], + "check": { + "function": "not_connected_to", + "dataset": "pipes", + "args": { + "assetType": "Pipe", + "resultMsgJSON": true + } + } + }, { + "name": "asset_connected_to_joint", + "description": "Find if no assets other than pipes are connected to Joints", + "type": "error", + "message": "Asset connected to joint", + "selects": [{ + "function": "get", + "alias": "energy_assets", + "args": { + "type": ["Producer", "Consumer", "Storage", "Conversion", "Joint"] + } + } + ], + "check": { + "function": "not_connected_to", + "dataset": "energy_assets", + "args": { + "assetType": "Joint", + "resultMsgJSON": true + } + } + }, { + "name": "unconnected_port", + "description": "Find if any port is left unconnected", + "type": "warning", + "message": "Unconnected port", + "selects": [{ + "function": "get", + "alias": "assets", + "args": { + "type": "EnergyAsset" + } + } + ], + "check": { + "function": "unconnected_port", + "dataset": "assets", + "args": { + "resultMsgJSON": true + } + } + }, { + "name": "number_ports", + "description": "Find if joint does not have exactly 2 ports", + "type": "warning", + "message": "Number of ports incorrect", + "selects": [{ + "function": "get", + "alias": "assets", + "args": { + "type": "Joint" + } + } + ], + "check": { + "function": "number_ports", + "dataset": "assets", + "args": { + "number": 2, + "resultMsgJSON": true + } + } + },{ + "name": "connected_as_producer", + "description": "Find if assets are connected as producers (Outport on supply network)", + "type": "error", + "message": "Asset is not connected as producer", + "selects": [{ + "function": "get", + "alias": "assets", + "args": { + "type": ["Producer", "GasHeater"] + } + } + ], + "check": { + "function": "port_property_combination", + "dataset": "assets", + "args": { + "port_type_larger": "OutPort", + "port_type_smaller": "InPort", + "property_larger": ["carrier", "supplyTemperature"], + "property_smaller": ["carrier", "returnTemperature"], + "resultMsgJSON": true + } + } + },{ + "name": "connected_as_consumer", + "description": "Find if assets are connected as consumer (Inport on supply network)", + "type": "error", + "message": "Asset is not connected as consumer", + "selects": [{ + "function": "get", + "alias": "assets", + "args": { + "type": ["Consumer", "HeatStorage"] + } + } + ], + "check": { + "function": "port_property_combination", + "dataset": "assets", + "args": { + "port_type_larger": "InPort", + "port_type_smaller": "OutPort", + "property_larger": ["carrier", "supplyTemperature"], + "property_smaller": ["carrier", "returnTemperature"], + "resultMsgJSON": true + } + } + },{ + "name": "connected_temp_prim", + "description": "Find if assets are connected properly with secondary and primary", + 
"type": "error", + "message": "Asset is not connected properly connected on primary side", + "selects": [{ + "function": "get", + "alias": "assets", + "args": { + "type": ["HeatExchange", "GenericConversion", "HeatPump"] + } + } + ], + "check": { + "function": "port_property_combination", + "dataset": "assets", + "args": { + "port_type_larger": "InPort", + "port_type_smaller": "OutPort", + "property_larger": ["carrier", "supplyTemperature"], + "property_smaller": ["carrier", "returnTemperature"], + "port_name_larger": "prim", + "port_name_smaller": "prim", + "resultMsgJSON": true + } + } + },{ + "name": "connected_temp_sec", + "description": "Find if assets are connected properly with secondary and primary", + "type": "error", + "message": "Asset is not connected properly connected on secondary side", + "selects": [{ + "function": "get", + "alias": "assets", + "args": { + "type": ["HeatExchange", "GenericConversion", "HeatPump"] + } + } + ], + "check": { + "function": "port_property_combination", + "dataset": "assets", + "args": { + "port_type_larger": "OutPort", + "port_type_smaller": "InPort", + "property_larger": ["carrier", "supplyTemperature"], + "property_smaller": ["carrier", "returnTemperature"], + "port_name_larger": "sec", + "port_name_smaller": "sec", + "resultMsgJSON": true + } + } + },{ + "name": "connected_prim_sec", + "description": "Find if assets are connected properly with secondary and primary", + "type": "error", + "message": "Asset is not connected properly connected between primary and secondary side", + "selects": [{ + "function": "get", + "alias": "assets", + "args": { + "type": ["HeatExchange", "GenericConversion"] + } + } + ], + "check": { + "function": "port_property_combination", + "dataset": "assets", + "args": { + "port_type_larger": "InPort", + "port_type_smaller": "OutPort", + "property_larger": ["carrier", "supplyTemperature"], + "property_smaller": ["carrier", "supplyTemperature"], + "port_name_larger": "prim", + "port_name_smaller": "sec", + "resultMsgJSON": true + } + } + },{ + "name": "connected_prim_sec_return", + "description": "Find if assets are connected properly with secondary and primary", + "type": "error", + "message": "Asset is not connected properly connected between primary and secondary side", + "selects": [{ + "function": "get", + "alias": "assets", + "args": { + "type": ["HeatExchange", "GenericConversion"] + } + } + ], + "check": { + "function": "port_property_combination", + "dataset": "assets", + "args": { + "port_type_larger": "OutPort", + "port_type_smaller": "InPort", + "property_larger": ["carrier", "returnTemperature"], + "property_smaller": ["carrier", "returnTemperature"], + "port_name_larger": "prim", + "port_name_smaller": "sec", + "resultMsgJSON": true + } + } + },{ + "name": "diameter_undefined", + "description": "Find if both innerDiameter and diameter are not defined", + "type": "error", + "message": "Diameter not defined", + "selects": [{ + "function": "get", + "alias": "pipes", + "args": { + "type": "Pipe" + } + } + ], + "check": { + "function": "multi_cond", + "dataset": "pipes", + "args": { + "properties": ["diameter", "innerDiameter"], + "violations": ["VALUE_SPECIFIED", 0.0], + "resultMsgJSON": true + } + } + },{ + "name": "storage_chargerate_undefined", + "description": "An asset of type storage must have a charge attribute", + "type": "warning", + "message": "Storage charge rate undefined", + "selects": [{ + "function": "get", + "alias": "storage", + "args": { + "type": "HeatStorage" + } + } + ], + "check": { + 
"function": "not_null", + "dataset": "storage", + "args": { + "property": "maxChargeRate", + "counts_as_null": [0.0], + "resultMsgJSON": true + } + } + },{ + "name": "storage_dischargerate_undefined", + "description": "An asset of type storage must have a discharge attribute", + "type": "warning", + "message": "Storage discharge rate undefined", + "selects": [{ + "function": "get", + "alias": "storage", + "args": { + "type": "HeatStorage" + } + } + ], + "check": { + "function": "not_null", + "dataset": "storage", + "args": { + "property": "maxDischargeRate", + "counts_as_null": [0.0], + "resultMsgJSON": true + } + } + },{ + "name": "heat_exchanger_capacity", + "description": "An asset of type heat exchanger must have a maximum capacity attribute", + "type": "error", + "message": "Heat exchanger maximum capacity undefined", + "selects": [{ + "function": "get", + "alias": "hex", + "args": { + "type": "HeatExchange" + } + } + ], + "check": { + "function": "multi_cond", + "dataset": "hex", + "args": { + "properties": ["capacity", "heatTransferCoefficient"], + "violations": [0.0, 0.0], + "resultMsgJSON": true + } + } + },{ + "name": "costinformation_exists", + "description": "The asset must have a specified type of costinformation structure ", + "type": "warning", + "message": "The selection of cost information attributes not complete", + "selects": [{ + "function": "get", + "alias": "producers_conversions", + "args": { + "type": ["Producer", "Conversion", "ATES", "Consumer"] + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "producers_conversions", + "args": { + "component": "costInformation", + "attribute": ["installationCosts", "investmentCosts", + "fixedOperationalCosts", "variableOperationalCosts"], + "check_type": "exists", + "resultMsgJSON": true + } + } + },{ + "name": "investmentcost_per_power_undefined", + "description": "The asset must have a specified type of investmentcost structure [EUR/W]", + "type": "error", + "message": "producer costinformation - investmentcosts incomplete or incorrect", + "selects": [{ + "function": "get", + "alias": "producers_conversions", + "args": { + "type": ["Producer", "Conversion", "ATES", "Consumer"] + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "producers_conversions", + "args": { + "component": "costInformation", + "attribute": "investmentCosts", + "check_type": "unit", + "unit_type": ["unit", "perUnit"], + "unit": ["EURO", "WATT"], + "resultMsgJSON": true + } + } + },{ + "name": "investmentcost_per_volume_undefined", + "description": "The asset must have a specified type of investmentcost structure [EUR/m3]", + "type": "error", + "message": "tank storage costinformation - investmentcosts incomplete or incorrect", + "selects": [{ + "function": "get_exclude", + "alias": "tank_storage", + "args": { + "type": "HeatStorage", + "exclude_types": "ATES" + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "tank_storage", + "args": { + "component": "costInformation", + "attribute": "investmentCosts", + "check_type": "unit", + "unit_type": ["unit", "perUnit"], + "unit": ["EURO", "CUBIC_METRE"], + "resultMsgJSON": true + } + } + },{ + "name": "investmentcost_per_length_undefined", + "description": "The asset must have a specified type of investmentcost structure [EUR/m]", + "type": "error", + "message": "pipe costinformation - investmentcosts incomplete or incorrect", + "selects": [{ + "function": "get", + "alias": "pipes", + "args": { + "type": 
"Pipe" + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "pipes", + "args": { + "component": "costInformation", + "attribute": "investmentCosts", + "check_type": "unit", + "unit_type": ["unit", "perUnit"], + "unit": ["EURO", "METRE"], + "resultMsgJSON": true + } + } + },{ + "name": "installation_cost_undefined", + "description": "The asset must have a specified type of installation structure [EUR]", + "type": "error", + "message": " costinformation - installation incomplete or incorrect", + "selects": [{ + "function": "get_exclude", + "alias": "energy_assets", + "args": { + "type": "EnergyAsset", + "exclude_types": ["Pipe", "Joint"] + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "energy_assets", + "args": { + "component": "costInformation", + "attribute": "installationCosts", + "check_type": "unit", + "unit_type": ["unit"], + "unit": ["EURO"], + "resultMsgJSON": true + } + } + },{ + "name": "fixed_opex_undefined", + "description": "The asset must have a specified type of fixed opex structure [EUR/W]", + "type": "error", + "message": " costinformation - fixed operational incomplete or incorrect", + "selects": [{ + "function": "get", + "alias": "producers_conversions", + "args": { + "type": ["Producer", "Conversion", "ATES", "Consumer"] + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "producers_conversions", + "args": { + "component": "costInformation", + "attribute": "fixedOperationalCosts", + "check_type": "unit", + "unit_type": ["unit", "perUnit"], + "unit": ["EURO", "WATT"], + "resultMsgJSON": true + } + } + },{ + "name": "fixedopex_per_volume_undefined", + "description": "The asset must have a specified type of fixed operational structure [EUR/m3]", + "type": "error", + "message": "Tank costinformation - fixed operational incomplete or incorrect", + "selects": [{ + "function": "get_exclude", + "alias": "tank_storage", + "args": { + "type": "HeatStorage", + "exclude_types": "ATES" + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "tank_storage", + "args": { + "component": "costInformation", + "attribute": "investmentCosts", + "check_type": "unit", + "unit_type": ["unit", "perUnit"], + "unit": ["EURO", "CUBIC_METRE"], + "resultMsgJSON": true + } + } + },{ + "name": "var_opex_undefined", + "description": "The asset must have a specified type of variable opex structure [EUR/Wh]", + "type": "error", + "message": " costinformation - variable operational incomplete or incorrect", + "selects": [{ + "function": "get", + "alias": "producers_conversions", + "args": { + "type": ["Producer", "Conversion", "ATES", "Consumer"] + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "producers_conversions", + "args": { + "component": "costInformation", + "attribute": "variableOperationalCosts", + "check_type": "unit", + "unit_type": ["unit", "perUnit"], + "unit": ["EURO", "WATTHOUR"], + "resultMsgJSON": true + } + } + },{ + "name": "non_unique_names", + "description": "The asset must have a unique name", + "type": "error", + "message": " The name of this asset is not unique", + "selects": [{ + "function": "get_nameslist", + "alias": "energy_assets", + "args": { + "type": ["EnergyAsset"], + "property": ["name"] + } + } + ], + "check": { + "function": "name_condition", + "dataset": "energy_assets", + "args": { + "condition": "uniqueness", + "resultMsgJSON": true + } + } + },{ + "name": "supply_return_combination", + 
"description": "Pipes, joints and heat commodities must have a name with +_ret for return line", + "type": "error", + "message": "Pipes, joints and heat commodities must have a name with +_ret for return line", + "selects": [{ + "function": "get_nameslist", + "alias": "pipes_joints", + "args": { + "type": ["Pipe", "Joint", "HeatCommodity"], + "property": ["name"] + } + } + ], + "check": { + "function": "name_condition", + "dataset": "pipes_joints", + "args": { + "condition": "supply_return", + "resultMsgJSON": true + } + } + },{ + "name": "asset_power_undefined", + "description": "An asset must have a power attribute (asset.power)", + "type": "error", + "message": "Asset missing power", + "selects": [{ + "function": "get", + "alias": "producers", + "args": { + "type": ["Producer", "Conversion", "Consumer"] + } + } + ], + "check": { + "function": "not_null", + "dataset": "producers", + "args": { + "property": "power", + "counts_as_null": [0.0], + "resultMsgJSON": true + } + } + }, { + "name": "heatpump_cop_undefined", + "description": "An asset must have a cop attribute", + "type": "error", + "message": "Asset missing cop", + "selects": [{ + "function": "get", + "alias": "heatpumps", + "args": { + "type": ["HeatPump"] + } + } + ], + "check": { + "function": "not_null", + "dataset": "heatpumps", + "args": { + "property": "COP", + "counts_as_null": [0.0], + "resultMsgJSON": true + } + } + },{ + "name": "tank_storage_capacity_undefined", + "description": "A tank storage must have a capacity or volume attribute set", + "type": "error", + "message": "Asset missing capacity or volume", + "selects": [{ + "function": "get_exclude", + "alias": "tank", + "args": { + "type": "HeatStorage", + "exclude_types": "ATES" + + } + } + ], + "check": { + "function": "multi_cond", + "dataset": "tank", + "args": { + "properties": ["volume", "capacity"], + "violations": [0.0, 0.0], + "resultMsgJSON": true + } + } + }, + { + "name": "consumer_profile_undefined", + "description": "An asset of type consumer must have a profile defined", + "type": "error", + "message": "Consumer missing profile", + "selects": [ + { + "function": "get_references", + "alias": "consumerInPorts", + "args": { + "assetType": "Consumer", + "referenceType": "InPort" + } + } + ], + "check": { + "function": "not_null", + "dataset": "consumerInPorts", + "args": { + "property": "profile", + "counts_as_null": [ + "EOrderedSet()" + ], + "resultMsgJSON": true + } + } + },{ + "name": "consumer_profile_not_null", + "description": "An asset of type consumer must have a profile defined", + "type": "error", + "message": "Consumer missing profile", + "selects": [{ + "function": "get_references", + "alias": "consumerInPorts", + "args": { + "assetType": "Consumer", + "referenceType": "InPort" + } + } + ], + "check": { + "function": "check_child_attribute_not_null", + "dataset": "consumerInPorts", + "args": { + "component": "profile", + "attribute": "multiplier", + "check_type": "value", + "resultMsgJSON": true + } + } + } + ], + "id": "6139f62803a5f590c2367933" +} diff --git a/testdata/schema_chess_2022-08-18.json b/testdata/schemas/schema_chess_2022-08-18.json similarity index 100% rename from testdata/schema_chess_2022-08-18.json rename to testdata/schemas/schema_chess_2022-08-18.json diff --git a/testdata/schema_test_1.json b/testdata/schemas/schema_test_1.json similarity index 100% rename from testdata/schema_test_1.json rename to testdata/schemas/schema_test_1.json diff --git a/testdata/schema_test_2.json b/testdata/schemas/schema_test_2.json 
similarity index 98% rename from testdata/schema_test_2.json rename to testdata/schemas/schema_test_2.json index 02e9041..5c8c2ab 100644 --- a/testdata/schema_test_2.json +++ b/testdata/schemas/schema_test_2.json @@ -40,7 +40,8 @@ { "function": "not_null", "args": { - "property": "port.profile" + "property": "port.profile", + "counts_as_null": ["EOrderedSet()"] } } ]
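
Note on the extended sum select function: with the optional `condition` argument it no longer sums the property values but counts the entries whose enum value name matches the condition, which is how the `number_of_assets*` validations count `ENABLED` and `OPTIONAL` assets. A minimal standalone sketch of that behaviour, using hypothetical `Asset`/`AssetState` stand-ins instead of the real ESDL classes:

```python
from enum import Enum


class AssetState(Enum):
    """Hypothetical stand-in for the ESDL asset state enumeration."""
    ENABLED = 1
    OPTIONAL = 2
    DISABLED = 3


class Asset:
    """Hypothetical minimal asset carrying only the attributes used below."""
    def __init__(self, state: AssetState, power: float):
        self.state = state
        self.power = power


def select_sum(entries, prop, condition=None):
    """Without a condition: sum the property values.
    With a condition: count the entries whose enum value name occurs in it."""
    count = 0
    for entry in entries:
        value = getattr(entry, prop)
        if condition is None:
            count += value
        elif value.name in condition:
            count += 1
    return count


assets = [Asset(AssetState.ENABLED, 5e6), Asset(AssetState.OPTIONAL, 2e6)]
print(select_sum(assets, "power"))                       # 7000000.0
print(select_sum(assets, "state", condition="ENABLED"))  # 1
```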
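
The test fixtures now resolve test data relative to the repository root instead of the current working directory. A short sketch of the pattern used in `esdlvalidator/validation/tests/__init__.py`; the `/` operator is equivalent to the `Path.joinpath` calls in the diff, and the `parents` index depends on how deep the file sits below the repository root:

```python
from pathlib import Path

# __init__.py lives in esdlvalidator/validation/tests/, i.e. three levels
# below the repository root, so parents[3] is the root itself.
REPO_ROOT = Path(__file__).parents[3]

SCHEMA_POC = REPO_ROOT / "testdata" / "schemas" / "schema_PoC.json"
SINGLE_PIPES = REPO_ROOT / "testdata" / "esdls" / "Single_pipes.esdl"
```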
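
For reference, a sketch of how the new PoC schema can be exercised outside the unit tests, mirroring the calls used in `test_validator.py` and the test fixtures. It assumes the schema repository accepts the parsed schema JSON as a dictionary (as the test helpers suggest) and that the script sits three levels below the repository root:

```python
import json
from pathlib import Path

from esdlvalidator.core.esdl import utils
from esdlvalidator.validation.file_repository import FileSchemaRepository
from esdlvalidator.validation.validator import EsdlValidator

root = Path(__file__).parents[3]  # adjust to where this script actually lives

# The tests start from an empty schema database, so do the same here.
db_path = root / "testdata" / "validation_test.db"
db_path.write_text("")

# Assumption: the repository stores the parsed schema JSON as a dictionary.
with open(root / "testdata" / "schemas" / "schema_PoC.json") as f:
    schema_data = json.load(f)

repo = FileSchemaRepository(db_path)
schema = repo.get_by_id(repo.insert(schema_data))

esh = utils.get_esh_from_file(root / "testdata" / "esdls" / "Single_pipes.esdl")
result = EsdlValidator().validate(esh.resource, [schema])

# Not every validation carries both lists, hence the getattr defaults.
for validation in result.schemas[0].validations:
    errors = getattr(validation, "errors", [])
    warnings = getattr(validation, "warnings", [])
    if errors or warnings:
        print(f"{validation.name}: {len(errors)} errors, {len(warnings)} warnings")
```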
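
The `supply_return_combination` check encodes the naming convention that every pipe, joint and heat commodity on the supply side has a counterpart whose name carries the `_ret` suffix on the return side. The actual `name_condition` implementation is not part of this diff; an illustrative standalone version of the convention check could look like this:

```python
def missing_return_counterparts(names):
    """Treat names without the '_ret' suffix as supply-side assets and report
    those that lack a '<name>_ret' counterpart on the return side."""
    present = set(names)
    return [name for name in names
            if not name.endswith("_ret") and f"{name}_ret" not in present]


print(missing_return_counterparts(["Pipe_1", "Pipe_1_ret", "Pipe_2", "Joint_3"]))
# ['Pipe_2', 'Joint_3']
```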