Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
78 changes: 77 additions & 1 deletion .github/workflows/integration_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,13 @@ on:
MCP_VENUE_TEST_OGC_PROCESSES_API_ENDPOINT:
description: "Base URL for the OGC endpoint in MCP Venue Test (i.e. https://abcdef12345.execute-api.us-west-2.amazonaws.com/test/ogc/api)"
type: string
# TODO: add MCP_VENUE_OPS inputs
MCP_VENUE_OPS_AIRFLOW_API_ENDPOINT:
description: "Base URL for the Airflow API endpoint in MCP Venue Ops (e.g. https://abcdef12345.execute-api.us-west-2.amazonaws.com/prod/sps/api/v1)"
type: string
MCP_VENUE_OPS_OGC_PROCESSES_API_ENDPOINT:
description: "Base URL for the OGC endpoint in MCP Venue Ops (e.g. https://abcdef12345.execute-api.us-west-2.amazonaws.com/prod/ogc/api)"
type: string

jobs:

Dev-Venue-Airflow-API:
Expand Down Expand Up @@ -94,6 +100,41 @@ jobs:
exit 1
fi

# Runs the SPS integration tests against the Airflow API of the MCP Venue Ops
# (production) deployment. Mirrors the Dev/Test venue jobs in this workflow.
Ops-Venue-Airflow-API:
runs-on: ubuntu-latest

steps:
- name: Checkout
uses: actions/checkout@v4

- name: Setup
uses: ./.github/actions/setup-action
continue-on-error: false

# continue-on-error: true lets the later "Check Tests Results" step run and
# report the outcome instead of aborting the job on the first pytest failure.
# Endpoint flags prefer the workflow_dispatch inputs and fall back to
# repository-level variables when the inputs are empty.
# NOTE(review): --venue="prod" relies on conftest accepting "prod" as a
# choice (unity-py naming) — confirm against unity-test/conftest.py.
- name: MCP Venue Ops - Integration tests with Airflow API
id: mcp_venue_ops_integration_tests_with_airflow_api
continue-on-error: true
env:
UNITY_USER: ${{ secrets.MCP_VENUE_OPS_UNITY_USERNAME }}
UNITY_PASSWORD: ${{ secrets.MCP_VENUE_OPS_UNITY_PASSWORD }}
UNITY_CLIENT_ID: ${{ secrets.MCP_VENUE_OPS_UNITY_CLIENTID }}
run: |
pytest -vv -s --gherkin-terminal-reporter \
unity-test/system/integration/step_defs/test_cwl_workflows_with_airflow_api.py \
--venue="prod" \
--airflow-endpoint=${{ github.event.inputs.MCP_VENUE_OPS_AIRFLOW_API_ENDPOINT || vars.MCP_VENUE_OPS_AIRFLOW_API_ENDPOINT }} \
--ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_OPS_OGC_PROCESSES_API_ENDPOINT || vars.MCP_VENUE_OPS_OGC_PROCESSES_API_ENDPOINT }}

# Always runs; fails the job explicitly when the tolerated test step above
# did not succeed, so the workflow still reports red on test failure.
- name: Check Tests Results
if: always()
run: |
tests_status=${{ steps.mcp_venue_ops_integration_tests_with_airflow_api.outcome }}
echo "Tests Status: $tests_status"
if [ "$tests_status" != "success" ]; then
echo "Integration Tests with Airflow API on MCP Venue Ops failed."
exit 1
fi

Dev-Venue-OGC-API:
runs-on: ubuntu-latest

Expand Down Expand Up @@ -163,3 +204,38 @@ jobs:
echo "Integration Tests with OGC API on MCP Venue Test failed."
exit 1
fi

# Runs the SPS integration tests against the OGC Processes API of the MCP
# Venue Ops (production) deployment. Mirrors the Dev/Test venue OGC jobs.
Ops-Venue-OGC-API:
runs-on: ubuntu-latest

steps:
- name: Checkout
uses: actions/checkout@v4

- name: Setup
uses: ./.github/actions/setup-action
continue-on-error: false

# continue-on-error: true lets the later "Check Tests Results" step run and
# report the outcome instead of aborting the job on the first pytest failure.
# Endpoint flags prefer the workflow_dispatch inputs and fall back to
# repository-level variables when the inputs are empty.
- name: MCP Venue Ops - Integration tests with OGC API
id: mcp_venue_ops_integration_tests_with_ogc_api
continue-on-error: true
env:
UNITY_USER: ${{ secrets.MCP_VENUE_OPS_UNITY_USERNAME }}
UNITY_PASSWORD: ${{ secrets.MCP_VENUE_OPS_UNITY_PASSWORD }}
UNITY_CLIENT_ID: ${{ secrets.MCP_VENUE_OPS_UNITY_CLIENTID }}
run: |
pytest -vv -s --gherkin-terminal-reporter \
unity-test/system/integration/step_defs/test_cwl_workflows_with_ogc_api.py \
--venue="prod" \
--airflow-endpoint=${{ github.event.inputs.MCP_VENUE_OPS_AIRFLOW_API_ENDPOINT || vars.MCP_VENUE_OPS_AIRFLOW_API_ENDPOINT }} \
--ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_OPS_OGC_PROCESSES_API_ENDPOINT || vars.MCP_VENUE_OPS_OGC_PROCESSES_API_ENDPOINT }}

# Always runs; fails the job explicitly when the tolerated test step above
# did not succeed, so the workflow still reports red on test failure.
- name: Check Tests Results
if: always()
run: |
tests_status=${{ steps.mcp_venue_ops_integration_tests_with_ogc_api.outcome }}
echo "Tests Status: $tests_status"
if [ "$tests_status" != "success" ]; then
echo "Integration Tests with OGC API on MCP Venue Ops failed."
exit 1
fi
17 changes: 15 additions & 2 deletions unity-test/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,9 @@ def pytest_addoption(parser):
"--venue",
action="store",
default=None,
choices=("dev", "test", "ops"),
help="The venue in which the cluster will be deployed (dev, test, ops).",
# Note: unity-py uses "prod" but sps software uses "ops"
choices=("dev", "test", "ops", "prod"),
help="The venue in which the cluster will be deployed (dev, test, ops, prod).",
)
parser.addoption(
"--developer",
Expand Down Expand Up @@ -176,3 +177,15 @@ def cwl_dag_modular_process(ogc_processes):
if p.id == "cwl_dag_modular":
return p
return None


@pytest.fixture(scope="session")
def karpenter_dag_process(ogc_processes):
    """
    Return the Karpenter Test DAG ("karpenter_test") from the list of
    available OGC processes, or None when it is not deployed.
    """
    candidates = (proc for proc in ogc_processes if proc.id == "karpenter_test")
    return next(candidates, None)
Original file line number Diff line number Diff line change
@@ -1,21 +1,20 @@
Feature: Execute CWL workflows using the Airflow API
Feature: Execute DAG workflows using the Airflow API

As a UNITY SPS user
I want to execute a CWL workflow using the Airflow API
I want to execute a DAG workflow using the Airflow API
And verify that it completes successfully
So that I can inspect the results

Scenario Outline: Successful execution of a CWL workflow with the Airflow API
Scenario Outline: Successful execution of a DAG workflow with the Airflow API
Given the Airflow API is up and running
When I trigger a dag run for the <test_case> workflow using the <test_dag> DAG
Then I receive a response with status code 200
And I see an eventual successful dag run

Examples:
| test_case | test_dag |
| KARPENTER | karpenter_test |
| EMIT | cwl_dag |
# | SBG_E2E_SCALE | cwl_dag |
| SBG_PREPROCESS | cwl_dag |
| EMIT | cwl_dag_modular |
| SBG_PREPROCESS | cwl_dag_modular |
# | SBG_ISOFIT | cwl_dag_modular |
Original file line number Diff line number Diff line change
@@ -1,21 +1,20 @@
Feature: Execute CWL workflows using the OGC API
Feature: Execute DAG workflows using the OGC API

As a UNITY SPS user
I want to execute a CWL workflow using the OGC API
I want to execute a DAG workflow using the OGC API
And verify that it completes successfully
So that I can inspect the results

Scenario Outline: Successful execution of a CWL workflow with the OGC API
Scenario Outline: Successful execution of a DAG workflow with the OGC API
Given the OGC API is up and running
When I trigger a <test_case> OGC job for the <test_dag> OGC process
Then the job starts executing
And I see an eventual successful job

Examples:
| test_case | test_dag |
| KARPENTER | karpenter_test |
| EMIT | cwl_dag |
# | SBG_E2E_SCALE | cwl_dag |
| SBG_PREPROCESS | cwl_dag |
| EMIT | cwl_dag_modular |
| SBG_PREPROCESS | cwl_dag_modular |
# | SBG_ISOFIT | cwl_dag_modular |
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Deployment smoke test: triggers the "karpenter_test" DAG through the
# Airflow API and waits for a successful run, exercising on-demand node
# provisioning by Karpenter. Step wording is bound to the pytest-bdd step
# definitions, so it must not be reworded without updating them.
Feature: Airflow Karpenter Test Workflow

As an SPS user
I want to ensure that the system has been successfully deployed to a given venue
So that I can execute workflows as DAGs while provisioning nodes as needed

Scenario: Execute the Karpenter Test Workflow
Given the Airflow API is up and running
When I trigger a run for the Karpenter Test DAG
Then I receive a response with status code 200
And I see an eventual successful DAG run
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
# This test executes the specified CWL workflow
# using the CWL DAG (classic or modular) submitted through the Airflow API.
# The workflow parameters are contained in a YAML file which is venue-dependent.
# The CWL DAGs (classic and modular) must already be deployed in Airflow,
# and it is invoked via the Airflow API.
# The CWL task is executed via a KubernetesPodOperator on a worker node
# This test executes the specified DAG workflow using the Airflow API.
# The workflow parameters are contained in a YAML file which may be venue-dependent.
# The DAG must already be deployed in Airflow.
# The DAG tasks are executed via a KubernetesPodOperator on a worker node
# that is dynamically provisioned by Karpenter.
import json
from pathlib import Path
Expand All @@ -19,7 +17,9 @@
# DAG parameters are venue specific
CWL_DAG_ID = "cwl_dag"
CWL_DAG_MODULAR_ID = "cwl_dag_modular"
KARPENTER_DAG_ID = "karpenter_test"
DAG_PARAMETERS = {
KARPENTER_DAG_ID: {"KARPENTER": {"placeholder": 1}},
CWL_DAG_MODULAR_ID: {
"EMIT": {
"stac_json": {
Expand All @@ -38,7 +38,7 @@
"process_workflow": "http://awslbdockstorestack-lb-1429770210.us-west-2.elb.amazonaws.com:9998/api/ga4gh/trs/v2/tools/%23workflow%2Fdockstore.org%2Fedwinsarkissian%2FSBG-unity-preprocess-mod/versions/4/PLAIN-CWL/descriptor/%2Fprocess.cwl",
"process_args": {"dev": json.dumps({})},
"log_level": "INFO",
"request_instance_type": "t3.2xlarge",
"request_instance_type": "r7i.2xlarge",
"request_storage": "100Gi",
},
"SBG_ISOFIT": {
Expand Down Expand Up @@ -96,8 +96,8 @@
}


@scenario(FEATURE_FILE, "Successful execution of a CWL workflow with the Airflow API")
def test_successful_execution_of_a_cwl_workflow_with_the_airflow_api():
@scenario(FEATURE_FILE, "Successful execution of a DAG workflow with the Airflow API")
def test_successful_execution_of_a_dag_workflow_with_the_airflow_api():
pass


Expand All @@ -118,13 +118,16 @@ def trigger_dag(airflow_api_url, fetch_token, venue, test_case, test_dag):
try:

# configuration common to all DAGs
job_config = {
"conf": {
"log_level": f'{DAG_PARAMETERS[test_dag][test_case]["log_level"]}',
"request_storage": f'{DAG_PARAMETERS[test_dag][test_case]["request_storage"]}',
"request_instance_type": f'{DAG_PARAMETERS[test_dag][test_case]["request_instance_type"]}',
if test_dag == KARPENTER_DAG_ID:
job_config = {"conf": DAG_PARAMETERS[KARPENTER_DAG_ID]}
else:
job_config = {
"conf": {
"log_level": f'{DAG_PARAMETERS[test_dag][test_case]["log_level"]}',
"request_storage": f'{DAG_PARAMETERS[test_dag][test_case]["request_storage"]}',
"request_instance_type": f'{DAG_PARAMETERS[test_dag][test_case]["request_instance_type"]}',
}
}
}

# configuration specific to CWL_DAG
if test_dag == CWL_DAG_ID:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
# This test executes the specified CWL workflow
# using the CWL DAG OGC process submitted through the OGC API.
# The workflow parameters are contained in a YAML file which is venue-dependent.
# The CWL DAG OGC process must already be deployed in Airflow,
# This test executes the specified DAG workflow
# using the DAG OGC process submitted through the OGC API.
# The workflow parameters are contained in a YAML file which may be venue-dependent.
# The DAG OGC process must already be deployed in Airflow,
# and it is invoked via the OGC API.
# The CWL task is executed via a KubernetesPodOperator on a worker node
# The DAG tasks are executed via a KubernetesPodOperator on a worker node
# that is dynamically provisioned by Karpenter.
import json
from pathlib import Path
Expand All @@ -21,6 +21,13 @@
# DAG parameters are venue specific
CWL_DAG_ID = "cwl_dag"
CWL_DAG_MODULAR_ID = "cwl_dag_modular"
KARPENTER_DAG_ID = "karpenter_test"
GENERIC_DAG_DATA = {
"KARPENTER": {
"inputs": {"placeholder": 1},
"outputs": {"result": {"transmissionMode": "reference"}},
},
}
CWL_DAG_DATA = {
"EMIT": {
"inputs": {
Expand Down Expand Up @@ -92,7 +99,7 @@
"process_workflow": "http://awslbdockstorestack-lb-1429770210.us-west-2.elb.amazonaws.com:9998/api/ga4gh/trs/v2/tools/%23workflow%2Fdockstore.org%2Fedwinsarkissian%2FSBG-unity-preprocess-mod/versions/4/PLAIN-CWL/descriptor/%2Fprocess.cwl",
"process_args": {"dev": json.dumps({})},
"log_level": "INFO",
"request_instance_type": "t3.2xlarge",
"request_instance_type": "r7i.2xlarge",
"request_storage": "100Gi",
},
"outputs": {"result": {"transmissionMode": "reference"}},
Expand All @@ -113,8 +120,8 @@
}


@scenario(FEATURE_FILE, "Successful execution of a CWL workflow with the OGC API")
def test_successful_execution_of_a_cwl_workflow_with_the_ogc_api():
@scenario(FEATURE_FILE, "Successful execution of a DAG workflow with the OGC API")
def test_successful_execution_of_a_dag_workflow_with_the_ogc_api():
pass


Expand All @@ -124,7 +131,9 @@ def api_up_and_running(ogc_processes):


@when(parsers.parse("I trigger a {test_case} OGC job for the {test_dag} OGC process"), target_fixture="job")
def trigger_process(cwl_dag_process, cwl_dag_modular_process, venue, test_case, test_dag):
def trigger_process(
cwl_dag_process, cwl_dag_modular_process, karpenter_dag_process, venue, test_case, test_dag
):

# check that this test_case and test_dag are enabled for the specified venue
ogc_process = None
Expand All @@ -140,6 +149,9 @@ def trigger_process(cwl_dag_process, cwl_dag_modular_process, venue, test_case,
payload = CWL_DAG_MODULAR_DATA[test_case]
payload["inputs"]["stac_json"] = payload["inputs"]["stac_json"][venue]
payload["inputs"]["process_args"] = payload["inputs"]["process_args"][venue]
elif test_dag == KARPENTER_DAG_ID:
ogc_process = karpenter_dag_process
payload = GENERIC_DAG_DATA[test_case]

print(ogc_process)
assert ogc_process is not None
Expand Down
Loading