From ef30e170d410ddf398ffd66e87c7b453cfb33158 Mon Sep 17 00:00:00 2001 From: Kevin Yang Date: Sat, 27 Dec 2025 23:17:59 +0800 Subject: [PATCH 01/10] update custom data format --- documents/CustomizeData.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/documents/CustomizeData.md b/documents/CustomizeData.md index 12946833a..af10bbf3a 100644 --- a/documents/CustomizeData.md +++ b/documents/CustomizeData.md @@ -4,8 +4,8 @@ If you would like to update the solution to leverage your own data please follow > Note: you will need to complete the deployment steps [here](./DeploymentGuide.md) before proceeding. ## Prerequisites: -1. Your data will need to be in JSON or wav format with the file name formated prefixed with "convo" then a GUID followed by a timestamp. For more examples of the data format, please review the sample transcripts and audio data included [here](/infra/data/) - * Example: convo_32e38683-bbf7-407e-a541-09b37b77921d_2024-12-07 04%3A00%3A00 +1. Your data will need to be in JSON or wav format with the file name formatted with the prefix "convo", then a GUID, followed by a timestamp. + * Example: convo_32e38683-bbf7-407e-a541-09b37b77921d_2024-12-07 04_00_00.wav 1. Navigate to the storage account in the resource group you are using for this solution. 
From 0406a99e3d25c353677be553f42b4e811b774a20 Mon Sep 17 00:00:00 2001 From: Vamshi-Microsoft Date: Wed, 14 Jan 2026 12:07:08 +0530 Subject: [PATCH 02/10] ci: fixed pipeline failures --- .github/workflows/deploy-linux.yml | 194 +++++++++++++++++-- .github/workflows/deploy-orchestrator.yml | 12 +- .github/workflows/deploy-windows.yml | 194 +++++++++++++++++-- .github/workflows/job-azure-deploy.yml | 70 ++++--- .github/workflows/job-deploy-linux.yml | 4 +- .github/workflows/job-deploy-windows.yml | 4 +- .github/workflows/job-send-notifications.yml | 8 +- 7 files changed, 420 insertions(+), 66 deletions(-) diff --git a/.github/workflows/deploy-linux.yml b/.github/workflows/deploy-linux.yml index c5e66ccd0..e67f53822 100644 --- a/.github/workflows/deploy-linux.yml +++ b/.github/workflows/deploy-linux.yml @@ -46,7 +46,7 @@ on: required: false default: false type: boolean - exp: + EXP: description: 'Enable EXP' required: false default: false @@ -70,12 +70,12 @@ on: - 'GoldenPath-Testing' - 'Smoke-Testing' - 'None' - azure_env_log_anlytics_workspace_id: + AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: description: 'Log Analytics Workspace ID (Optional)' required: false default: '' type: string - azure_existing_ai_project_resource_id: + AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: description: 'AI Project Resource ID (Optional)' required: false default: '' @@ -91,19 +91,187 @@ permissions: actions: read jobs: + validate-inputs: + runs-on: ubuntu-latest + outputs: + validation_passed: ${{ steps.validate.outputs.passed }} + azure_location: ${{ steps.validate.outputs.azure_location }} + resource_group_name: ${{ steps.validate.outputs.resource_group_name }} + waf_enabled: ${{ steps.validate.outputs.waf_enabled }} + exp: ${{ steps.validate.outputs.exp }} + build_docker_image: ${{ steps.validate.outputs.build_docker_image }} + cleanup_resources: ${{ steps.validate.outputs.cleanup_resources }} + run_e2e_tests: ${{ steps.validate.outputs.run_e2e_tests }} + azure_env_log_analytics_workspace_id: 
${{ steps.validate.outputs.azure_env_log_analytics_workspace_id }} + azure_existing_ai_project_resource_id: ${{ steps.validate.outputs.azure_existing_ai_project_resource_id }} + existing_webapp_url: ${{ steps.validate.outputs.existing_webapp_url }} + steps: + - name: Validate Workflow Input Parameters + id: validate + shell: bash + env: + INPUT_AZURE_LOCATION: ${{ github.event.inputs.azure_location }} + INPUT_RESOURCE_GROUP_NAME: ${{ github.event.inputs.resource_group_name }} + INPUT_WAF_ENABLED: ${{ github.event.inputs.waf_enabled }} + INPUT_EXP: ${{ github.event.inputs.EXP }} + INPUT_BUILD_DOCKER_IMAGE: ${{ github.event.inputs.build_docker_image }} + INPUT_CLEANUP_RESOURCES: ${{ github.event.inputs.cleanup_resources }} + INPUT_RUN_E2E_TESTS: ${{ github.event.inputs.run_e2e_tests }} + INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ github.event.inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} + INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ github.event.inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} + INPUT_EXISTING_WEBAPP_URL: ${{ github.event.inputs.existing_webapp_url }} + run: | + echo "🔍 Validating workflow input parameters..." + VALIDATION_FAILED=false + + # Validate azure_location (Azure region format) + LOCATION="${INPUT_AZURE_LOCATION:-australiaeast}" + + if [[ ! "$LOCATION" =~ ^[a-z0-9]+$ ]]; then + echo "❌ ERROR: azure_location '$LOCATION' is invalid. Must contain only lowercase letters and numbers" + VALIDATION_FAILED=true + else + echo "✅ azure_location: '$LOCATION' is valid" + fi + + # Validate resource_group_name (Azure naming convention, optional) + if [[ -n "$INPUT_RESOURCE_GROUP_NAME" ]]; then + if [[ ! "$INPUT_RESOURCE_GROUP_NAME" =~ ^[a-zA-Z0-9._\(\)-]+$ ]] || [[ "$INPUT_RESOURCE_GROUP_NAME" =~ \.$ ]]; then + echo "❌ ERROR: resource_group_name '$INPUT_RESOURCE_GROUP_NAME' is invalid. Must contain only alphanumerics, periods, underscores, hyphens, and parentheses. Cannot end with period." 
+ VALIDATION_FAILED=true + elif [[ ${#INPUT_RESOURCE_GROUP_NAME} -gt 90 ]]; then + echo "❌ ERROR: resource_group_name '$INPUT_RESOURCE_GROUP_NAME' exceeds 90 characters (length: ${#INPUT_RESOURCE_GROUP_NAME})" + VALIDATION_FAILED=true + else + echo "✅ resource_group_name: '$INPUT_RESOURCE_GROUP_NAME' is valid" + fi + else + echo "✅ resource_group_name: Not provided (will be auto-generated)" + fi + + # Validate waf_enabled (boolean) + WAF_ENABLED="${INPUT_WAF_ENABLED:-false}" + if [[ "$WAF_ENABLED" != "true" && "$WAF_ENABLED" != "false" ]]; then + echo "❌ ERROR: waf_enabled must be 'true' or 'false', got: '$WAF_ENABLED'" + VALIDATION_FAILED=true + else + echo "✅ waf_enabled: '$WAF_ENABLED' is valid" + fi + + # Validate EXP (boolean) + EXP_ENABLED="${INPUT_EXP:-false}" + if [[ "$EXP_ENABLED" != "true" && "$EXP_ENABLED" != "false" ]]; then + echo "❌ ERROR: EXP must be 'true' or 'false', got: '$EXP_ENABLED'" + VALIDATION_FAILED=true + else + echo "✅ EXP: '$EXP_ENABLED' is valid" + fi + + # Validate build_docker_image (boolean) + BUILD_DOCKER="${INPUT_BUILD_DOCKER_IMAGE:-false}" + if [[ "$BUILD_DOCKER" != "true" && "$BUILD_DOCKER" != "false" ]]; then + echo "❌ ERROR: build_docker_image must be 'true' or 'false', got: '$BUILD_DOCKER'" + VALIDATION_FAILED=true + else + echo "✅ build_docker_image: '$BUILD_DOCKER' is valid" + fi + + # Validate cleanup_resources (boolean) + CLEANUP_RESOURCES="${INPUT_CLEANUP_RESOURCES:-false}" + if [[ "$CLEANUP_RESOURCES" != "true" && "$CLEANUP_RESOURCES" != "false" ]]; then + echo "❌ ERROR: cleanup_resources must be 'true' or 'false', got: '$CLEANUP_RESOURCES'" + VALIDATION_FAILED=true + else + echo "✅ cleanup_resources: '$CLEANUP_RESOURCES' is valid" + fi + + # Validate run_e2e_tests (specific allowed values) + TEST_OPTION="${INPUT_RUN_E2E_TESTS:-GoldenPath-Testing}" + if [[ "$TEST_OPTION" != "GoldenPath-Testing" && "$TEST_OPTION" != "Smoke-Testing" && "$TEST_OPTION" != "None" ]]; then + echo "❌ ERROR: run_e2e_tests must be one of: 
GoldenPath-Testing, Smoke-Testing, None, got: '$TEST_OPTION'" + VALIDATION_FAILED=true + else + echo "✅ run_e2e_tests: '$TEST_OPTION' is valid" + fi + + # Validate AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID (optional, Azure Resource ID format) + if [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]]; then + if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/[Mm]icrosoft\.[Oo]perational[Ii]nsights/[Ww]orkspaces/[^/]+$ ]]; then + echo "❌ ERROR: AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID is invalid. Must be a valid Azure Resource ID format:" + echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" + echo " Got: '$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: Valid Resource ID format" + fi + else + echo "✅ AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: Not provided (optional)" + fi + + # Validate AZURE_EXISTING_AI_PROJECT_RESOURCE_ID (optional, Azure Resource ID format) + if [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]]; then + if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/([Mm]icrosoft\.[Mm]achine[Ll]earning[Ss]ervices/([Ww]orkspaces|[Pp]rojects)/[^/]+|[Mm]icrosoft\.[Cc]ognitive[Ss]ervices/[Aa]ccounts/[^/]+/[Pp]rojects/[^/]+)$ ]]; then + echo "❌ ERROR: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID is invalid. 
Must be a valid Azure Resource ID format:" + echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/projects/{projectName}" + echo " Got: '$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: Valid Resource ID format" + fi + else + echo "✅ AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: Not provided (optional)" + fi + + # Validate existing_webapp_url (optional, must start with https) + if [[ -n "$INPUT_EXISTING_WEBAPP_URL" ]]; then + if [[ ! "$INPUT_EXISTING_WEBAPP_URL" =~ ^https:// ]]; then + echo "❌ ERROR: existing_webapp_url must start with 'https://', got: '$INPUT_EXISTING_WEBAPP_URL'" + VALIDATION_FAILED=true + else + echo "✅ existing_webapp_url: '$INPUT_EXISTING_WEBAPP_URL' is valid" + fi + else + echo "✅ existing_webapp_url: Not provided (will perform deployment)" + fi + + # Fail workflow if any validation failed + if [[ "$VALIDATION_FAILED" == "true" ]]; then + echo "" + echo "❌ Parameter validation failed. Please correct the errors above and try again." + exit 1 + fi + + echo "" + echo "✅ All input parameters validated successfully!" 
+ + # Output validated values + echo "passed=true" >> $GITHUB_OUTPUT + echo "azure_location=$LOCATION" >> $GITHUB_OUTPUT + echo "resource_group_name=$INPUT_RESOURCE_GROUP_NAME" >> $GITHUB_OUTPUT + echo "waf_enabled=$WAF_ENABLED" >> $GITHUB_OUTPUT + echo "exp=$EXP_ENABLED" >> $GITHUB_OUTPUT + echo "build_docker_image=$BUILD_DOCKER" >> $GITHUB_OUTPUT + echo "cleanup_resources=$CLEANUP_RESOURCES" >> $GITHUB_OUTPUT + echo "run_e2e_tests=$TEST_OPTION" >> $GITHUB_OUTPUT + echo "azure_env_log_analytics_workspace_id=$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" >> $GITHUB_OUTPUT + echo "azure_existing_ai_project_resource_id=$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" >> $GITHUB_OUTPUT + echo "existing_webapp_url=$INPUT_EXISTING_WEBAPP_URL" >> $GITHUB_OUTPUT + Run: + needs: validate-inputs + if: needs.validate-inputs.outputs.validation_passed == 'true' uses: ./.github/workflows/deploy-orchestrator.yml with: runner_os: ubuntu-latest - azure_location: ${{ github.event.inputs.azure_location || 'australiaeast' }} - resource_group_name: ${{ github.event.inputs.resource_group_name || '' }} - waf_enabled: ${{ github.event.inputs.waf_enabled == 'true' }} - exp: ${{ github.event.inputs.exp == 'true' }} - build_docker_image: ${{ github.event.inputs.build_docker_image == 'true' }} - cleanup_resources: ${{ github.event.inputs.cleanup_resources == 'true' }} - run_e2e_tests: ${{ github.event.inputs.run_e2e_tests || 'GoldenPath-Testing' }} - azure_env_log_anlytics_workspace_id: ${{ github.event.inputs.azure_env_log_anlytics_workspace_id || '' }} - azure_existing_ai_project_resource_id: ${{ github.event.inputs.azure_existing_ai_project_resource_id || '' }} - existing_webapp_url: ${{ github.event.inputs.existing_webapp_url || '' }} + azure_location: ${{ needs.validate-inputs.outputs.azure_location || 'australiaeast' }} + resource_group_name: ${{ needs.validate-inputs.outputs.resource_group_name || '' }} + waf_enabled: ${{ needs.validate-inputs.outputs.waf_enabled == 'true' }} + exp: ${{ 
needs.validate-inputs.outputs.exp == 'true' }} + build_docker_image: ${{ needs.validate-inputs.outputs.build_docker_image == 'true' }} + cleanup_resources: ${{ needs.validate-inputs.outputs.cleanup_resources == 'true' }} + run_e2e_tests: ${{ needs.validate-inputs.outputs.run_e2e_tests || 'GoldenPath-Testing' }} + azure_env_log_analytics_workspace_id: ${{ needs.validate-inputs.outputs.azure_env_log_analytics_workspace_id || '' }} + azure_existing_ai_project_resource_id: ${{ needs.validate-inputs.outputs.azure_existing_ai_project_resource_id || '' }} + existing_webapp_url: ${{ needs.validate-inputs.outputs.existing_webapp_url || '' }} trigger_type: ${{ github.event_name }} secrets: inherit \ No newline at end of file diff --git a/.github/workflows/deploy-orchestrator.yml b/.github/workflows/deploy-orchestrator.yml index daa6a5389..6e2b3d303 100644 --- a/.github/workflows/deploy-orchestrator.yml +++ b/.github/workflows/deploy-orchestrator.yml @@ -42,7 +42,7 @@ on: required: false default: 'GoldenPath-Testing' type: string - azure_env_log_anlytics_workspace_id: + azure_env_log_analytics_workspace_id: description: 'Log Analytics Workspace ID (Optional)' required: false default: '' @@ -74,7 +74,7 @@ jobs: secrets: inherit deploy: - if: always() && (inputs.trigger_type != 'workflow_dispatch' || inputs.existing_webapp_url == '' || inputs.existing_webapp_url == null) + if: "!cancelled() && (needs.docker-build.result == 'success' || needs.docker-build.result == 'skipped') && (inputs.trigger_type != 'workflow_dispatch' || inputs.existing_webapp_url == '' || inputs.existing_webapp_url == null)" needs: docker-build uses: ./.github/workflows/job-azure-deploy.yml with: @@ -86,7 +86,7 @@ jobs: exp: ${{ inputs.exp }} build_docker_image: ${{ inputs.build_docker_image }} existing_webapp_url: ${{ inputs.existing_webapp_url }} - azure_env_log_anlytics_workspace_id: ${{ inputs.azure_env_log_anlytics_workspace_id }} + azure_env_log_analytics_workspace_id: ${{ 
inputs.azure_env_log_analytics_workspace_id }} azure_existing_ai_project_resource_id: ${{ inputs.azure_existing_ai_project_resource_id }} docker_image_tag: ${{ needs.docker-build.outputs.IMAGE_TAG }} run_e2e_tests: ${{ inputs.run_e2e_tests }} @@ -94,7 +94,7 @@ jobs: secrets: inherit e2e-test: - if: always() && ((needs.deploy.result == 'success' && needs.deploy.outputs.WEB_APP_URL != '') || (inputs.existing_webapp_url != '' && inputs.existing_webapp_url != null)) && (inputs.trigger_type != 'workflow_dispatch' || (inputs.run_e2e_tests != 'None' && inputs.run_e2e_tests != '' && inputs.run_e2e_tests != null)) + if: "!cancelled() && ((needs.deploy.result == 'success' && needs.deploy.outputs.WEB_APP_URL != '') || (inputs.existing_webapp_url != '' && inputs.existing_webapp_url != null)) && (inputs.trigger_type != 'workflow_dispatch' || (inputs.run_e2e_tests != 'None' && inputs.run_e2e_tests != '' && inputs.run_e2e_tests != null))" needs: [docker-build, deploy] uses: ./.github/workflows/job-test-automation.yml with: @@ -104,7 +104,7 @@ jobs: secrets: inherit send-notification: - if: always() + if: "!cancelled()" needs: [docker-build, deploy, e2e-test] uses: ./.github/workflows/job-send-notifications.yml with: @@ -123,7 +123,7 @@ jobs: secrets: inherit cleanup-deployment: - if: always() && needs.deploy.result == 'success' && needs.deploy.outputs.RESOURCE_GROUP_NAME != '' && inputs.existing_webapp_url == '' && (inputs.trigger_type != 'workflow_dispatch' || inputs.cleanup_resources) + if: "!cancelled() && needs.deploy.outputs.RESOURCE_GROUP_NAME != '' && inputs.existing_webapp_url == '' && (inputs.trigger_type != 'workflow_dispatch' || inputs.cleanup_resources)" needs: [docker-build, deploy, e2e-test] uses: ./.github/workflows/job-cleanup-resources.yml with: diff --git a/.github/workflows/deploy-windows.yml b/.github/workflows/deploy-windows.yml index 8163196ea..8f8b9a2b7 100644 --- a/.github/workflows/deploy-windows.yml +++ b/.github/workflows/deploy-windows.yml @@ -30,7 
+30,7 @@ on: required: false default: false type: boolean - exp: + EXP: description: 'Enable EXP' required: false default: false @@ -54,12 +54,12 @@ on: - 'GoldenPath-Testing' - 'Smoke-Testing' - 'None' - azure_env_log_anlytics_workspace_id: + AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: description: 'Log Analytics Workspace ID (Optional)' required: false default: '' type: string - azure_existing_ai_project_resource_id: + AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: description: 'AI Project Resource ID (Optional)' required: false default: '' @@ -75,19 +75,187 @@ permissions: actions: read jobs: + validate-inputs: + runs-on: ubuntu-latest + outputs: + validation_passed: ${{ steps.validate.outputs.passed }} + azure_location: ${{ steps.validate.outputs.azure_location }} + resource_group_name: ${{ steps.validate.outputs.resource_group_name }} + waf_enabled: ${{ steps.validate.outputs.waf_enabled }} + exp: ${{ steps.validate.outputs.exp }} + build_docker_image: ${{ steps.validate.outputs.build_docker_image }} + cleanup_resources: ${{ steps.validate.outputs.cleanup_resources }} + run_e2e_tests: ${{ steps.validate.outputs.run_e2e_tests }} + azure_env_log_analytics_workspace_id: ${{ steps.validate.outputs.azure_env_log_analytics_workspace_id }} + azure_existing_ai_project_resource_id: ${{ steps.validate.outputs.azure_existing_ai_project_resource_id }} + existing_webapp_url: ${{ steps.validate.outputs.existing_webapp_url }} + steps: + - name: Validate Workflow Input Parameters + id: validate + shell: bash + env: + INPUT_AZURE_LOCATION: ${{ github.event.inputs.azure_location }} + INPUT_RESOURCE_GROUP_NAME: ${{ github.event.inputs.resource_group_name }} + INPUT_WAF_ENABLED: ${{ github.event.inputs.waf_enabled }} + INPUT_EXP: ${{ github.event.inputs.EXP }} + INPUT_BUILD_DOCKER_IMAGE: ${{ github.event.inputs.build_docker_image }} + INPUT_CLEANUP_RESOURCES: ${{ github.event.inputs.cleanup_resources }} + INPUT_RUN_E2E_TESTS: ${{ github.event.inputs.run_e2e_tests }} + 
INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ github.event.inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} + INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ github.event.inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} + INPUT_EXISTING_WEBAPP_URL: ${{ github.event.inputs.existing_webapp_url }} + run: | + echo "🔍 Validating workflow input parameters..." + VALIDATION_FAILED=false + + # Validate azure_location (Azure region format) + LOCATION="${INPUT_AZURE_LOCATION:-australiaeast}" + + if [[ ! "$LOCATION" =~ ^[a-z0-9]+$ ]]; then + echo "❌ ERROR: azure_location '$LOCATION' is invalid. Must contain only lowercase letters and numbers" + VALIDATION_FAILED=true + else + echo "✅ azure_location: '$LOCATION' is valid" + fi + + # Validate resource_group_name (Azure naming convention, optional) + if [[ -n "$INPUT_RESOURCE_GROUP_NAME" ]]; then + if [[ ! "$INPUT_RESOURCE_GROUP_NAME" =~ ^[a-zA-Z0-9._\(\)-]+$ ]] || [[ "$INPUT_RESOURCE_GROUP_NAME" =~ \.$ ]]; then + echo "❌ ERROR: resource_group_name '$INPUT_RESOURCE_GROUP_NAME' is invalid. Must contain only alphanumerics, periods, underscores, hyphens, and parentheses. Cannot end with period." 
+ VALIDATION_FAILED=true + elif [[ ${#INPUT_RESOURCE_GROUP_NAME} -gt 90 ]]; then + echo "❌ ERROR: resource_group_name '$INPUT_RESOURCE_GROUP_NAME' exceeds 90 characters (length: ${#INPUT_RESOURCE_GROUP_NAME})" + VALIDATION_FAILED=true + else + echo "✅ resource_group_name: '$INPUT_RESOURCE_GROUP_NAME' is valid" + fi + else + echo "✅ resource_group_name: Not provided (will be auto-generated)" + fi + + # Validate waf_enabled (boolean) + WAF_ENABLED="${INPUT_WAF_ENABLED:-false}" + if [[ "$WAF_ENABLED" != "true" && "$WAF_ENABLED" != "false" ]]; then + echo "❌ ERROR: waf_enabled must be 'true' or 'false', got: '$WAF_ENABLED'" + VALIDATION_FAILED=true + else + echo "✅ waf_enabled: '$WAF_ENABLED' is valid" + fi + + # Validate EXP (boolean) + EXP_ENABLED="${INPUT_EXP:-false}" + if [[ "$EXP_ENABLED" != "true" && "$EXP_ENABLED" != "false" ]]; then + echo "❌ ERROR: EXP must be 'true' or 'false', got: '$EXP_ENABLED'" + VALIDATION_FAILED=true + else + echo "✅ EXP: '$EXP_ENABLED' is valid" + fi + + # Validate build_docker_image (boolean) + BUILD_DOCKER="${INPUT_BUILD_DOCKER_IMAGE:-false}" + if [[ "$BUILD_DOCKER" != "true" && "$BUILD_DOCKER" != "false" ]]; then + echo "❌ ERROR: build_docker_image must be 'true' or 'false', got: '$BUILD_DOCKER'" + VALIDATION_FAILED=true + else + echo "✅ build_docker_image: '$BUILD_DOCKER' is valid" + fi + + # Validate cleanup_resources (boolean) + CLEANUP_RESOURCES="${INPUT_CLEANUP_RESOURCES:-false}" + if [[ "$CLEANUP_RESOURCES" != "true" && "$CLEANUP_RESOURCES" != "false" ]]; then + echo "❌ ERROR: cleanup_resources must be 'true' or 'false', got: '$CLEANUP_RESOURCES'" + VALIDATION_FAILED=true + else + echo "✅ cleanup_resources: '$CLEANUP_RESOURCES' is valid" + fi + + # Validate run_e2e_tests (specific allowed values) + TEST_OPTION="${INPUT_RUN_E2E_TESTS:-GoldenPath-Testing}" + if [[ "$TEST_OPTION" != "GoldenPath-Testing" && "$TEST_OPTION" != "Smoke-Testing" && "$TEST_OPTION" != "None" ]]; then + echo "❌ ERROR: run_e2e_tests must be one of: 
GoldenPath-Testing, Smoke-Testing, None, got: '$TEST_OPTION'" + VALIDATION_FAILED=true + else + echo "✅ run_e2e_tests: '$TEST_OPTION' is valid" + fi + + # Validate AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID (optional, Azure Resource ID format) + if [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]]; then + if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/[Mm]icrosoft\.[Oo]perational[Ii]nsights/[Ww]orkspaces/[^/]+$ ]]; then + echo "❌ ERROR: AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID is invalid. Must be a valid Azure Resource ID format:" + echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" + echo " Got: '$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: Valid Resource ID format" + fi + else + echo "✅ AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: Not provided (optional)" + fi + + # Validate AZURE_EXISTING_AI_PROJECT_RESOURCE_ID (optional, Azure Resource ID format) + if [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]]; then + if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/([Mm]icrosoft\.[Mm]achine[Ll]earning[Ss]ervices/([Ww]orkspaces|[Pp]rojects)/[^/]+|[Mm]icrosoft\.[Cc]ognitive[Ss]ervices/[Aa]ccounts/[^/]+/[Pp]rojects/[^/]+)$ ]]; then + echo "❌ ERROR: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID is invalid. 
Must be a valid Azure Resource ID format:" + echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/projects/{projectName}" + echo " Got: '$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: Valid Resource ID format" + fi + else + echo "✅ AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: Not provided (optional)" + fi + + # Validate existing_webapp_url (optional, must start with https) + if [[ -n "$INPUT_EXISTING_WEBAPP_URL" ]]; then + if [[ ! "$INPUT_EXISTING_WEBAPP_URL" =~ ^https:// ]]; then + echo "❌ ERROR: existing_webapp_url must start with 'https://', got: '$INPUT_EXISTING_WEBAPP_URL'" + VALIDATION_FAILED=true + else + echo "✅ existing_webapp_url: '$INPUT_EXISTING_WEBAPP_URL' is valid" + fi + else + echo "✅ existing_webapp_url: Not provided (will perform deployment)" + fi + + # Fail workflow if any validation failed + if [[ "$VALIDATION_FAILED" == "true" ]]; then + echo "" + echo "❌ Parameter validation failed. Please correct the errors above and try again." + exit 1 + fi + + echo "" + echo "✅ All input parameters validated successfully!" 
+ + # Output validated values + echo "passed=true" >> $GITHUB_OUTPUT + echo "azure_location=$LOCATION" >> $GITHUB_OUTPUT + echo "resource_group_name=$INPUT_RESOURCE_GROUP_NAME" >> $GITHUB_OUTPUT + echo "waf_enabled=$WAF_ENABLED" >> $GITHUB_OUTPUT + echo "exp=$EXP_ENABLED" >> $GITHUB_OUTPUT + echo "build_docker_image=$BUILD_DOCKER" >> $GITHUB_OUTPUT + echo "cleanup_resources=$CLEANUP_RESOURCES" >> $GITHUB_OUTPUT + echo "run_e2e_tests=$TEST_OPTION" >> $GITHUB_OUTPUT + echo "azure_env_log_analytics_workspace_id=$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" >> $GITHUB_OUTPUT + echo "azure_existing_ai_project_resource_id=$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" >> $GITHUB_OUTPUT + echo "existing_webapp_url=$INPUT_EXISTING_WEBAPP_URL" >> $GITHUB_OUTPUT + Run: + needs: validate-inputs + if: needs.validate-inputs.outputs.validation_passed == 'true' uses: ./.github/workflows/deploy-orchestrator.yml with: runner_os: windows-latest - azure_location: ${{ github.event.inputs.azure_location || 'australiaeast' }} - resource_group_name: ${{ github.event.inputs.resource_group_name || '' }} - waf_enabled: ${{ github.event.inputs.waf_enabled == 'true' }} - exp: ${{ github.event.inputs.exp == 'true' }} - build_docker_image: ${{ github.event.inputs.build_docker_image == 'true' }} - cleanup_resources: ${{ github.event.inputs.cleanup_resources == 'true' }} - run_e2e_tests: ${{ github.event.inputs.run_e2e_tests || 'GoldenPath-Testing' }} - azure_env_log_anlytics_workspace_id: ${{ github.event.inputs.azure_env_log_anlytics_workspace_id || '' }} - azure_existing_ai_project_resource_id: ${{ github.event.inputs.azure_existing_ai_project_resource_id || '' }} - existing_webapp_url: ${{ github.event.inputs.existing_webapp_url || '' }} + azure_location: ${{ needs.validate-inputs.outputs.azure_location || 'australiaeast' }} + resource_group_name: ${{ needs.validate-inputs.outputs.resource_group_name || '' }} + waf_enabled: ${{ needs.validate-inputs.outputs.waf_enabled == 'true' }} + exp: ${{ 
needs.validate-inputs.outputs.exp == 'true' }} + build_docker_image: ${{ needs.validate-inputs.outputs.build_docker_image == 'true' }} + cleanup_resources: ${{ needs.validate-inputs.outputs.cleanup_resources == 'true' }} + run_e2e_tests: ${{ needs.validate-inputs.outputs.run_e2e_tests || 'GoldenPath-Testing' }} + azure_env_log_analytics_workspace_id: ${{ needs.validate-inputs.outputs.azure_env_log_analytics_workspace_id || '' }} + azure_existing_ai_project_resource_id: ${{ needs.validate-inputs.outputs.azure_existing_ai_project_resource_id || '' }} + existing_webapp_url: ${{ needs.validate-inputs.outputs.existing_webapp_url || '' }} trigger_type: ${{ github.event_name }} secrets: inherit \ No newline at end of file diff --git a/.github/workflows/job-azure-deploy.yml b/.github/workflows/job-azure-deploy.yml index b51c1313d..0887f4cc7 100644 --- a/.github/workflows/job-azure-deploy.yml +++ b/.github/workflows/job-azure-deploy.yml @@ -51,7 +51,7 @@ on: required: false default: '' type: string - azure_env_log_anlytics_workspace_id: + azure_env_log_analytics_workspace_id: description: 'Log Analytics Workspace ID (Optional)' required: false default: '' @@ -114,6 +114,8 @@ jobs: AZURE_ENV_OPENAI_LOCATION: ${{ steps.set_region.outputs.AZURE_ENV_OPENAI_LOCATION }} IMAGE_TAG: ${{ steps.determine_image_tag.outputs.IMAGE_TAG }} QUOTA_FAILED: ${{ steps.quota_failure_output.outputs.QUOTA_FAILED }} + EXP_ENABLED: ${{ steps.configure_exp.outputs.EXP_ENABLED }} + steps: - name: Validate Workflow Input Parameters @@ -128,7 +130,7 @@ jobs: INPUT_EXP: ${{ inputs.exp }} INPUT_CLEANUP_RESOURCES: ${{ inputs.cleanup_resources }} INPUT_RUN_E2E_TESTS: ${{ inputs.run_e2e_tests }} - INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_anlytics_workspace_id }} + INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_analytics_workspace_id }} INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.azure_existing_ai_project_resource_id }} INPUT_EXISTING_WEBAPP_URL: 
${{ inputs.existing_webapp_url }} INPUT_DOCKER_IMAGE_TAG: ${{ inputs.docker_image_tag }} @@ -227,7 +229,7 @@ jobs: # Validate AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID (Azure Resource ID format) if [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]]; then - if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/resourceGroups/[^/]+/providers/microsoft\.operationalinsights/workspaces/[^/]+$ ]]; then + if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/[Mm]icrosoft\.[Oo]perational[Ii]nsights/[Ww]orkspaces/[^/]+$ ]]; then echo "❌ ERROR: AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID is invalid. Must be a valid Azure Resource ID format:" echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" echo " Got: '$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID'" @@ -239,7 +241,7 @@ jobs: # Validate AZURE_EXISTING_AI_PROJECT_RESOURCE_ID (Azure Resource ID format) if [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]]; then - if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/resourceGroups/[^/]+/providers/(Microsoft\.MachineLearningServices/(workspaces|projects)/[^/]+|Microsoft\.CognitiveServices/accounts/[^/]+/projects/[^/]+)$ ]]; then + if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/([Mm]icrosoft\.[Mm]achine[Ll]earning[Ss]ervices/([Ww]orkspaces|[Pp]rojects)/[^/]+|[Mm]icrosoft\.[Cc]ognitive[Ss]ervices/[Aa]ccounts/[^/]+/[Pp]rojects/[^/]+)$ ]]; then echo "❌ ERROR: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID is invalid. 
Must be a valid Azure Resource ID format:" echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/projects/{projectName}" echo " Got: '$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID'" @@ -285,28 +287,35 @@ jobs: echo "✅ All input parameters validated successfully!" - name: Validate and Auto-Configure EXP + id: configure_exp shell: bash env: - INPUT_EXP: ${{ inputs.exp }} - INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_anlytics_workspace_id }} - INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.azure_existing_ai_project_resource_id }} + INPUT_EXP: ${{ inputs.EXP }} + INPUT_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} + INPUT_AI_PROJECT_RESOURCE_ID: ${{ inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} run: | echo "🔍 Validating EXP configuration..." - if [[ "$INPUT_EXP" != "true" ]]; then - if [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]] || [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]]; then - echo "🔧 AUTO-ENABLING EXP: EXP parameter values were provided but EXP was not explicitly enabled." - echo "" - echo "You provided values for:" - [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]] && echo " - Azure Log Analytics Workspace ID: '$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID'" - [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]] && echo " - Azure AI Project Resource ID: '$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID'" - echo "" - echo "✅ Automatically enabling EXP to use these values." - echo "EXP=true" >> $GITHUB_ENV - echo "📌 EXP has been automatically enabled for this deployment." 
- fi + EXP_ENABLED="false" + + if [[ "$INPUT_EXP" == "true" ]]; then + EXP_ENABLED="true" + echo "✅ EXP explicitly enabled by user input" + elif [[ -n "$INPUT_LOG_ANALYTICS_WORKSPACE_ID" ]] || [[ -n "$INPUT_AI_PROJECT_RESOURCE_ID" ]]; then + echo "🔧 AUTO-ENABLING EXP: EXP parameter values were provided but EXP was not explicitly enabled." + echo "" + echo "You provided values for:" + [[ -n "$INPUT_LOG_ANALYTICS_WORKSPACE_ID" ]] && echo " - Azure Log Analytics Workspace ID: '$INPUT_LOG_ANALYTICS_WORKSPACE_ID'" + [[ -n "$INPUT_AI_PROJECT_RESOURCE_ID" ]] && echo " - Azure AI Project Resource ID: '$INPUT_AI_PROJECT_RESOURCE_ID'" + echo "" + echo "✅ Automatically enabling EXP to use these values." + EXP_ENABLED="true" fi + echo "EXP_ENABLED=$EXP_ENABLED" >> $GITHUB_ENV + echo "EXP_ENABLED=$EXP_ENABLED" >> $GITHUB_OUTPUT + echo "Final EXP status: $EXP_ENABLED" + - name: Checkout Code uses: actions/checkout@v4 @@ -335,6 +344,15 @@ jobs: fi exit 1 # Fail the pipeline if any other failure occurs fi + + - name: Set Quota Failure Output + id: quota_failure_output + if: env.QUOTA_FAILED == 'true' + shell: bash + run: | + echo "QUOTA_FAILED=true" >> $GITHUB_OUTPUT + echo "Quota check failed - will notify via separate notification job" + - name: Fail Pipeline if Quota Check Fails if: env.QUOTA_FAILED == 'true' @@ -477,7 +495,7 @@ jobs: INPUT_AZURE_LOCATION: ${{ inputs.azure_location }} INPUT_RESOURCE_GROUP_NAME: ${{ inputs.resource_group_name }} WAF_ENABLED_DISPLAY: ${{ env.WAF_ENABLED == 'true' && '✅ Yes' || '❌ No' }} - EXP_DISPLAY: ${{ env.EXP == 'true' && '✅ Yes' || '❌ No' }} + EXP_DISPLAY: ${{ steps.configure_exp.outputs.EXP_ENABLED == 'true' && '✅ Yes' || '❌ No' }} CLEANUP_DISPLAY: ${{ env.CLEANUP_RESOURCES == 'true' && '✅ Yes' || '❌ No' }} BUILD_DOCKER_DISPLAY: ${{ env.BUILD_DOCKER_IMAGE == 'true' && '✅ Yes' || '❌ No' }} run: | @@ -515,7 +533,7 @@ jobs: deploy-linux: name: Deploy on Linux needs: azure-setup - if: inputs.runner_os == 'ubuntu-latest' && always() && 
needs.azure-setup.result == 'success' + if: inputs.runner_os == 'ubuntu-latest' && !cancelled() && needs.azure-setup.result == 'success' uses: ./.github/workflows/job-deploy-linux.yml with: ENV_NAME: ${{ needs.azure-setup.outputs.ENV_NAME }} @@ -524,16 +542,16 @@ jobs: RESOURCE_GROUP_NAME: ${{ needs.azure-setup.outputs.RESOURCE_GROUP_NAME }} IMAGE_TAG: ${{ needs.azure-setup.outputs.IMAGE_TAG }} BUILD_DOCKER_IMAGE: ${{ github.event.inputs.build_docker_image || 'false' }} - EXP: ${{ github.event.inputs.exp || 'false' }} + EXP: ${{ needs.azure-setup.outputs.EXP_ENABLED }} WAF_ENABLED: ${{ inputs.waf_enabled == true && 'true' || 'false' }} - AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_anlytics_workspace_id }} + AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_analytics_workspace_id }} AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.azure_existing_ai_project_resource_id }} secrets: inherit deploy-windows: name: Deploy on Windows needs: azure-setup - if: inputs.runner_os == 'windows-latest' && always() && needs.azure-setup.result == 'success' + if: inputs.runner_os == 'windows-latest' && !cancelled() && needs.azure-setup.result == 'success' uses: ./.github/workflows/job-deploy-windows.yml with: ENV_NAME: ${{ needs.azure-setup.outputs.ENV_NAME }} @@ -542,8 +560,8 @@ jobs: RESOURCE_GROUP_NAME: ${{ needs.azure-setup.outputs.RESOURCE_GROUP_NAME }} IMAGE_TAG: ${{ needs.azure-setup.outputs.IMAGE_TAG }} BUILD_DOCKER_IMAGE: ${{ github.event.inputs.build_docker_image || 'false' }} - EXP: ${{ github.event.inputs.exp || 'false' }} + EXP: ${{ needs.azure-setup.outputs.EXP_ENABLED }} WAF_ENABLED: ${{ inputs.waf_enabled == true && 'true' || 'false' }} - AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_anlytics_workspace_id }} + AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_analytics_workspace_id }} AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.azure_existing_ai_project_resource_id }} secrets: inherit \ No newline at 
end of file diff --git a/.github/workflows/job-deploy-linux.yml b/.github/workflows/job-deploy-linux.yml index 4f3f65e71..745313fa3 100644 --- a/.github/workflows/job-deploy-linux.yml +++ b/.github/workflows/job-deploy-linux.yml @@ -153,7 +153,7 @@ jobs: # Validate AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID (optional, if provided must be valid Resource ID) if [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]]; then - if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/resourceGroups/[^/]+/providers/microsoft\.operationalinsights/workspaces/[^/]+$ ]]; then + if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/[Mm]icrosoft\.[Oo]perational[Ii]nsights/[Ww]orkspaces/[^/]+$ ]]; then echo "❌ ERROR: AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID is invalid. Must be a valid Azure Resource ID format:" echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" echo " Got: '$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID'" @@ -165,7 +165,7 @@ jobs: # Validate AZURE_EXISTING_AI_PROJECT_RESOURCE_ID (optional, if provided must be valid Resource ID) if [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]]; then - if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/resourceGroups/[^/]+/providers/(Microsoft\.MachineLearningServices/(workspaces|projects)/[^/]+|Microsoft\.CognitiveServices/accounts/[^/]+/projects/[^/]+)$ ]]; then + if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/([Mm]icrosoft\.[Mm]achine[Ll]earning[Ss]ervices/([Ww]orkspaces|[Pp]rojects)/[^/]+|[Mm]icrosoft\.[Cc]ognitive[Ss]ervices/[Aa]ccounts/[^/]+/[Pp]rojects/[^/]+)$ ]]; then echo "❌ ERROR: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID is invalid. 
Must be a valid Azure Resource ID format:" echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/projects/{projectName}" echo " Got: '$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID'" diff --git a/.github/workflows/job-deploy-windows.yml b/.github/workflows/job-deploy-windows.yml index 3295444ca..ecf3f460f 100644 --- a/.github/workflows/job-deploy-windows.yml +++ b/.github/workflows/job-deploy-windows.yml @@ -156,7 +156,7 @@ jobs: # Validate AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID (optional, if provided must be valid Resource ID) if [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]]; then - if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/resourceGroups/[^/]+/providers/microsoft\.operationalinsights/workspaces/[^/]+$ ]]; then + if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/[Mm]icrosoft\.[Oo]perational[Ii]nsights/[Ww]orkspaces/[^/]+$ ]]; then echo "❌ ERROR: AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID is invalid. Must be a valid Azure Resource ID format:" echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" echo " Got: '$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID'" @@ -168,7 +168,7 @@ jobs: # Validate AZURE_EXISTING_AI_PROJECT_RESOURCE_ID (optional, if provided must be valid Resource ID) if [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]]; then - if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/resourceGroups/[^/]+/providers/(Microsoft\.MachineLearningServices/(workspaces|projects)/[^/]+|Microsoft\.CognitiveServices/accounts/[^/]+/projects/[^/]+)$ ]]; then + if [[ ! 
"$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/([Mm]icrosoft\.[Mm]achine[Ll]earning[Ss]ervices/([Ww]orkspaces|[Pp]rojects)/[^/]+|[Mm]icrosoft\.[Cc]ognitive[Ss]ervices/[Aa]ccounts/[^/]+/[Pp]rojects/[^/]+)$ ]]; then echo "❌ ERROR: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID is invalid. Must be a valid Azure Resource ID format:" echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/projects/{projectName}" echo " Got: '$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID'" diff --git a/.github/workflows/job-send-notifications.yml b/.github/workflows/job-send-notifications.yml index 2d564ffb2..6330f02b0 100644 --- a/.github/workflows/job-send-notifications.yml +++ b/.github/workflows/job-send-notifications.yml @@ -193,12 +193,12 @@ jobs: fi fi - # Validate quota_failed (must be 'true' or 'false') - if [[ "$INPUT_QUOTA_FAILED" != "true" && "$INPUT_QUOTA_FAILED" != "false" ]]; then - echo "❌ ERROR: quota_failed must be 'true' or 'false', got: '$INPUT_QUOTA_FAILED'" + # Validate QUOTA_FAILED (must be 'true', 'false', or empty string) + if [[ "$INPUT_QUOTA_FAILED" != "true" && "$INPUT_QUOTA_FAILED" != "false" && "$INPUT_QUOTA_FAILED" != "" ]]; then + echo "❌ ERROR: QUOTA_FAILED must be 'true', 'false', or empty string, got: '$INPUT_QUOTA_FAILED'" VALIDATION_FAILED=true else - echo "✅ quota_failed: '$INPUT_QUOTA_FAILED' is valid" + echo "✅ QUOTA_FAILED: '$INPUT_QUOTA_FAILED' is valid" fi # Validate test_success (must be 'true' or 'false' or empty) From b33b2cf13bca9cf33ff6fffbcee67f6ca7b7562e Mon Sep 17 00:00:00 2001 From: Vamshi-Microsoft Date: Wed, 14 Jan 2026 18:07:04 +0530 Subject: [PATCH 03/10] Added usecase input paramter to pipelines --- .github/workflows/deploy-linux.yml | 25 +++++++++++++++++++++++ .github/workflows/deploy-orchestrator.yml | 7 +++++++ .github/workflows/deploy-windows.yml | 21 +++++++++++++++++++ 
.github/workflows/job-azure-deploy.yml | 9 ++++++++ .github/workflows/job-deploy-linux.yml | 18 ++++++++++++++++ .github/workflows/job-deploy-windows.yml | 18 ++++++++++++++++ .github/workflows/job-test-automation.yml | 6 ++++++ 7 files changed, 104 insertions(+) diff --git a/.github/workflows/deploy-linux.yml b/.github/workflows/deploy-linux.yml index e67f53822..1cdaf6292 100644 --- a/.github/workflows/deploy-linux.yml +++ b/.github/workflows/deploy-linux.yml @@ -61,6 +61,14 @@ on: required: false default: false type: boolean + AZURE_ENV_USE_CASE: + description: 'Specify Use case to deploy' + type: 'choice' + options: + - 'telecom' + - 'IT_helpdesk' + required: false + default: 'telecom' run_e2e_tests: description: 'Run End-to-End Tests' required: false @@ -85,6 +93,7 @@ on: required: false default: '' type: string + permissions: contents: read @@ -105,6 +114,8 @@ jobs: azure_env_log_analytics_workspace_id: ${{ steps.validate.outputs.azure_env_log_analytics_workspace_id }} azure_existing_ai_project_resource_id: ${{ steps.validate.outputs.azure_existing_ai_project_resource_id }} existing_webapp_url: ${{ steps.validate.outputs.existing_webapp_url }} + azure_env_use_case: ${{ steps.validate.outputs.azure_env_use_case }} + steps: - name: Validate Workflow Input Parameters id: validate @@ -120,6 +131,8 @@ jobs: INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ github.event.inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ github.event.inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} INPUT_EXISTING_WEBAPP_URL: ${{ github.event.inputs.existing_webapp_url }} + INPUT_AZURE_ENV_USE_CASE: ${{ github.event.inputs.AZURE_ENV_USE_CASE }} + run: | echo "🔍 Validating workflow input parameters..." 
VALIDATION_FAILED=false @@ -233,6 +246,15 @@ jobs: else echo "✅ existing_webapp_url: Not provided (will perform deployment)" fi + + # Validate AZURE_ENV_USE_CASE (specific allowed values) + USE_CASE="${INPUT_AZURE_ENV_USE_CASE:-telecom}" + if [[ "$USE_CASE" != "telecom" && "$USE_CASE" != "IT_helpdesk" ]]; then + echo "❌ ERROR: AZURE_ENV_USE_CASE must be one of: telecom, IT_helpdesk, got: '$USE_CASE'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_ENV_USE_CASE: '$USE_CASE' is valid" + fi # Fail workflow if any validation failed if [[ "$VALIDATION_FAILED" == "true" ]]; then @@ -256,6 +278,8 @@ jobs: echo "azure_env_log_analytics_workspace_id=$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" >> $GITHUB_OUTPUT echo "azure_existing_ai_project_resource_id=$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" >> $GITHUB_OUTPUT echo "existing_webapp_url=$INPUT_EXISTING_WEBAPP_URL" >> $GITHUB_OUTPUT + echo "azure_env_use_case=$USE_CASE" >> $GITHUB_OUTPUT + Run: needs: validate-inputs @@ -273,5 +297,6 @@ jobs: azure_env_log_analytics_workspace_id: ${{ needs.validate-inputs.outputs.azure_env_log_analytics_workspace_id || '' }} azure_existing_ai_project_resource_id: ${{ needs.validate-inputs.outputs.azure_existing_ai_project_resource_id || '' }} existing_webapp_url: ${{ needs.validate-inputs.outputs.existing_webapp_url || '' }} + azure_env_use_case: ${{ needs.validate-inputs.outputs.azure_env_use_case || 'telecom' }} trigger_type: ${{ github.event_name }} secrets: inherit \ No newline at end of file diff --git a/.github/workflows/deploy-orchestrator.yml b/.github/workflows/deploy-orchestrator.yml index 6e2b3d303..6100bbd94 100644 --- a/.github/workflows/deploy-orchestrator.yml +++ b/.github/workflows/deploy-orchestrator.yml @@ -57,6 +57,11 @@ on: required: false default: '' type: string + azure_env_use_case: + description: 'Azure Environment Use Case (telecom or IT_helpdesk)' + required: false + default: 'telecom' + type: string trigger_type: description: 'Trigger type (workflow_dispatch, 
pull_request, schedule)' required: true @@ -88,6 +93,7 @@ jobs: existing_webapp_url: ${{ inputs.existing_webapp_url }} azure_env_log_analytics_workspace_id: ${{ inputs.azure_env_log_analytics_workspace_id }} azure_existing_ai_project_resource_id: ${{ inputs.azure_existing_ai_project_resource_id }} + azure_env_use_case: ${{ inputs.azure_env_use_case }} docker_image_tag: ${{ needs.docker-build.outputs.IMAGE_TAG }} run_e2e_tests: ${{ inputs.run_e2e_tests }} cleanup_resources: ${{ inputs.cleanup_resources }} @@ -101,6 +107,7 @@ jobs: KMGENERIC_URL: ${{ needs.deploy.outputs.WEB_APP_URL || inputs.existing_webapp_url }} KMGENERIC_URL_API: ${{ needs.deploy.outputs.API_APP_URL || inputs.existing_webapp_url }} TEST_SUITE: ${{ inputs.trigger_type == 'workflow_dispatch' && inputs.run_e2e_tests || 'GoldenPath-Testing' }} + AZURE_ENV_USE_CASE: ${{ inputs.azure_env_use_case }} secrets: inherit send-notification: diff --git a/.github/workflows/deploy-windows.yml b/.github/workflows/deploy-windows.yml index 8f8b9a2b7..7f8b709dc 100644 --- a/.github/workflows/deploy-windows.yml +++ b/.github/workflows/deploy-windows.yml @@ -45,6 +45,14 @@ on: required: false default: false type: boolean + AZURE_ENV_USE_CASE: + description: 'Specify Use case to deploy' + type: 'choice' + options: + - 'telecom' + - 'IT_helpdesk' + required: false + default: 'telecom' run_e2e_tests: description: 'Run End-to-End Tests' required: false @@ -89,6 +97,7 @@ jobs: azure_env_log_analytics_workspace_id: ${{ steps.validate.outputs.azure_env_log_analytics_workspace_id }} azure_existing_ai_project_resource_id: ${{ steps.validate.outputs.azure_existing_ai_project_resource_id }} existing_webapp_url: ${{ steps.validate.outputs.existing_webapp_url }} + azure_env_use_case: ${{ steps.validate.outputs.azure_env_use_case }} steps: - name: Validate Workflow Input Parameters id: validate @@ -104,6 +113,7 @@ jobs: INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ github.event.inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} 
INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ github.event.inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} INPUT_EXISTING_WEBAPP_URL: ${{ github.event.inputs.existing_webapp_url }} + INPUT_AZURE_ENV_USE_CASE: ${{ github.event.inputs.AZURE_ENV_USE_CASE }} run: | echo "🔍 Validating workflow input parameters..." VALIDATION_FAILED=false @@ -218,6 +228,15 @@ jobs: echo "✅ existing_webapp_url: Not provided (will perform deployment)" fi + # Validate AZURE_ENV_USE_CASE (specific allowed values) + USE_CASE="${INPUT_AZURE_ENV_USE_CASE:-telecom}" + if [[ "$USE_CASE" != "telecom" && "$USE_CASE" != "IT_helpdesk" ]]; then + echo "❌ ERROR: AZURE_ENV_USE_CASE must be one of: telecom, IT_helpdesk, got: '$USE_CASE'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_ENV_USE_CASE: '$USE_CASE' is valid" + fi + # Fail workflow if any validation failed if [[ "$VALIDATION_FAILED" == "true" ]]; then echo "" @@ -240,6 +259,7 @@ jobs: echo "azure_env_log_analytics_workspace_id=$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" >> $GITHUB_OUTPUT echo "azure_existing_ai_project_resource_id=$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" >> $GITHUB_OUTPUT echo "existing_webapp_url=$INPUT_EXISTING_WEBAPP_URL" >> $GITHUB_OUTPUT + echo "azure_env_use_case=$USE_CASE" >> $GITHUB_OUTPUT Run: needs: validate-inputs @@ -257,5 +277,6 @@ jobs: azure_env_log_analytics_workspace_id: ${{ needs.validate-inputs.outputs.azure_env_log_analytics_workspace_id || '' }} azure_existing_ai_project_resource_id: ${{ needs.validate-inputs.outputs.azure_existing_ai_project_resource_id || '' }} existing_webapp_url: ${{ needs.validate-inputs.outputs.existing_webapp_url || '' }} + azure_env_use_case: ${{ needs.validate-inputs.outputs.azure_env_use_case || 'telecom' }} trigger_type: ${{ github.event_name }} secrets: inherit \ No newline at end of file diff --git a/.github/workflows/job-azure-deploy.yml b/.github/workflows/job-azure-deploy.yml index 0887f4cc7..899ab3ecf 100644 --- a/.github/workflows/job-azure-deploy.yml +++ 
b/.github/workflows/job-azure-deploy.yml @@ -61,6 +61,11 @@ on: required: false default: '' type: string + azure_env_use_case: + description: 'Azure Environment Use Case (telecom or IT_helpdesk)' + required: false + default: 'telecom' + type: string docker_image_tag: description: 'Docker Image Tag from build job' required: false @@ -498,6 +503,7 @@ jobs: EXP_DISPLAY: ${{ steps.configure_exp.outputs.EXP_ENABLED == 'true' && '✅ Yes' || '❌ No' }} CLEANUP_DISPLAY: ${{ env.CLEANUP_RESOURCES == 'true' && '✅ Yes' || '❌ No' }} BUILD_DOCKER_DISPLAY: ${{ env.BUILD_DOCKER_IMAGE == 'true' && '✅ Yes' || '❌ No' }} + AZURE_ENV_USE_CASE: ${{ inputs.azure_env_use_case }} run: | echo "## 📋 Workflow Configuration Summary" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY @@ -511,6 +517,7 @@ jobs: echo "| **Run E2E Tests** | \`${{ env.RUN_E2E_TESTS }}\` |" >> $GITHUB_STEP_SUMMARY echo "| **Cleanup Resources** | $CLEANUP_DISPLAY |" >> $GITHUB_STEP_SUMMARY echo "| **Build Docker Image** | $BUILD_DOCKER_DISPLAY |" >> $GITHUB_STEP_SUMMARY + echo "| **Use Case** | \`$AZURE_ENV_USE_CASE\` |" >> $GITHUB_STEP_SUMMARY if [[ "$INPUT_TRIGGER_TYPE" == "workflow_dispatch" && -n "$INPUT_AZURE_LOCATION" ]]; then echo "| **Azure Location** | \`$INPUT_AZURE_LOCATION\` (User Selected) |" >> $GITHUB_STEP_SUMMARY @@ -546,6 +553,7 @@ jobs: WAF_ENABLED: ${{ inputs.waf_enabled == true && 'true' || 'false' }} AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_analytics_workspace_id }} AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.azure_existing_ai_project_resource_id }} + AZURE_ENV_USE_CASE: ${{ inputs.azure_env_use_case }} secrets: inherit deploy-windows: @@ -564,4 +572,5 @@ jobs: WAF_ENABLED: ${{ inputs.waf_enabled == true && 'true' || 'false' }} AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_analytics_workspace_id }} AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.azure_existing_ai_project_resource_id }} + AZURE_ENV_USE_CASE: ${{ inputs.azure_env_use_case }} 
secrets: inherit \ No newline at end of file diff --git a/.github/workflows/job-deploy-linux.yml b/.github/workflows/job-deploy-linux.yml index 745313fa3..6e9e8b569 100644 --- a/.github/workflows/job-deploy-linux.yml +++ b/.github/workflows/job-deploy-linux.yml @@ -34,6 +34,10 @@ on: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: required: false type: string + AZURE_ENV_USE_CASE: + required: false + type: string + default: 'telecom' outputs: WEB_APP_URL: description: "Web App URL" @@ -65,6 +69,7 @@ jobs: INPUT_WAF_ENABLED: ${{ inputs.WAF_ENABLED }} INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} + INPUT_AZURE_ENV_USE_CASE: ${{ inputs.AZURE_ENV_USE_CASE }} run: | echo "🔍 Validating workflow input parameters..." VALIDATION_FAILED=false @@ -175,6 +180,15 @@ jobs: fi fi + # Validate AZURE_ENV_USE_CASE (optional, must be 'telecom' or 'IT_helpdesk') + USE_CASE="${INPUT_AZURE_ENV_USE_CASE:-telecom}" + if [[ "$USE_CASE" != "telecom" && "$USE_CASE" != "IT_helpdesk" ]]; then + echo "❌ ERROR: AZURE_ENV_USE_CASE must be one of: telecom, IT_helpdesk, got: '$USE_CASE'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_ENV_USE_CASE: '$USE_CASE' is valid" + fi + # Fail workflow if any validation failed if [[ "$VALIDATION_FAILED" == "true" ]]; then echo "" @@ -224,6 +238,7 @@ jobs: EXP: ${{ inputs.EXP }} AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} + AZURE_ENV_USE_CASE: ${{ inputs.AZURE_ENV_USE_CASE }} run: | set -e echo "Starting azd deployment..." 
@@ -246,6 +261,7 @@ jobs: azd env set AZURE_LOCATION="$AZURE_LOCATION" azd env set AZURE_RESOURCE_GROUP="$RESOURCE_GROUP_NAME" azd env set AZURE_ENV_IMAGETAG="$IMAGE_TAG" + azd env set AZURE_ENV_USE_CASE="$AZURE_ENV_USE_CASE" if [[ "$BUILD_DOCKER_IMAGE" == "true" ]]; then ACR_NAME=$(echo "${{ secrets.ACR_TEST_LOGIN_SERVER }}") @@ -310,6 +326,7 @@ jobs: AZURE_LOCATION: ${{ inputs.AZURE_LOCATION }} AZURE_ENV_OPENAI_LOCATION: ${{ inputs.AZURE_ENV_OPENAI_LOCATION }} IMAGE_TAG: ${{ inputs.IMAGE_TAG }} + AZURE_ENV_USE_CASE: ${{ inputs.AZURE_ENV_USE_CASE }} run: | echo "## 🚀 Deploy Job Summary (Linux)" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY @@ -321,6 +338,7 @@ jobs: echo "| **Azure Region (Infrastructure)** | \`$AZURE_LOCATION\` |" >> $GITHUB_STEP_SUMMARY echo "| **Azure OpenAI Region** | \`$AZURE_ENV_OPENAI_LOCATION\` |" >> $GITHUB_STEP_SUMMARY echo "| **Docker Image Tag** | \`$IMAGE_TAG\` |" >> $GITHUB_STEP_SUMMARY + echo "| **Use Case** | \`$AZURE_ENV_USE_CASE\` |" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY if [[ "${{ job.status }}" == "success" ]]; then echo "### ✅ Deployment Details" >> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/job-deploy-windows.yml b/.github/workflows/job-deploy-windows.yml index ecf3f460f..4787ddbd3 100644 --- a/.github/workflows/job-deploy-windows.yml +++ b/.github/workflows/job-deploy-windows.yml @@ -34,6 +34,10 @@ on: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: required: false type: string + AZURE_ENV_USE_CASE: + required: false + type: string + default: 'telecom' AZURE_TAGS: required: false type: string @@ -68,6 +72,7 @@ jobs: INPUT_WAF_ENABLED: ${{ inputs.WAF_ENABLED }} INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} + INPUT_AZURE_ENV_USE_CASE: ${{ inputs.AZURE_ENV_USE_CASE }} run: | echo "🔍 Validating workflow input parameters..." 
VALIDATION_FAILED=false @@ -178,6 +183,15 @@ jobs: fi fi + # Validate AZURE_ENV_USE_CASE (optional, must be 'telecom' or 'IT_helpdesk') + USE_CASE="${INPUT_AZURE_ENV_USE_CASE:-telecom}" + if [[ "$USE_CASE" != "telecom" && "$USE_CASE" != "IT_helpdesk" ]]; then + echo "❌ ERROR: AZURE_ENV_USE_CASE must be one of: telecom, IT_helpdesk, got: '$USE_CASE'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_ENV_USE_CASE: '$USE_CASE' is valid" + fi + # Fail workflow if any validation failed if [[ "$VALIDATION_FAILED" == "true" ]]; then echo "" @@ -228,6 +242,7 @@ jobs: INPUT_EXP: ${{ inputs.EXP }} INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} + INPUT_AZURE_ENV_USE_CASE: ${{ inputs.AZURE_ENV_USE_CASE }} run: | $ErrorActionPreference = "Stop" Write-Host "Starting azd deployment..." @@ -247,6 +262,7 @@ jobs: azd env set AZURE_LOCATION="$env:INPUT_AZURE_LOCATION" azd env set AZURE_RESOURCE_GROUP="$env:INPUT_RESOURCE_GROUP_NAME" azd env set AZURE_ENV_IMAGETAG="$env:INPUT_IMAGE_TAG" + azd env set AZURE_ENV_USE_CASE="$env:INPUT_AZURE_ENV_USE_CASE" # Set ACR name only when building Docker image if ($env:INPUT_BUILD_DOCKER_IMAGE -eq "true") { @@ -315,6 +331,7 @@ jobs: INPUT_AZURE_LOCATION: ${{ inputs.AZURE_LOCATION }} INPUT_AZURE_ENV_OPENAI_LOCATION: ${{ inputs.AZURE_ENV_OPENAI_LOCATION }} INPUT_IMAGE_TAG: ${{ inputs.IMAGE_TAG }} + INPUT_AZURE_ENV_USE_CASE: ${{ inputs.AZURE_ENV_USE_CASE }} run: | echo "## 🚀 Deploy Job Summary (Windows)" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY @@ -338,6 +355,7 @@ jobs: echo "| **Azure Region (Infrastructure)** | \`$INPUT_AZURE_LOCATION\` |" >> $GITHUB_STEP_SUMMARY echo "| **Azure OpenAI Region** | \`$INPUT_AZURE_ENV_OPENAI_LOCATION\` |" >> $GITHUB_STEP_SUMMARY echo "| **Docker Image Tag** | \`$INPUT_IMAGE_TAG\` |" >> $GITHUB_STEP_SUMMARY + echo "| **Use Case** | \`$INPUT_AZURE_ENV_USE_CASE\` |" 
>> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY if [ "${{ job.status }}" == "success" ]; then echo "### ✅ Deployment Details" >> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/job-test-automation.yml b/.github/workflows/job-test-automation.yml index 80f1ad61a..4faa36323 100644 --- a/.github/workflows/job-test-automation.yml +++ b/.github/workflows/job-test-automation.yml @@ -16,6 +16,11 @@ on: type: string default: "GoldenPath-Testing" description: "Test suite to run: 'Smoke-Testing', 'GoldenPath-Testing' " + AZURE_ENV_USE_CASE: + required: false + type: string + default: "telecom" + description: "Azure Environment Use Case: 'telecom', 'IT_helpdesk'" secrets: EMAILNOTIFICATION_LOGICAPP_URL_TA: required: false @@ -33,6 +38,7 @@ env: api_url: ${{ inputs.KMGENERIC_URL_API}} accelerator_name: "KMGeneric" test_suite: ${{ inputs.TEST_SUITE }} + azure_env_use_case: ${{ inputs.AZURE_ENV_USE_CASE }} jobs: test: From 9eabbb5eefa5eec20835b63f4829f66df8d86254 Mon Sep 17 00:00:00 2001 From: Vamshi-Microsoft Date: Wed, 14 Jan 2026 19:04:31 +0530 Subject: [PATCH 04/10] Skip e2e testing --- .github/workflows/deploy-orchestrator.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-orchestrator.yml b/.github/workflows/deploy-orchestrator.yml index 6100bbd94..c6a9a0c05 100644 --- a/.github/workflows/deploy-orchestrator.yml +++ b/.github/workflows/deploy-orchestrator.yml @@ -100,7 +100,7 @@ jobs: secrets: inherit e2e-test: - if: "!cancelled() && ((needs.deploy.result == 'success' && needs.deploy.outputs.WEB_APP_URL != '') || (inputs.existing_webapp_url != '' && inputs.existing_webapp_url != null)) && (inputs.trigger_type != 'workflow_dispatch' || (inputs.run_e2e_tests != 'None' && inputs.run_e2e_tests != '' && inputs.run_e2e_tests != null))" + if: false # E2E testing disabled needs: [docker-build, deploy] uses: ./.github/workflows/job-test-automation.yml with: From 596695a756882faf9bd1f90720ad6d0a9e44e144 Mon Sep 17 00:00:00 2001 
From: Vamshi-Microsoft Date: Fri, 16 Jan 2026 15:25:51 +0530 Subject: [PATCH 05/10] Map Exp to Notification Job --- .github/workflows/deploy-orchestrator.yml | 2 +- .github/workflows/job-azure-deploy.yml | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/deploy-orchestrator.yml b/.github/workflows/deploy-orchestrator.yml index c6a9a0c05..5cb016411 100644 --- a/.github/workflows/deploy-orchestrator.yml +++ b/.github/workflows/deploy-orchestrator.yml @@ -117,7 +117,7 @@ jobs: with: trigger_type: ${{ inputs.trigger_type }} waf_enabled: ${{ inputs.waf_enabled }} - exp: ${{ inputs.exp }} + exp: ${{ needs.deploy.outputs.EXP_ENABLED }} run_e2e_tests: ${{ inputs.run_e2e_tests }} existing_webapp_url: ${{ inputs.existing_webapp_url }} deploy_result: ${{ needs.deploy.result }} diff --git a/.github/workflows/job-azure-deploy.yml b/.github/workflows/job-azure-deploy.yml index 899ab3ecf..1f7a0f86f 100644 --- a/.github/workflows/job-azure-deploy.yml +++ b/.github/workflows/job-azure-deploy.yml @@ -96,6 +96,9 @@ on: QUOTA_FAILED: description: "Quota Check Failed Flag" value: ${{ jobs.azure-setup.outputs.QUOTA_FAILED }} + EXP_ENABLED: + description: "EXP Flag" + value: ${{ jobs.azure-setup.outputs.EXP_ENABLED }} env: GPT_MIN_CAPACITY: 100 From 769fb79041dea0d7f8a0a984036e4fcda4b29682 Mon Sep 17 00:00:00 2001 From: Vamshi-Microsoft Date: Fri, 16 Jan 2026 17:05:14 +0530 Subject: [PATCH 06/10] refactor: update EXP input handling in deployment workflows --- .github/workflows/deploy-orchestrator.yml | 2 +- .github/workflows/job-azure-deploy.yml | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/workflows/deploy-orchestrator.yml b/.github/workflows/deploy-orchestrator.yml index 5cb016411..c6a9a0c05 100644 --- a/.github/workflows/deploy-orchestrator.yml +++ b/.github/workflows/deploy-orchestrator.yml @@ -117,7 +117,7 @@ jobs: with: trigger_type: ${{ inputs.trigger_type }} waf_enabled: ${{ inputs.waf_enabled }} - exp: ${{ 
needs.deploy.outputs.EXP_ENABLED }} + exp: ${{ inputs.exp }} run_e2e_tests: ${{ inputs.run_e2e_tests }} existing_webapp_url: ${{ inputs.existing_webapp_url }} deploy_result: ${{ needs.deploy.result }} diff --git a/.github/workflows/job-azure-deploy.yml b/.github/workflows/job-azure-deploy.yml index 1f7a0f86f..899ab3ecf 100644 --- a/.github/workflows/job-azure-deploy.yml +++ b/.github/workflows/job-azure-deploy.yml @@ -96,9 +96,6 @@ on: QUOTA_FAILED: description: "Quota Check Failed Flag" value: ${{ jobs.azure-setup.outputs.QUOTA_FAILED }} - EXP_ENABLED: - description: "EXP Flag" - value: ${{ jobs.azure-setup.outputs.EXP_ENABLED }} env: GPT_MIN_CAPACITY: 100 From ed3bfddeab3ce8fb8a6a644a1dd28c845154217f Mon Sep 17 00:00:00 2001 From: Kevin Yang Date: Sun, 14 Dec 2025 11:51:45 +0800 Subject: [PATCH 07/10] Refactor to use get_field_value() for safer field access to avoid KeyError exceptions --- .../index_scripts/03_cu_process_data_text.py | 22 ++++++---- .../04_cu_process_custom_data.py | 41 +++++++++++-------- 2 files changed, 36 insertions(+), 27 deletions(-) diff --git a/infra/scripts/index_scripts/03_cu_process_data_text.py b/infra/scripts/index_scripts/03_cu_process_data_text.py index e2056b1ad..1e40c3fdd 100644 --- a/infra/scripts/index_scripts/03_cu_process_data_text.py +++ b/infra/scripts/index_scripts/03_cu_process_data_text.py @@ -306,6 +306,10 @@ def create_tables(): create_tables() +def get_field_value(fields, field_name, default=""): + field = fields.get(field_name, {}) + return field.get('valueString', default) + # Process files and insert into DB and Search conversationIds, docs, counter = [], [], 0 for path in paths: @@ -325,17 +329,17 @@ def create_tables(): start_timestamp = datetime.strptime(start_time, timestamp_format) conversation_id = file_name.split('convo_', 1)[1].split('_')[0] conversationIds.append(conversation_id) - duration = int(result['result']['contents'][0]['fields']['Duration']['valueString']) + fields = 
result['result']['contents'][0]['fields'] + duration = int(get_field_value(fields, 'Duration', '0')) end_timestamp = str(start_timestamp + timedelta(seconds=duration)).split(".")[0] start_timestamp = str(start_timestamp).split(".")[0] - fields = result['result']['contents'][0]['fields'] - summary = fields['summary']['valueString'] - satisfied = fields['satisfied']['valueString'] - sentiment = fields['sentiment']['valueString'] - topic = fields['topic']['valueString'] - key_phrases = fields['keyPhrases']['valueString'] - complaint = fields['complaint']['valueString'] - content = fields['content']['valueString'] + summary = get_field_value(fields, 'summary') + satisfied = get_field_value(fields, 'satisfied') + sentiment = get_field_value(fields, 'sentiment') + topic = get_field_value(fields, 'topic') + key_phrases = get_field_value(fields, 'keyPhrases') + complaint = get_field_value(fields, 'complaint') + content = get_field_value(fields, 'content') cursor.execute( "INSERT INTO processed_data (ConversationId, EndTime, StartTime, Content, summary, satisfied, sentiment, topic, key_phrases, complaint) VALUES (?,?,?,?,?,?,?,?,?,?)", (conversation_id, end_timestamp, start_timestamp, content, summary, satisfied, sentiment, topic, key_phrases, complaint) diff --git a/infra/scripts/index_scripts/04_cu_process_custom_data.py b/infra/scripts/index_scripts/04_cu_process_custom_data.py index 8836e311c..b9724eb99 100644 --- a/infra/scripts/index_scripts/04_cu_process_custom_data.py +++ b/infra/scripts/index_scripts/04_cu_process_custom_data.py @@ -351,6 +351,10 @@ def create_tables(): create_tables() +def get_field_value(fields, field_name, default=""): + field = fields.get(field_name, {}) + return field.get('valueString', default) + ANALYZER_ID = "ckm-json" # Process files and insert into DB and Search - transcripts conversationIds, docs, counter = [], [], 0 @@ -367,17 +371,17 @@ def create_tables(): start_timestamp = datetime.strptime(start_time, timestamp_format) 
conversation_id = file_name.split('convo_', 1)[1].split('_')[0] conversationIds.append(conversation_id) - duration = int(result['result']['contents'][0]['fields']['Duration']['valueString']) + fields = result['result']['contents'][0]['fields'] + duration = int(get_field_value(fields, 'Duration', '0')) end_timestamp = str(start_timestamp + timedelta(seconds=duration)).split(".")[0] start_timestamp = str(start_timestamp).split(".")[0] - fields = result['result']['contents'][0]['fields'] - summary = fields['summary']['valueString'] - satisfied = fields['satisfied']['valueString'] - sentiment = fields['sentiment']['valueString'] - topic = fields['topic']['valueString'] - key_phrases = fields['keyPhrases']['valueString'] - complaint = fields['complaint']['valueString'] - content = fields['content']['valueString'] + summary = get_field_value(fields, 'summary') + satisfied = get_field_value(fields, 'satisfied') + sentiment = get_field_value(fields, 'sentiment') + topic = get_field_value(fields, 'topic') + key_phrases = get_field_value(fields, 'keyPhrases') + complaint = get_field_value(fields, 'complaint') + content = get_field_value(fields, 'content') cursor.execute( "INSERT INTO processed_data (ConversationId, EndTime, StartTime, Content, summary, satisfied, sentiment, topic, key_phrases, complaint) VALUES (?,?,?,?,?,?,?,?,?,?)", (conversation_id, end_timestamp, start_timestamp, content, summary, satisfied, sentiment, topic, key_phrases, complaint) @@ -421,19 +425,20 @@ def create_tables(): conversation_id = file_name.split('convo_', 1)[1].split('_')[0] conversationIds.append(conversation_id) - duration = int(result['result']['contents'][0]['fields']['Duration']['valueString']) + fields = result['result']['contents'][0]['fields'] + duration = int(get_field_value(fields, 'Duration', '0')) end_timestamp = str(start_timestamp + timedelta(seconds=duration)) end_timestamp = end_timestamp.split(".")[0] start_timestamp = str(start_timestamp).split(".")[0] - summary = 
result['result']['contents'][0]['fields']['summary']['valueString'] - satisfied = result['result']['contents'][0]['fields']['satisfied']['valueString'] - sentiment = result['result']['contents'][0]['fields']['sentiment']['valueString'] - topic = result['result']['contents'][0]['fields']['topic']['valueString'] - key_phrases = result['result']['contents'][0]['fields']['keyPhrases']['valueString'] - complaint = result['result']['contents'][0]['fields']['complaint']['valueString'] - content = result['result']['contents'][0]['fields']['content']['valueString'] - # print(topic) + summary = get_field_value(fields, 'summary') + satisfied = get_field_value(fields, 'satisfied') + sentiment = get_field_value(fields, 'sentiment') + topic = get_field_value(fields, 'topic') + key_phrases = get_field_value(fields, 'keyPhrases') + complaint = get_field_value(fields, 'complaint') + content = get_field_value(fields, 'content') + cursor.execute(f"INSERT INTO processed_data (ConversationId, EndTime, StartTime, Content, summary, satisfied, sentiment, topic, key_phrases, complaint) VALUES (?,?,?,?,?,?,?,?,?,?)", (conversation_id, end_timestamp, start_timestamp, content, summary, satisfied, sentiment, topic, key_phrases, complaint)) conn.commit() From 623ce407bcfe5dda1a8111f83b3797a128b3ddae Mon Sep 17 00:00:00 2001 From: Kevin Yang Date: Sun, 14 Dec 2025 12:09:46 +0800 Subject: [PATCH 08/10] Add try-except for duration parsing to handle ValueError and TypeError gracefully --- .../index_scripts/03_cu_process_data_text.py | 6 +++++- .../index_scripts/04_cu_process_custom_data.py | 16 +++++++++++++--- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/infra/scripts/index_scripts/03_cu_process_data_text.py b/infra/scripts/index_scripts/03_cu_process_data_text.py index 1e40c3fdd..0cabc52d0 100644 --- a/infra/scripts/index_scripts/03_cu_process_data_text.py +++ b/infra/scripts/index_scripts/03_cu_process_data_text.py @@ -330,7 +330,11 @@ def get_field_value(fields, field_name, 
default=""): conversation_id = file_name.split('convo_', 1)[1].split('_')[0] conversationIds.append(conversation_id) fields = result['result']['contents'][0]['fields'] - duration = int(get_field_value(fields, 'Duration', '0')) + duration_str = get_field_value(fields, 'Duration', '0') + try: + duration = int(duration_str) + except (ValueError, TypeError): + duration = 0 end_timestamp = str(start_timestamp + timedelta(seconds=duration)).split(".")[0] start_timestamp = str(start_timestamp).split(".")[0] summary = get_field_value(fields, 'summary') diff --git a/infra/scripts/index_scripts/04_cu_process_custom_data.py b/infra/scripts/index_scripts/04_cu_process_custom_data.py index b9724eb99..e33582d87 100644 --- a/infra/scripts/index_scripts/04_cu_process_custom_data.py +++ b/infra/scripts/index_scripts/04_cu_process_custom_data.py @@ -371,8 +371,14 @@ def get_field_value(fields, field_name, default=""): start_timestamp = datetime.strptime(start_time, timestamp_format) conversation_id = file_name.split('convo_', 1)[1].split('_')[0] conversationIds.append(conversation_id) + fields = result['result']['contents'][0]['fields'] - duration = int(get_field_value(fields, 'Duration', '0')) + duration_str = get_field_value(fields, 'Duration', '0') + try: + duration = int(duration_str) + except (ValueError, TypeError): + duration = 0 + end_timestamp = str(start_timestamp + timedelta(seconds=duration)).split(".")[0] start_timestamp = str(start_timestamp).split(".")[0] summary = get_field_value(fields, 'summary') @@ -422,11 +428,15 @@ def get_field_value(fields, field_name, default=""): timestamp_format = "%Y-%m-%d %H_%M_%S" # Adjust format if necessary start_timestamp = datetime.strptime(start_time, timestamp_format) - conversation_id = file_name.split('convo_', 1)[1].split('_')[0] conversationIds.append(conversation_id) fields = result['result']['contents'][0]['fields'] - duration = int(get_field_value(fields, 'Duration', '0')) + duration_str = get_field_value(fields, 'Duration', 
'0') + try: + duration = int(duration_str) + except (ValueError, TypeError): + duration = 0 + end_timestamp = str(start_timestamp + timedelta(seconds=duration)) end_timestamp = end_timestamp.split(".")[0] start_timestamp = str(start_timestamp).split(".")[0] From a0d1bd25c78cb3dfd5fcd1572d0ae5040c6dd6d6 Mon Sep 17 00:00:00 2001 From: Kevin Yang Date: Sun, 18 Jan 2026 00:41:21 +0800 Subject: [PATCH 09/10] Extract conversation ID from file name for improved data processing --- infra/scripts/index_scripts/04_cu_process_custom_data.py | 1 + 1 file changed, 1 insertion(+) diff --git a/infra/scripts/index_scripts/04_cu_process_custom_data.py b/infra/scripts/index_scripts/04_cu_process_custom_data.py index e33582d87..534a87ca8 100644 --- a/infra/scripts/index_scripts/04_cu_process_custom_data.py +++ b/infra/scripts/index_scripts/04_cu_process_custom_data.py @@ -428,6 +428,7 @@ def get_field_value(fields, field_name, default=""): timestamp_format = "%Y-%m-%d %H_%M_%S" # Adjust format if necessary start_timestamp = datetime.strptime(start_time, timestamp_format) + conversation_id = file_name.split('convo_', 1)[1].split('_')[0] conversationIds.append(conversation_id) fields = result['result']['contents'][0]['fields'] From edc704adbe3194495dda9826eaf55b32ccc86fb5 Mon Sep 17 00:00:00 2001 From: Pavan-Microsoft Date: Wed, 21 Jan 2026 13:31:47 +0530 Subject: [PATCH 10/10] docs: enhance data format examples in customization guide --- documents/CustomizeData.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/documents/CustomizeData.md b/documents/CustomizeData.md index af10bbf3a..998d74ef6 100644 --- a/documents/CustomizeData.md +++ b/documents/CustomizeData.md @@ -4,9 +4,10 @@ If you would like to update the solution to leverage your own data please follow > Note: you will need to complete the deployment steps [here](./DeploymentGuide.md) before proceeding. ## Prerequisites: -1. 
Your data will need to be in JSON or wav format with the file name formated prefixed with "convo" then a GUID followed by a timestamp.
-    * Example: convo_32e38683-bbf7-407e-a541-09b37b77921d_2024-12-07 04_00_00.wav
-
+1. Your data will need to be in JSON or wav format with the file name formatted: prefixed with "convo" then a GUID followed by a timestamp. For more examples of the data format, please review the sample transcripts and audio data included [here](/infra/data/telecom)
+    * Example:
+        * Transcripts: `convo_32e38683-bbf7-407e-a541-09b37b77921d_2024-12-07 04%3A00%3A00.json`
+        * Audio: `convo_2c703f97-6657-4a15-b8b2-db6b96630b2d_2024-12-06 06_00_00.wav`
 1. Navigate to the storage account in the resource group you are using for this solution.
 2. Open the `data` container