diff --git a/.github/workflows/reusable-revised-its.yml b/.github/workflows/reusable-revised-its.yml
index d9237a52abab..1aa29f64cb5b 100644
--- a/.github/workflows/reusable-revised-its.yml
+++ b/.github/workflows/reusable-revised-its.yml
@@ -57,6 +57,19 @@ on:
AWS_SECRET_ACCESS_KEY:
required: false
type: string
+ BACKWARD_COMPATIBILITY_IT_ENABLED:
+ required: false
+ type: string
+ default: 'false'
+ DRUID_PREVIOUS_VERSION:
+ required: false
+ type: string
+ DRUID_PREVIOUS_VERSION_DOWNLOAD_URL:
+ required: false
+ type: string
+ DRUID_PREVIOUS_IT_IMAGE_NAME:
+ required: false
+ type: string
env:
MYSQL_DRIVER_CLASSNAME: ${{ inputs.mysql_driver }} # Used by tests to connect to metadata store directly.
@@ -106,6 +119,15 @@ jobs:
./druid-container-jdk${{ inputs.build_jdk }}.tar.gz
./integration-tests-ex/image/target/env.sh
+ - name: Retrieve previous version cached docker image
+ id: docker-restore-previous-version
+ if: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED == 'true' }}
+ uses: actions/cache/restore@v4
+ with:
+ key: druid-container-jdk${{ inputs.build_jdk }}-version${{ inputs.DRUID_PREVIOUS_VERSION }}.tar.gz-${{ github.sha }}
+ path: |
+ ./druid-container-jdk${{ inputs.build_jdk }}-version${{ inputs.DRUID_PREVIOUS_VERSION }}.tar.gz
+
- name: Maven build
if: steps.maven-restore.outputs.cache-hit != 'true' || ( steps.docker-restore.outputs.cache-hit != 'true' && steps.targets-restore.outputs.cache-hit != 'true' )
run: |
@@ -115,6 +137,10 @@ jobs:
if: steps.docker-restore.outputs.cache-hit != 'true' || steps.maven-restore.outputs.cache-hit != 'true'
env:
docker-restore: ${{ toJson(steps.docker-restore.outputs) }}
+ BACKWARD_COMPATIBILITY_IT_ENABLED: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED }}
+ DRUID_PREVIOUS_VERSION: ${{ inputs.DRUID_PREVIOUS_VERSION }}
+ DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ inputs.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
+ DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ inputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
run: |
./it.sh image
source ./integration-tests-ex/image/target/env.sh
@@ -122,6 +148,15 @@ jobs:
echo $DRUID_IT_IMAGE_NAME
docker save "$DRUID_IT_IMAGE_NAME" | gzip > druid-container-jdk${{ inputs.build_jdk }}.tar.gz
+ - name: Save previous version docker image
+ if: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED == 'true' && (steps.docker-restore.outputs.cache-hit != 'true' || steps.maven-restore.outputs.cache-hit != 'true') }}
+ env:
+ docker-restore: ${{ toJson(steps.docker-restore.outputs) }}
+ run: |
+ docker tag ${{ inputs.DRUID_PREVIOUS_IT_IMAGE_NAME }} ${{ inputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}-jdk${{ inputs.build_jdk }}-version${{ inputs.DRUID_PREVIOUS_VERSION }}
+ echo ${{ inputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
+ docker save "${{ inputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}" | gzip > druid-container-jdk${{ inputs.build_jdk }}-version${{ inputs.DRUID_PREVIOUS_VERSION }}.tar.gz
+
- name: Stop and remove docker containers
run: |
echo "Force stopping all containers and pruning"
@@ -133,9 +168,21 @@ jobs:
docker load --input druid-container-jdk${{ inputs.build_jdk }}.tar.gz
docker images
+ - name: Load previous version docker image
+ if: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED == 'true' }}
+ run: |
+ docker load --input druid-container-jdk${{ inputs.build_jdk }}-version${{ inputs.DRUID_PREVIOUS_VERSION }}.tar.gz
+ docker images
+
- name: Run IT
id: run-it
- run: ${{ inputs.script }}
+ env:
+ BACKWARD_COMPATIBILITY_IT_ENABLED: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED }}
+ DRUID_PREVIOUS_VERSION: ${{ inputs.DRUID_PREVIOUS_VERSION }}
+ DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ inputs.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
+ DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ inputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
+ run: |
+ ${{ inputs.script }}
- name: Collect docker logs on failure
if: ${{ failure() && steps.run-it.conclusion == 'failure' }}
diff --git a/.github/workflows/revised-its.yml b/.github/workflows/revised-its.yml
index 069562bf7bd3..0acb3a2daec5 100644
--- a/.github/workflows/revised-its.yml
+++ b/.github/workflows/revised-its.yml
@@ -18,6 +18,24 @@
name: "Revised ITs workflow"
on:
workflow_call:
+ inputs:
+ BACKWARD_COMPATIBILITY_IT_ENABLED:
+ description: "Flag for backward compatibility IT"
+ required: false
+ default: 'false'
+ type: string
+ DRUID_PREVIOUS_VERSION:
+ description: "Previous druid versions to run the test against."
+ required: false
+ type: string
+ DRUID_PREVIOUS_VERSION_DOWNLOAD_URL:
+ description: "URL to download the previous druid version."
+ required: false
+ type: string
+ DRUID_PREVIOUS_IT_IMAGE_NAME:
+ description: "Druid previous version image name."
+ required: false
+ type: string
workflow_dispatch:
jobs:
@@ -79,3 +97,19 @@ jobs:
AWS_REGION: us-east-1
AWS_ACCESS_KEY_ID: admin
AWS_SECRET_ACCESS_KEY: miniopassword
+
+ backward-compatibility-it:
+ needs: changes
+ uses: ./.github/workflows/reusable-revised-its.yml
+ if: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED == 'true' && (needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true') }}
+ with:
+ build_jdk: 8
+ runtime_jdk: 8
+ use_indexer: MiddleManager
+ script: ./it.sh github BackwardCompatibility
+ it: BackwardCompatibility
+ mysql_driver: com.mysql.jdbc.Driver
+ BACKWARD_COMPATIBILITY_IT_ENABLED: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED }}
+ DRUID_PREVIOUS_VERSION: ${{ inputs.DRUID_PREVIOUS_VERSION }}
+ DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ inputs.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
+ DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ inputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
diff --git a/.github/workflows/unit-and-integration-tests-unified.yml b/.github/workflows/unit-and-integration-tests-unified.yml
index 9651a56b8cb7..f3d9cb40555f 100644
--- a/.github/workflows/unit-and-integration-tests-unified.yml
+++ b/.github/workflows/unit-and-integration-tests-unified.yml
@@ -47,9 +47,36 @@ concurrency:
env:
MYSQL_DRIVER_CLASSNAME: com.mysql.jdbc.Driver # Used to set druid config in docker image for revised ITs
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 5
+ DRUID_PREVIOUS_VERSION: 30.0.0
+ DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: https://dlcdn.apache.org/druid/30.0.0/apache-druid-30.0.0-bin.tar.gz
+ #BACKWARD_COMPATIBILITY_IT_ENABLED: true
+ #DRUID_PREVIOUS_IT_IMAGE_NAME: org.apache.druid.integration-tests/test:30.0.0
jobs:
+ set-env-var:
+ name: Set env var
+ runs-on: ubuntu-latest
+ outputs:
+ DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ steps.image_name.outputs.image_name }}
+ BACKWARD_COMPATIBILITY_IT_ENABLED: ${{ steps.it_enabled.outputs.enabled }}
+ DRUID_PREVIOUS_VERSION: ${{ env.DRUID_PREVIOUS_VERSION }}
+ DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ env.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
+ steps:
+ - name: Set image name env var
+ id: image_name
+ run: |
+ echo "::set-output name=image_name::org.apache.druid.integration-tests/test:${{ env.DRUID_PREVIOUS_VERSION }}"
+ - name: Set env for enabling backward compatibility it
+ id: it_enabled
+ run: |
+ if [ -n "${{ env.DRUID_PREVIOUS_VERSION }}" ]; then
+ echo "::set-output name=enabled::true"
+ else
+ echo "::set-output name=enabled::false"
+ fi
+
build:
+ needs: set-env-var
name: "build (jdk${{ matrix.jdk }})"
strategy:
fail-fast: false
@@ -94,12 +121,25 @@ jobs:
./druid-container-jdk${{ matrix.jdk }}.tar.gz
./integration-tests-ex/image/target/env.sh
+ - name: Cache previous version image
+ id: docker_container_previous_version
+ uses: actions/cache@v4
+ with:
+ key: druid-container-jdk${{ matrix.jdk }}-version${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION }}.tar.gz-${{ github.sha }}
+ path: |
+ ./druid-container-jdk${{ matrix.jdk }}-version${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION }}.tar.gz
+
- name: Maven build
id: maven_build
run: |
./it.sh ci
- name: Container build
+ env:
+ BACKWARD_COMPATIBILITY_IT_ENABLED: ${{ needs.set-env-var.outputs.BACKWARD_COMPATIBILITY_IT_ENABLED }}
+ DRUID_PREVIOUS_VERSION: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION }}
+ DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
+ DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
run: |
./it.sh image
source ./integration-tests-ex/image/target/env.sh
@@ -111,6 +151,13 @@ jobs:
echo $DRUID_IT_IMAGE_NAME
docker save "$DRUID_IT_IMAGE_NAME" | gzip > druid-container-jdk${{ matrix.jdk }}.tar.gz
+ - name: Save previous version docker image
+ if: ${{ needs.set-env-var.outputs.BACKWARD_COMPATIBILITY_IT_ENABLED == 'true' }}
+ run: |
+ docker tag ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }} ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}-jdk${{ matrix.jdk }}-version${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION }}
+ echo ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
+ docker save "${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}" | gzip > druid-container-jdk${{ matrix.jdk }}-version${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION }}.tar.gz
+
unit-tests-phase2:
strategy:
fail-fast: false
@@ -141,7 +188,34 @@ jobs:
if: ${{ always() && (needs.unit-tests.result == 'success' || needs.unit-tests.outputs.continue_tests) }}
uses: ./.github/workflows/standard-its.yml
+# expose-vars:
+# runs-on: ubuntu-latest
+# outputs:
+# DRUID_PREVIOUS_VERSION: ${{ env.DRUID_PREVIOUS_VERSION }}
+# DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ env.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
+# BACKWARD_COMPATIBILITY_IT_ENABLED: $BACKWARD_COMPATIBILITY_IT_ENABLED
+# DRUID_PREVIOUS_IT_IMAGE_NAME: $DRUID_PREVIOUS_IT_IMAGE_NAME
+# steps:
+# - run: echo "Exposing env vars"
+
+ print-env-vars:
+ needs: set-env-var
+ runs-on: ubuntu-latest
+ steps:
+ - name: print env vars
+ id: test
+ run: |
+ echo ${{ needs.set-env-var.outputs.BACKWARD_COMPATIBILITY_IT_ENABLED }}
+ echo ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION }}
+ echo ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
+ echo ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
+
revised-its:
- needs: unit-tests
+ needs: [unit-tests, set-env-var]
if: ${{ always() && (needs.unit-tests.result == 'success' || needs.unit-tests.outputs.continue_tests) }}
uses: ./.github/workflows/revised-its.yml
+ with:
+ BACKWARD_COMPATIBILITY_IT_ENABLED: ${{ needs.set-env-var.outputs.BACKWARD_COMPATIBILITY_IT_ENABLED }}
+ DRUID_PREVIOUS_VERSION: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION }}
+ DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
+ DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
diff --git a/integration-tests-ex/cases/cluster/BackwardCompatibility/docker-compose.yaml b/integration-tests-ex/cases/cluster/BackwardCompatibility/docker-compose.yaml
new file mode 100644
index 000000000000..fe71ad2b25b2
--- /dev/null
+++ b/integration-tests-ex/cases/cluster/BackwardCompatibility/docker-compose.yaml
@@ -0,0 +1,107 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+networks:
+ druid-it-net:
+ name: druid-it-net
+ ipam:
+ config:
+ - subnet: 172.172.172.0/24
+
+services:
+ zookeeper:
+ extends:
+ file: ../Common/dependencies.yaml
+ service: zookeeper
+
+ metadata:
+ extends:
+ file: ../Common/dependencies.yaml
+ service: metadata
+
+ coordinator:
+ extends:
+ file: ../Common/druid.yaml
+ service: coordinator
+ image: ${DRUID_PREVIOUS_IT_IMAGE_NAME}
+ container_name: coordinator
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ # The frequency with which the coordinator polls the database
+ # for changes. The DB population code has to wait at least this
+ # long for the coordinator to notice changes.
+ - druid_manager_segments_pollDuration=PT5S
+ - druid_coordinator_period=PT10S
+ depends_on:
+ - zookeeper
+ - metadata
+
+ overlord:
+ extends:
+ file: ../Common/druid.yaml
+ service: overlord
+ image: ${DRUID_PREVIOUS_IT_IMAGE_NAME}
+ container_name: overlord
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - zookeeper
+ - metadata
+
+ broker:
+ extends:
+ file: ../Common/druid.yaml
+ service: broker
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - zookeeper
+
+ router:
+ extends:
+ file: ../Common/druid.yaml
+ service: router
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - zookeeper
+
+ historical:
+ extends:
+ file: ../Common/druid.yaml
+ service: historical
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - zookeeper
+
+ middlemanager:
+ extends:
+ file: ../Common/druid.yaml
+ service: middlemanager
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ volumes:
+ # Test data
+ - ../../resources:/resources
+ depends_on:
+ - zookeeper
+
+ kafka:
+ extends:
+ file: ../Common/dependencies.yaml
+ service: kafka
+ depends_on:
+ - zookeeper
diff --git a/integration-tests-ex/cases/pom.xml b/integration-tests-ex/cases/pom.xml
index 40461dd8ef17..a07dad2d14fe 100644
--- a/integration-tests-ex/cases/pom.xml
+++ b/integration-tests-ex/cases/pom.xml
@@ -459,6 +459,15 @@
<it.category>GcsDeepStorage</it.category>
+ <profile>
+ <id>IT-BackwardCompatibility</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
+ <properties>
+ <it.category>BackwardCompatibility</it.category>
+ </properties>
+ </profile>
<profile>
<id>docker-tests</id>
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/BackwardCompatibility/ITBackwardCompatibilityIndexerTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/BackwardCompatibility/ITBackwardCompatibilityIndexerTest.java
new file mode 100644
index 000000000000..7d07d518922e
--- /dev/null
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/BackwardCompatibility/ITBackwardCompatibilityIndexerTest.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testsEx.BackwardCompatibility;
+
+import org.apache.druid.testsEx.categories.BackwardCompatibility;
+import org.apache.druid.testsEx.config.DruidTestRunner;
+import org.apache.druid.testsEx.indexer.IndexerTest;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+
+@RunWith(DruidTestRunner.class)
+@Category({BackwardCompatibility.class})
+public class ITBackwardCompatibilityIndexerTest extends IndexerTest
+{
+}
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/categories/BackwardCompatibility.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/categories/BackwardCompatibility.java
new file mode 100644
index 000000000000..7e357e6df8f0
--- /dev/null
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/categories/BackwardCompatibility.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testsEx.categories;
+
+public class BackwardCompatibility
+{
+}
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITIndexerTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITIndexerTest.java
index 65b8dc0b1ac0..06a097d608f9 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITIndexerTest.java
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/ITIndexerTest.java
@@ -19,368 +19,13 @@
package org.apache.druid.testsEx.indexer;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.google.inject.Inject;
-import org.apache.druid.java.util.common.Intervals;
-import org.apache.druid.java.util.common.Pair;
-import org.apache.druid.java.util.common.StringUtils;
-import org.apache.druid.server.coordinator.CoordinatorDynamicConfig;
-import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
-import org.apache.druid.testing.utils.ITRetryUtil;
import org.apache.druid.testsEx.categories.BatchIndex;
import org.apache.druid.testsEx.config.DruidTestRunner;
-import org.joda.time.Interval;
-import org.junit.Assert;
-import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
-import java.io.Closeable;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.function.Function;
-
@RunWith(DruidTestRunner.class)
-@Category(BatchIndex.class)
-public class ITIndexerTest extends AbstractITBatchIndexTest
+@Category({BatchIndex.class})
+public class ITIndexerTest extends IndexerTest
{
- private static final String INDEX_TASK = "/indexer/wikipedia_index_task.json";
- private static final String INDEX_QUERIES_RESOURCE = "/indexer/wikipedia_index_queries.json";
- private static final String INDEX_DATASOURCE = "wikipedia_index_test";
-
- private static final String INDEX_WITH_TIMESTAMP_TASK = "/indexer/wikipedia_with_timestamp_index_task.json";
- // TODO: add queries that validate timestamp is different from the __time column since it is a dimension
- // TODO: https://github.com/apache/druid/issues/9565
- private static final String INDEX_WITH_TIMESTAMP_QUERIES_RESOURCE = "/indexer/wikipedia_index_queries.json";
- private static final String INDEX_WITH_TIMESTAMP_DATASOURCE = "wikipedia_with_timestamp_index_test";
-
- private static final String REINDEX_TASK = "/indexer/wikipedia_reindex_task.json";
- private static final String REINDEX_TASK_WITH_DRUID_INPUT_SOURCE = "/indexer/wikipedia_reindex_druid_input_source_task.json";
- private static final String REINDEX_QUERIES_RESOURCE = "/indexer/wikipedia_reindex_queries.json";
- private static final String REINDEX_DATASOURCE = "wikipedia_reindex_test";
-
- private static final String MERGE_INDEX_TASK = "/indexer/wikipedia_merge_index_task.json";
- private static final String MERGE_INDEX_QUERIES_RESOURCE = "/indexer/wikipedia_merge_index_queries.json";
- private static final String MERGE_INDEX_DATASOURCE = "wikipedia_merge_index_test";
-
- private static final String MERGE_REINDEX_TASK = "/indexer/wikipedia_merge_reindex_task.json";
- private static final String MERGE_REINDEX_TASK_WITH_DRUID_INPUT_SOURCE = "/indexer/wikipedia_merge_reindex_druid_input_source_task.json";
- private static final String MERGE_REINDEX_QUERIES_RESOURCE = "/indexer/wikipedia_merge_index_queries.json";
- private static final String MERGE_REINDEX_DATASOURCE = "wikipedia_merge_reindex_test";
-
- private static final String INDEX_WITH_MERGE_COLUMN_LIMIT_TASK = "/indexer/wikipedia_index_with_merge_column_limit_task.json";
- private static final String INDEX_WITH_MERGE_COLUMN_LIMIT_DATASOURCE = "wikipedia_index_with_merge_column_limit_test";
-
- private static final String GET_LOCKED_INTERVALS = "wikipedia_index_get_locked_intervals_test";
-
- private static final CoordinatorDynamicConfig DYNAMIC_CONFIG_PAUSED =
- CoordinatorDynamicConfig.builder().withPauseCoordination(true).build();
- private static final CoordinatorDynamicConfig DYNAMIC_CONFIG_DEFAULT =
- CoordinatorDynamicConfig.builder().build();
-
- @Inject
- CoordinatorResourceTestClient coordinatorClient;
-
- @Test
- public void testIndexData() throws Exception
- {
- final String reindexDatasource = REINDEX_DATASOURCE + "-testIndexData";
- final String reindexDatasourceWithDruidInputSource = REINDEX_DATASOURCE + "-testIndexData-druidInputSource";
- try (
- final Closeable ignored1 = unloader(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
- final Closeable ignored2 = unloader(reindexDatasource + config.getExtraDatasourceNameSuffix());
- final Closeable ignored3 = unloader(reindexDatasourceWithDruidInputSource + config.getExtraDatasourceNameSuffix())
- ) {
-
- final Function<String, String> transform = spec -> {
- try {
- return StringUtils.replace(
- spec,
- "%%SEGMENT_AVAIL_TIMEOUT_MILLIS%%",
- jsonMapper.writeValueAsString("0")
- );
- }
- catch (JsonProcessingException e) {
- throw new RuntimeException(e);
- }
- };
-
- doIndexTest(
- INDEX_DATASOURCE,
- INDEX_TASK,
- transform,
- INDEX_QUERIES_RESOURCE,
- false,
- true,
- true,
- new Pair<>(false, false)
- );
- doReindexTest(
- INDEX_DATASOURCE,
- reindexDatasource,
- REINDEX_TASK,
- REINDEX_QUERIES_RESOURCE,
- new Pair<>(false, false)
- );
- doReindexTest(
- INDEX_DATASOURCE,
- reindexDatasourceWithDruidInputSource,
- REINDEX_TASK_WITH_DRUID_INPUT_SOURCE,
- REINDEX_QUERIES_RESOURCE,
- new Pair<>(false, false)
- );
- }
- }
-
- @Test
- public void testReIndexDataWithTimestamp() throws Exception
- {
- final String reindexDatasource = REINDEX_DATASOURCE + "-testReIndexDataWithTimestamp";
- final String reindexDatasourceWithDruidInputSource = REINDEX_DATASOURCE + "-testReIndexDataWithTimestamp-druidInputSource";
- try (
- final Closeable ignored1 = unloader(INDEX_WITH_TIMESTAMP_DATASOURCE + config.getExtraDatasourceNameSuffix());
- final Closeable ignored2 = unloader(reindexDatasource + config.getExtraDatasourceNameSuffix());
- final Closeable ignored3 = unloader(reindexDatasourceWithDruidInputSource + config.getExtraDatasourceNameSuffix())
- ) {
- doIndexTest(
- INDEX_WITH_TIMESTAMP_DATASOURCE,
- INDEX_WITH_TIMESTAMP_TASK,
- INDEX_WITH_TIMESTAMP_QUERIES_RESOURCE,
- false,
- true,
- true,
- new Pair<>(false, false)
- );
- doReindexTest(
- INDEX_WITH_TIMESTAMP_DATASOURCE,
- reindexDatasource,
- REINDEX_TASK,
- REINDEX_QUERIES_RESOURCE,
- new Pair<>(false, false)
- );
- doReindexTest(
- INDEX_WITH_TIMESTAMP_DATASOURCE,
- reindexDatasourceWithDruidInputSource,
- REINDEX_TASK_WITH_DRUID_INPUT_SOURCE,
- REINDEX_QUERIES_RESOURCE,
- new Pair<>(false, false)
- );
- }
- }
-
- @Test
- public void testReIndexWithNonExistingDatasource() throws Exception
- {
- Pair<Boolean, Boolean> dummyPair = new Pair<>(false, false);
- final String fullBaseDatasourceName = "nonExistingDatasource2904";
- final String fullReindexDatasourceName = "newDatasource123";
-
- String taskSpec = StringUtils.replace(
- getResourceAsString(REINDEX_TASK_WITH_DRUID_INPUT_SOURCE),
- "%%DATASOURCE%%",
- fullBaseDatasourceName
- );
- taskSpec = StringUtils.replace(
- taskSpec,
- "%%REINDEX_DATASOURCE%%",
- fullReindexDatasourceName
- );
-
- // This method will also verify task is successful after task finish running
- // We expect task to be successful even if the datasource to reindex does not exist
- submitTaskAndWait(
- taskSpec,
- fullReindexDatasourceName,
- false,
- false,
- dummyPair
- );
- }
-
- @Test
- public void testMERGEIndexData() throws Exception
- {
- final String reindexDatasource = MERGE_REINDEX_DATASOURCE + "-testMergeIndexData";
- final String reindexDatasourceWithDruidInputSource = MERGE_REINDEX_DATASOURCE + "-testMergeReIndexData-druidInputSource";
- try (
- final Closeable ignored1 = unloader(MERGE_INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
- final Closeable ignored2 = unloader(reindexDatasource + config.getExtraDatasourceNameSuffix());
- final Closeable ignored3 = unloader(reindexDatasourceWithDruidInputSource + config.getExtraDatasourceNameSuffix())
- ) {
- doIndexTest(
- MERGE_INDEX_DATASOURCE,
- MERGE_INDEX_TASK,
- MERGE_INDEX_QUERIES_RESOURCE,
- false,
- true,
- true,
- new Pair<>(false, false)
- );
- doReindexTest(
- MERGE_INDEX_DATASOURCE,
- reindexDatasource,
- MERGE_REINDEX_TASK,
- MERGE_REINDEX_QUERIES_RESOURCE,
- new Pair<>(false, false)
- );
- doReindexTest(
- MERGE_INDEX_DATASOURCE,
- reindexDatasourceWithDruidInputSource,
- MERGE_REINDEX_TASK_WITH_DRUID_INPUT_SOURCE,
- MERGE_INDEX_QUERIES_RESOURCE,
- new Pair<>(false, false)
- );
- }
- }
-
- /**
- * Test that task reports indicate the ingested segments were loaded before the configured timeout expired.
- *
- * @throws Exception
- */
- @Test
- public void testIndexDataAwaitSegmentAvailability() throws Exception
- {
- try (
- final Closeable ignored1 = unloader(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
- ) {
- final Function<String, String> transform = spec -> {
- try {
- return StringUtils.replace(
- spec,
- "%%SEGMENT_AVAIL_TIMEOUT_MILLIS%%",
- jsonMapper.writeValueAsString("600000")
- );
- }
- catch (JsonProcessingException e) {
- throw new RuntimeException(e);
- }
- };
-
- doIndexTest(
- INDEX_DATASOURCE,
- INDEX_TASK,
- transform,
- INDEX_QUERIES_RESOURCE,
- false,
- true,
- true,
- new Pair<>(true, true)
- );
- }
- }
-
- /**
- * Test that the task still succeeds if the segments do not become available before the configured wait timeout
- * expires.
- *
- * @throws Exception
- */
- @Test
- public void testIndexDataAwaitSegmentAvailabilityFailsButTaskSucceeds() throws Exception
- {
- try (
- final Closeable ignored1 = unloader(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
- ) {
- coordinatorClient.postDynamicConfig(DYNAMIC_CONFIG_PAUSED);
- final Function<String, String> transform = spec -> {
- try {
- return StringUtils.replace(
- spec,
- "%%SEGMENT_AVAIL_TIMEOUT_MILLIS%%",
- jsonMapper.writeValueAsString("1")
- );
- }
- catch (JsonProcessingException e) {
- throw new RuntimeException(e);
- }
- };
-
- doIndexTest(
- INDEX_DATASOURCE,
- INDEX_TASK,
- transform,
- INDEX_QUERIES_RESOURCE,
- false,
- false,
- false,
- new Pair<>(true, false)
- );
- coordinatorClient.postDynamicConfig(DYNAMIC_CONFIG_DEFAULT);
- ITRetryUtil.retryUntilTrue(
- () -> coordinator.areSegmentsLoaded(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix()), "Segment Load"
- );
- }
- }
-
-
- @Test
- public void testIndexWithMergeColumnLimitData() throws Exception
- {
- try (
- final Closeable ignored1 = unloader(INDEX_WITH_MERGE_COLUMN_LIMIT_DATASOURCE + config.getExtraDatasourceNameSuffix());
- ) {
- doIndexTest(
- INDEX_WITH_MERGE_COLUMN_LIMIT_DATASOURCE,
- INDEX_WITH_MERGE_COLUMN_LIMIT_TASK,
- INDEX_QUERIES_RESOURCE,
- false,
- true,
- true,
- new Pair<>(false, false)
- );
- }
- }
-
- @Test
- public void testGetLockedIntervals() throws Exception
- {
- final String datasourceName = GET_LOCKED_INTERVALS + config.getExtraDatasourceNameSuffix();
- try (final Closeable ignored = unloader(datasourceName)) {
- // Submit an Indexing Task
- submitIndexTask(INDEX_TASK, datasourceName);
-
- // Wait until it acquires a lock
- final Map<String, Integer> minTaskPriority = Collections.singletonMap(datasourceName, 0);
- final Map<String, List<Interval>> lockedIntervals = new HashMap<>();
- ITRetryUtil.retryUntilFalse(
- () -> {
- lockedIntervals.clear();
- lockedIntervals.putAll(indexer.getLockedIntervals(minTaskPriority));
- return lockedIntervals.isEmpty();
- },
- "Verify Intervals are Locked"
- );
-
- // Verify the locked intervals for this datasource
- Assert.assertEquals(lockedIntervals.size(), 1);
- Assert.assertEquals(
- lockedIntervals.get(datasourceName),
- Collections.singletonList(Intervals.of("2013-08-31/2013-09-02"))
- );
-
- ITRetryUtil.retryUntilTrue(
- () -> coordinator.areSegmentsLoaded(datasourceName),
- "Segment Load"
- );
- }
- }
-
- @Test
- public void testJsonFunctions() throws Exception
- {
- final String taskSpec = getResourceAsString("/indexer/json_path_index_task.json");
-
- submitTaskAndWait(
- taskSpec,
- "json_path_index_test",
- false,
- true,
- new Pair<>(false, false)
- );
-
- doTestQuery("json_path_index_test", "/indexer/json_path_index_queries.json");
- }
}
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/IndexerTest.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/IndexerTest.java
new file mode 100644
index 000000000000..ff30110f8ae1
--- /dev/null
+++ b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/indexer/IndexerTest.java
@@ -0,0 +1,380 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testsEx.indexer;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.google.inject.Inject;
+import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.java.util.common.Pair;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.server.coordinator.CoordinatorDynamicConfig;
+import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
+import org.apache.druid.testing.utils.ITRetryUtil;
+import org.joda.time.Interval;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.Closeable;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+
+public class IndexerTest extends AbstractITBatchIndexTest
+{
+ private static final String INDEX_TASK = "/indexer/wikipedia_index_task.json";
+ private static final String INDEX_QUERIES_RESOURCE = "/indexer/wikipedia_index_queries.json";
+ private static final String INDEX_DATASOURCE = "wikipedia_index_test";
+
+ private static final String INDEX_WITH_TIMESTAMP_TASK = "/indexer/wikipedia_with_timestamp_index_task.json";
+ // TODO: add queries that validate timestamp is different from the __time column since it is a dimension
+ // TODO: https://github.com/apache/druid/issues/9565
+ private static final String INDEX_WITH_TIMESTAMP_QUERIES_RESOURCE = "/indexer/wikipedia_index_queries.json";
+ private static final String INDEX_WITH_TIMESTAMP_DATASOURCE = "wikipedia_with_timestamp_index_test";
+
+ private static final String REINDEX_TASK = "/indexer/wikipedia_reindex_task.json";
+ private static final String REINDEX_TASK_WITH_DRUID_INPUT_SOURCE = "/indexer/wikipedia_reindex_druid_input_source_task.json";
+ private static final String REINDEX_QUERIES_RESOURCE = "/indexer/wikipedia_reindex_queries.json";
+ private static final String REINDEX_DATASOURCE = "wikipedia_reindex_test";
+
+ private static final String MERGE_INDEX_TASK = "/indexer/wikipedia_merge_index_task.json";
+ private static final String MERGE_INDEX_QUERIES_RESOURCE = "/indexer/wikipedia_merge_index_queries.json";
+ private static final String MERGE_INDEX_DATASOURCE = "wikipedia_merge_index_test";
+
+ private static final String MERGE_REINDEX_TASK = "/indexer/wikipedia_merge_reindex_task.json";
+ private static final String MERGE_REINDEX_TASK_WITH_DRUID_INPUT_SOURCE = "/indexer/wikipedia_merge_reindex_druid_input_source_task.json";
+ private static final String MERGE_REINDEX_QUERIES_RESOURCE = "/indexer/wikipedia_merge_index_queries.json";
+ private static final String MERGE_REINDEX_DATASOURCE = "wikipedia_merge_reindex_test";
+
+ private static final String INDEX_WITH_MERGE_COLUMN_LIMIT_TASK = "/indexer/wikipedia_index_with_merge_column_limit_task.json";
+ private static final String INDEX_WITH_MERGE_COLUMN_LIMIT_DATASOURCE = "wikipedia_index_with_merge_column_limit_test";
+
+ private static final String GET_LOCKED_INTERVALS = "wikipedia_index_get_locked_intervals_test";
+
+ private static final CoordinatorDynamicConfig DYNAMIC_CONFIG_PAUSED =
+ CoordinatorDynamicConfig.builder().withPauseCoordination(true).build();
+ private static final CoordinatorDynamicConfig DYNAMIC_CONFIG_DEFAULT =
+ CoordinatorDynamicConfig.builder().build();
+
+ @Inject
+ CoordinatorResourceTestClient coordinatorClient;
+
+ @Test
+ public void testIndexData() throws Exception
+ {
+ final String reindexDatasource = REINDEX_DATASOURCE + "-testIndexData";
+ final String reindexDatasourceWithDruidInputSource = REINDEX_DATASOURCE + "-testIndexData-druidInputSource";
+ try (
+ final Closeable ignored1 = unloader(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
+ final Closeable ignored2 = unloader(reindexDatasource + config.getExtraDatasourceNameSuffix());
+ final Closeable ignored3 = unloader(reindexDatasourceWithDruidInputSource + config.getExtraDatasourceNameSuffix())
+ ) {
+
+ final Function<String, String> transform = spec -> {
+ try {
+ return StringUtils.replace(
+ spec,
+ "%%SEGMENT_AVAIL_TIMEOUT_MILLIS%%",
+ jsonMapper.writeValueAsString("0")
+ );
+ }
+ catch (JsonProcessingException e) {
+ throw new RuntimeException(e);
+ }
+ };
+
+ doIndexTest(
+ INDEX_DATASOURCE,
+ INDEX_TASK,
+ transform,
+ INDEX_QUERIES_RESOURCE,
+ false,
+ true,
+ true,
+ new Pair<>(false, false)
+ );
+ doReindexTest(
+ INDEX_DATASOURCE,
+ reindexDatasource,
+ REINDEX_TASK,
+ REINDEX_QUERIES_RESOURCE,
+ new Pair<>(false, false)
+ );
+ doReindexTest(
+ INDEX_DATASOURCE,
+ reindexDatasourceWithDruidInputSource,
+ REINDEX_TASK_WITH_DRUID_INPUT_SOURCE,
+ REINDEX_QUERIES_RESOURCE,
+ new Pair<>(false, false)
+ );
+ }
+ }
+
+ @Test
+ public void testReIndexDataWithTimestamp() throws Exception
+ {
+ final String reindexDatasource = REINDEX_DATASOURCE + "-testReIndexDataWithTimestamp";
+ final String reindexDatasourceWithDruidInputSource = REINDEX_DATASOURCE + "-testReIndexDataWithTimestamp-druidInputSource";
+ try (
+ final Closeable ignored1 = unloader(INDEX_WITH_TIMESTAMP_DATASOURCE + config.getExtraDatasourceNameSuffix());
+ final Closeable ignored2 = unloader(reindexDatasource + config.getExtraDatasourceNameSuffix());
+ final Closeable ignored3 = unloader(reindexDatasourceWithDruidInputSource + config.getExtraDatasourceNameSuffix())
+ ) {
+ doIndexTest(
+ INDEX_WITH_TIMESTAMP_DATASOURCE,
+ INDEX_WITH_TIMESTAMP_TASK,
+ INDEX_WITH_TIMESTAMP_QUERIES_RESOURCE,
+ false,
+ true,
+ true,
+ new Pair<>(false, false)
+ );
+ doReindexTest(
+ INDEX_WITH_TIMESTAMP_DATASOURCE,
+ reindexDatasource,
+ REINDEX_TASK,
+ REINDEX_QUERIES_RESOURCE,
+ new Pair<>(false, false)
+ );
+ doReindexTest(
+ INDEX_WITH_TIMESTAMP_DATASOURCE,
+ reindexDatasourceWithDruidInputSource,
+ REINDEX_TASK_WITH_DRUID_INPUT_SOURCE,
+ REINDEX_QUERIES_RESOURCE,
+ new Pair<>(false, false)
+ );
+ }
+ }
+
+ @Test
+ public void testReIndexWithNonExistingDatasource() throws Exception
+ {
+ Pair<Boolean, Boolean> dummyPair = new Pair<>(false, false);
+ final String fullBaseDatasourceName = "nonExistingDatasource2904";
+ final String fullReindexDatasourceName = "newDatasource123";
+
+ String taskSpec = StringUtils.replace(
+ getResourceAsString(REINDEX_TASK_WITH_DRUID_INPUT_SOURCE),
+ "%%DATASOURCE%%",
+ fullBaseDatasourceName
+ );
+ taskSpec = StringUtils.replace(
+ taskSpec,
+ "%%REINDEX_DATASOURCE%%",
+ fullReindexDatasourceName
+ );
+
+ // This method will also verify task is successful after task finish running
+ // We expect task to be successful even if the datasource to reindex does not exist
+ submitTaskAndWait(
+ taskSpec,
+ fullReindexDatasourceName,
+ false,
+ false,
+ dummyPair
+ );
+ }
+
+ @Test
+ public void testMERGEIndexData() throws Exception
+ {
+ final String reindexDatasource = MERGE_REINDEX_DATASOURCE + "-testMergeIndexData";
+ final String reindexDatasourceWithDruidInputSource = MERGE_REINDEX_DATASOURCE + "-testMergeReIndexData-druidInputSource";
+ try (
+ final Closeable ignored1 = unloader(MERGE_INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
+ final Closeable ignored2 = unloader(reindexDatasource + config.getExtraDatasourceNameSuffix());
+ final Closeable ignored3 = unloader(reindexDatasourceWithDruidInputSource + config.getExtraDatasourceNameSuffix())
+ ) {
+ doIndexTest(
+ MERGE_INDEX_DATASOURCE,
+ MERGE_INDEX_TASK,
+ MERGE_INDEX_QUERIES_RESOURCE,
+ false,
+ true,
+ true,
+ new Pair<>(false, false)
+ );
+ doReindexTest(
+ MERGE_INDEX_DATASOURCE,
+ reindexDatasource,
+ MERGE_REINDEX_TASK,
+ MERGE_REINDEX_QUERIES_RESOURCE,
+ new Pair<>(false, false)
+ );
+ doReindexTest(
+ MERGE_INDEX_DATASOURCE,
+ reindexDatasourceWithDruidInputSource,
+ MERGE_REINDEX_TASK_WITH_DRUID_INPUT_SOURCE,
+ MERGE_INDEX_QUERIES_RESOURCE,
+ new Pair<>(false, false)
+ );
+ }
+ }
+
+ /**
+ * Test that task reports indicate the ingested segments were loaded before the configured timeout expired.
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testIndexDataAwaitSegmentAvailability() throws Exception
+ {
+ try (
+ final Closeable ignored1 = unloader(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
+ ) {
+ final Function<String, String> transform = spec -> {
+ try {
+ return StringUtils.replace(
+ spec,
+ "%%SEGMENT_AVAIL_TIMEOUT_MILLIS%%",
+ jsonMapper.writeValueAsString("600000")
+ );
+ }
+ catch (JsonProcessingException e) {
+ throw new RuntimeException(e);
+ }
+ };
+
+ doIndexTest(
+ INDEX_DATASOURCE,
+ INDEX_TASK,
+ transform,
+ INDEX_QUERIES_RESOURCE,
+ false,
+ true,
+ true,
+ new Pair<>(true, true)
+ );
+ }
+ }
+
+ /**
+ * Test that the task still succeeds if the segments do not become available before the configured wait timeout
+ * expires.
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testIndexDataAwaitSegmentAvailabilityFailsButTaskSucceeds() throws Exception
+ {
+ try (
+ final Closeable ignored1 = unloader(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
+ ) {
+ coordinatorClient.postDynamicConfig(DYNAMIC_CONFIG_PAUSED);
+ final Function<String, String> transform = spec -> {
+ try {
+ return StringUtils.replace(
+ spec,
+ "%%SEGMENT_AVAIL_TIMEOUT_MILLIS%%",
+ jsonMapper.writeValueAsString("1")
+ );
+ }
+ catch (JsonProcessingException e) {
+ throw new RuntimeException(e);
+ }
+ };
+
+ doIndexTest(
+ INDEX_DATASOURCE,
+ INDEX_TASK,
+ transform,
+ INDEX_QUERIES_RESOURCE,
+ false,
+ false,
+ false,
+ new Pair<>(true, false)
+ );
+ coordinatorClient.postDynamicConfig(DYNAMIC_CONFIG_DEFAULT);
+ ITRetryUtil.retryUntilTrue(
+ () -> coordinator.areSegmentsLoaded(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix()), "Segment Load"
+ );
+ }
+ }
+
+
+ @Test
+ public void testIndexWithMergeColumnLimitData() throws Exception
+ {
+ try (
+ final Closeable ignored1 = unloader(INDEX_WITH_MERGE_COLUMN_LIMIT_DATASOURCE + config.getExtraDatasourceNameSuffix());
+ ) {
+ doIndexTest(
+ INDEX_WITH_MERGE_COLUMN_LIMIT_DATASOURCE,
+ INDEX_WITH_MERGE_COLUMN_LIMIT_TASK,
+ INDEX_QUERIES_RESOURCE,
+ false,
+ true,
+ true,
+ new Pair<>(false, false)
+ );
+ }
+ }
+
+ @Test
+ public void testGetLockedIntervals() throws Exception
+ {
+ final String datasourceName = GET_LOCKED_INTERVALS + config.getExtraDatasourceNameSuffix();
+ try (final Closeable ignored = unloader(datasourceName)) {
+ // Submit an Indexing Task
+ submitIndexTask(INDEX_TASK, datasourceName);
+
+ // Wait until it acquires a lock
+ final Map<String, Integer> minTaskPriority = Collections.singletonMap(datasourceName, 0);
+ final Map<String, List<Interval>> lockedIntervals = new HashMap<>();
+ ITRetryUtil.retryUntilFalse(
+ () -> {
+ lockedIntervals.clear();
+ lockedIntervals.putAll(indexer.getLockedIntervals(minTaskPriority));
+ return lockedIntervals.isEmpty();
+ },
+ "Verify Intervals are Locked"
+ );
+
+ // Verify the locked intervals for this datasource
+ Assert.assertEquals(lockedIntervals.size(), 1);
+ Assert.assertEquals(
+ lockedIntervals.get(datasourceName),
+ Collections.singletonList(Intervals.of("2013-08-31/2013-09-02"))
+ );
+
+ ITRetryUtil.retryUntilTrue(
+ () -> coordinator.areSegmentsLoaded(datasourceName),
+ "Segment Load"
+ );
+ }
+ }
+
+ @Test
+ public void testJsonFunctions() throws Exception
+ {
+ final String taskSpec = getResourceAsString("/indexer/json_path_index_task.json");
+
+ submitTaskAndWait(
+ taskSpec,
+ "json_path_index_test",
+ false,
+ true,
+ new Pair<>(false, false)
+ );
+
+ doTestQuery("json_path_index_test", "/indexer/json_path_index_queries.json");
+ }
+}
diff --git a/integration-tests-ex/cases/src/test/resources/cluster/BackwardCompatibility/docker.yaml b/integration-tests-ex/cases/src/test/resources/cluster/BackwardCompatibility/docker.yaml
new file mode 100644
index 000000000000..d676f530e908
--- /dev/null
+++ b/integration-tests-ex/cases/src/test/resources/cluster/BackwardCompatibility/docker.yaml
@@ -0,0 +1,40 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#-------------------------------------------------------------------------
+
+# Definition of the backward compatibility test cluster.
+# See https://yaml.org/spec/1.2.2 for more about YAML
+include:
+ - /cluster/Common/zk-metastore.yaml
+
+druid:
+ coordinator:
+ instances:
+ - port: 8081
+ overlord:
+ instances:
+ - port: 8090
+ broker:
+ instances:
+ - port: 8082
+ router:
+ instances:
+ - port: 8888
+ historical:
+ instances:
+ - port: 8083
+ indexer:
+ instances:
+ - port: 8091
diff --git a/integration-tests-ex/image/docker-build.sh b/integration-tests-ex/image/docker-build.sh
index 6a945aa6129a..4ff5d7a74387 100755
--- a/integration-tests-ex/image/docker-build.sh
+++ b/integration-tests-ex/image/docker-build.sh
@@ -53,4 +53,23 @@ docker build -t $DRUID_IT_IMAGE_NAME \
--build-arg CONFLUENT_VERSION=$CONFLUENT_VERSION \
--build-arg HADOOP_VERSION=$HADOOP_VERSION \
--build-arg MYSQL_DRIVER_CLASSNAME=$MYSQL_DRIVER_CLASSNAME \
+ --build-arg DRUID_TESTING_TOOLS_VERSION=$DRUID_VERSION \
+ .
+
+if [[ "${BACKWARD_COMPATIBILITY_IT_ENABLED:-}" != "true" ]]; then
+ echo "Not building previous version image."
+ exit 0
+fi
+
+# Download the previous druid tar
+curl -L $DRUID_PREVIOUS_VERSION_DOWNLOAD_URL --output apache-druid-$DRUID_PREVIOUS_VERSION-bin.tar.gz
+
+docker build -t $DRUID_PREVIOUS_IT_IMAGE_NAME \
+ --build-arg DRUID_VERSION=$DRUID_PREVIOUS_VERSION \
+ --build-arg MYSQL_VERSION=$MYSQL_VERSION \
+ --build-arg MARIADB_VERSION=$MARIADB_VERSION \
+ --build-arg CONFLUENT_VERSION=$CONFLUENT_VERSION \
+ --build-arg HADOOP_VERSION=$HADOOP_VERSION \
+ --build-arg MYSQL_DRIVER_CLASSNAME=$MYSQL_DRIVER_CLASSNAME \
+ --build-arg DRUID_TESTING_TOOLS_VERSION=$DRUID_VERSION \
.
diff --git a/integration-tests-ex/image/docker/Dockerfile b/integration-tests-ex/image/docker/Dockerfile
index a77a5c2d023e..90955eae3c11 100644
--- a/integration-tests-ex/image/docker/Dockerfile
+++ b/integration-tests-ex/image/docker/Dockerfile
@@ -46,13 +46,15 @@ ARG MARIADB_VERSION
ENV MARIADB_VERSION=$MARIADB_VERSION
ARG MYSQL_DRIVER_CLASSNAME=com.mysql.jdbc.Driver
ENV MYSQL_DRIVER_CLASSNAME=$MYSQL_DRIVER_CLASSNAME
+ARG DRUID_TESTING_TOOLS_VERSION
ENV DRUID_HOME=/usr/local/druid
# Populate build artifacts
COPY apache-druid-${DRUID_VERSION}-bin.tar.gz /usr/local/
-COPY druid-it-tools-${DRUID_VERSION}.jar /tmp/druid/extensions/druid-it-tools/
+COPY druid-it-tools-${DRUID_TESTING_TOOLS_VERSION}.jar /tmp/druid/extensions/druid-it-tools/
+
COPY kafka-protobuf-provider-${CONFLUENT_VERSION}.jar /tmp/druid/lib/
COPY mysql-connector-j-${MYSQL_VERSION}.jar /tmp/druid/lib/
COPY mariadb-java-client-${MARIADB_VERSION}.jar /tmp/druid/lib/
@@ -60,6 +62,7 @@ COPY test-setup.sh /
COPY druid.sh /
COPY launch.sh /
+
# Do the setup tasks. The tasks are done within a script, rather than
# here, so they are easier to describe and debug. Turn on the "-x" flag
# within the script to trace the steps if needed for debugging.