diff --git a/.github/workflows/javascript-lint.yml b/.github/workflows/javascript-lint.yml index 1ef359c9a..e516c34a1 100644 --- a/.github/workflows/javascript-lint.yml +++ b/.github/workflows/javascript-lint.yml @@ -19,7 +19,7 @@ jobs: - name: Install ESLint run: | npm init -y - npm install eslint@latest @babel/eslint-parser@latest eslint-define-config globals eslint-plugin-jsdoc --save + npm install eslint@^9 @babel/eslint-parser@latest eslint-define-config globals eslint-plugin-jsdoc --save npx eslint "**/*.js" # Step 4: Report status diff --git a/.gitignore b/.gitignore index b5450c6c5..468b4bfa3 100644 --- a/.gitignore +++ b/.gitignore @@ -52,7 +52,7 @@ scripts/globus/mapping.json scripts/admin_datafed_backup.sh scripts/admin_refresh_certs.sh scripts/globus/__pycache__ -services/ +/services/ tests/mock_core/Version.hpp tmp web/SDMS.proto diff --git a/.gitlab/build/build_core_image.yml b/.gitlab/build/build_core_image.yml deleted file mode 100644 index fcc80d443..000000000 --- a/.gitlab/build/build_core_image.yml +++ /dev/null @@ -1,53 +0,0 @@ ---- -stages: - - build - -include: - - local: .gitlab/common.yml - -build-core: - extends: .docker_build_script - stage: build - variables: - PROJECT: "datafed" - COMPONENT: "core" - GIT_STRATEGY: clone - DOCKER_FILE_PATH: "core/docker/Dockerfile" - DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count - BUILD_INTERMEDIATE: "FALSE" - tags: - - ci-datafed-core - - docker - rules: - - changes: - - docker/**/* - - scripts/**/* - - core/**/* - - common/**/* - - CMakeLists.txt - - cmake/**/* - - .gitlab-ci.yml - when: on_success - -retag-image: - extends: .docker_retag_image - stage: build - variables: - PROJECT: "datafed" - COMPONENT: "core" - GIT_STRATEGY: clone - DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count - BUILD_INTERMEDIATE: "FALSE" - tags: - - docker - rules: - - changes: - - docker/**/* - - scripts/**/* - - core/**/* - - common/**/* - - CMakeLists.txt - - cmake/**/* - - 
.gitlab-ci.yml - when: never - - when: on_success diff --git a/.gitlab/build/build_foxx_image.yml b/.gitlab/build/build_foxx_image.yml deleted file mode 100644 index c0b45a74a..000000000 --- a/.gitlab/build/build_foxx_image.yml +++ /dev/null @@ -1,56 +0,0 @@ ---- -stages: - - build - -include: - - local: .gitlab/common.yml - -build-foxx: - extends: .docker_build_script - stage: build - variables: - PROJECT: "datafed" - COMPONENT: "foxx" - GIT_STRATEGY: clone - DOCKER_FILE_PATH: "docker/Dockerfile.foxx" - DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count - BUILD_INTERMEDIATE: "FALSE" - tags: - - docker - rules: - - changes: - - docker/**/* - - scripts/**/* - - cmake/**/* - - core/database/**/* - - core/CMakeLists.txt - - common/proto/**/* - - .gitlab-ci.yml - - .gitlab/**/* - - CMakeLists.txt - when: on_success - -retag-image: - extends: .docker_retag_image - stage: build - variables: - PROJECT: "datafed" - COMPONENT: "foxx" - GIT_STRATEGY: clone - DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count - BUILD_INTERMEDIATE: "FALSE" - tags: - - docker - rules: - - changes: - - docker/**/* - - scripts/**/* - - cmake/**/* - - core/database/**/* - - core/CMakeLists.txt - - common/proto/**/* - - .gitlab-ci.yml - - .gitlab/**/* - - CMakeLists.txt - when: never - - when: on_success diff --git a/.gitlab/build/build_gcs_base_image.yml b/.gitlab/build/build_gcs_base_image.yml index a437f7502..70c071827 100644 --- a/.gitlab/build/build_gcs_base_image.yml +++ b/.gitlab/build/build_gcs_base_image.yml @@ -31,6 +31,7 @@ build-gcs-base: - echo "$BRANCH_LOWER" - source "scripts/dependency_versions.sh" - cd "external/globus-connect-server-deploy/docker" + - git fetch origin "$DATAFED_GCS_SUBMODULE_VERSION" - git checkout "$DATAFED_GCS_SUBMODULE_VERSION" - docker login "${REGISTRY}" -u "${HARBOR_USER}" -p "${HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN}" - docker build --no-cache --progress plain -t 
"${REGISTRY}/${PROJECT}/${COMPONENT}-${BRANCH_LOWER}:latest" - < "./docker-files/Dockerfile.${GCS_BASE_IMAGE_DISTRO}" diff --git a/.gitlab/build/build_gcs_image.yml b/.gitlab/build/build_gcs_image.yml deleted file mode 100644 index 4d53e04eb..000000000 --- a/.gitlab/build/build_gcs_image.yml +++ /dev/null @@ -1,88 +0,0 @@ ---- -stages: - - build - -include: - - local: .gitlab/common.yml - -build-gcs: - stage: build - variables: - PROJECT: "datafed" - COMPONENT: "gcs" - GIT_STRATEGY: clone - DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count - BUILD_INTERMEDIATE: "FALSE" - tags: - - ci-datafed-globus - - docker - rules: - - changes: - - docker/**/* - - scripts/**/* - - common/**/* - - .gitlab-ci.yml - - .gitlab/build/build_gcs_base_image.sh - - .gitlab/build/build_gcs_image.sh - - .gitlab/stage_build_base.sh - - external/globus-connect-server/**/* - - CMakeLists.txt - - cmake/**/* - - repository/docker/entrypoint_authz.sh - - repository/docker/Dockerfile.gcs - - repository/CMakeLists.txt - - repository/gridftp/**/* - when: on_success - script: - - BRANCH_LOWER=$(echo "$CI_COMMIT_REF_NAME" | tr '[:upper:]' '[:lower:]') - - echo "$BRANCH_LOWER" - - "${CI_PROJECT_DIR}/scripts/generate_datafed.sh" - - docker login "${REGISTRY}" -u "${HARBOR_USER}" -p "${HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN}" - - DOWNSTREAM_SHA=$( git submodule status ./external/DataFedDependencies/ | awk '{print $1}' ) - - DOWNSTREAM_SHA=${DOWNSTREAM_SHA#-} - - docker build --build-arg DEPENDENCIES="${REGISTRY}/datafed/dependencies:$DOWNSTREAM_SHA" --build-arg RUNTIME="${REGISTRY}/datafed/runtime-${BRANCH_LOWER}:latest" --build-arg GCS_IMAGE="${REGISTRY}/datafed/gcs-base-${BRANCH_LOWER}:latest" -f repository/docker/Dockerfile.gcs -t "${REGISTRY}/${PROJECT}/${COMPONENT}-${BRANCH_LOWER}:latest" . 
- - docker tag "${REGISTRY}/${PROJECT}/${COMPONENT}-${BRANCH_LOWER}:latest" "${REGISTRY}/${PROJECT}/${COMPONENT}-${BRANCH_LOWER}:$CI_COMMIT_SHA" - - export DATAFED_HARBOR_REPOSITORY="${COMPONENT}-${BRANCH_LOWER}" - - export DATAFED_HARBOR_USERNAME="${HARBOR_USER}" - - export DATAFED_HARBOR_PASSWORD="${HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN}" - - docker push "${REGISTRY}/${PROJECT}/${DATAFED_HARBOR_REPOSITORY}:latest" - - docker push "${REGISTRY}/${PROJECT}/${DATAFED_HARBOR_REPOSITORY}:$CI_COMMIT_SHA" - - | - while [ "$(${CI_PROJECT_DIR}/scripts/ci_harbor_artifact_count.sh -r ${DATAFED_HARBOR_REPOSITORY})" == "0" ]; do - echo "Artifact missing from harbor..." - docker push "${REGISTRY}/${PROJECT}/${DATAFED_HARBOR_REPOSITORY}:latest" - docker push "${REGISTRY}/${PROJECT}/${DATAFED_HARBOR_REPOSITORY}:$CI_COMMIT_SHA" - sleep 5 # Optional: Add a sleep to avoid busy waiting - done - - cat "${CI_PROJECT_DIR}/harbor_check.log" - -retag-image: - extends: .docker_retag_image - stage: build - variables: - PROJECT: "datafed" - COMPONENT: "gcs" - GIT_STRATEGY: clone - DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count - BUILD_INTERMEDIATE: "FALSE" - tags: - - ci-datafed-globus - - docker - rules: - - changes: - - docker/**/* - - scripts/**/* - - common/**/* - - .gitlab-ci.yml - - .gitlab/build/build_gcs_base_image.sh - - .gitlab/build/build_gcs_image.sh - - .gitlab/stage_build_base.sh - - external/globus-connect-server/**/* - - CMakeLists.txt - - cmake/**/* - - repository/docker/entrypoint_authz.sh - - repository/docker/Dockerfile.gcs - - repository/CMakeLists.txt - - repository/gridftp/**/* - when: never - - when: on_success diff --git a/.gitlab/build/build_repo_image.yml b/.gitlab/build/build_repo_image.yml deleted file mode 100644 index 9ee9fd4b5..000000000 --- a/.gitlab/build/build_repo_image.yml +++ /dev/null @@ -1,55 +0,0 @@ ---- -stages: - - build - -include: - - local: .gitlab/common.yml - -build-repo: - extends: .docker_build_script - stage: 
build - variables: - PROJECT: "datafed" - COMPONENT: "repo" - GIT_STRATEGY: clone - DOCKER_FILE_PATH: "repository/docker/Dockerfile" - DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count - BUILD_INTERMEDIATE: "FALSE" - tags: - - ci-datafed-repo - - docker - rules: - - changes: - - docker/**/* - - scripts/**/* - - common/proto/**/* - - .gitlab-ci.yml - - CMakeLists.txt - - cmake/**/* - - repository/CMakeLists.txt - - repository/server/**/* - when: on_success - -retag-image: - extends: .docker_retag_image - stage: build - variables: - PROJECT: "datafed" - COMPONENT: "repo" - GIT_STRATEGY: clone - DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count - BUILD_INTERMEDIATE: "FALSE" - tags: - - docker - rules: - - changes: - - docker/**/* - - scripts/**/* - - common/proto/**/* - - .gitlab-ci.yml - - CMakeLists.txt - - cmake/**/* - - repository/CMakeLists.txt - - repository/server/**/* - when: never - - when: on_success diff --git a/.gitlab/build/build_ws_image.yml b/.gitlab/build/build_ws_image.yml deleted file mode 100644 index 9ad5767c2..000000000 --- a/.gitlab/build/build_ws_image.yml +++ /dev/null @@ -1,57 +0,0 @@ ---- -stages: - - build - -include: - - local: .gitlab/common.yml - -build-ws: - extends: .docker_build_script - stage: build - variables: - PROJECT: "datafed" - COMPONENT: "ws" - GIT_STRATEGY: clone - DOCKER_FILE_PATH: "web/docker/Dockerfile" - DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count - BUILD_INTERMEDIATE: "TRUE" - INTERMEDIATE_TARGET: "ws-build" # Name of the layer in the dockerfile - INTERMEDIATE_LAYER_NAME: "build" - tags: - - ci-datafed-core - - docker - rules: - - changes: - - docker/**/* - - scripts/**/* - - web/**/* - - cmake/**/* - - common/proto/**/* - - .gitlab-ci.yml - - CMakeLists.txt - when: on_success - -retag-image: - extends: .docker_retag_image - stage: build - variables: - PROJECT: "datafed" - COMPONENT: "ws" - GIT_STRATEGY: clone - DATAFED_HARBOR_REGISTRY: 
"$REGISTRY" # needed by c_harbor_artifact_count - BUILD_INTERMEDIATE: "TRUE" - INTERMEDIATE_TARGET: "ws-build" # Name of the layer in the dockerfile - INTERMEDIATE_LAYER_NAME: "build" - tags: - - docker - rules: - - changes: - - docker/**/* - - scripts/**/* - - web/**/* - - cmake/**/* - - common/proto/**/* - - .gitlab-ci.yml - - CMakeLists.txt - when: never - - when: on_success diff --git a/.gitlab/build/force_build_gcs_base_image.yml b/.gitlab/build/force_build_gcs_base_image.yml index 7e32fad39..c11b97573 100644 --- a/.gitlab/build/force_build_gcs_base_image.yml +++ b/.gitlab/build/force_build_gcs_base_image.yml @@ -18,6 +18,7 @@ build-gcs-base: - echo "$BRANCH_LOWER" - source "scripts/dependency_versions.sh" - cd "external/globus-connect-server-deploy/docker" + - git fetch origin "$DATAFED_GCS_SUBMODULE_VERSION" - git checkout "$DATAFED_GCS_SUBMODULE_VERSION" - docker login "${REGISTRY}" -u "${HARBOR_USER}" -p "${HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN}" - docker build --no-cache --progress plain -t "${REGISTRY}/${PROJECT}/${COMPONENT}-${BRANCH_LOWER}:latest" - < "./docker-files/Dockerfile.${GCS_BASE_IMAGE_DISTRO}" diff --git a/.gitlab/build/retag_core_image.yml b/.gitlab/build/retag_core_image.yml new file mode 100644 index 000000000..8b719df82 --- /dev/null +++ b/.gitlab/build/retag_core_image.yml @@ -0,0 +1,18 @@ +--- +stages: + - build + +include: + - local: .gitlab/common.yml + +retag-image: + extends: .docker_retag_image + stage: build + variables: + PROJECT: "datafed" + COMPONENT: "core" + GIT_STRATEGY: clone + DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count + BUILD_INTERMEDIATE: "FALSE" + tags: + - docker diff --git a/.gitlab/build/retag_foxx_image.yml b/.gitlab/build/retag_foxx_image.yml new file mode 100644 index 000000000..fd9ab2021 --- /dev/null +++ b/.gitlab/build/retag_foxx_image.yml @@ -0,0 +1,18 @@ +--- +stages: + - build + +include: + - local: .gitlab/common.yml + +retag-image: + extends: .docker_retag_image + stage: 
build + variables: + PROJECT: "datafed" + COMPONENT: "foxx" + GIT_STRATEGY: clone + DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count + BUILD_INTERMEDIATE: "FALSE" + tags: + - docker diff --git a/.gitlab/build/retag_gcs_base_image.yml b/.gitlab/build/retag_gcs_base_image.yml new file mode 100644 index 000000000..629f490b0 --- /dev/null +++ b/.gitlab/build/retag_gcs_base_image.yml @@ -0,0 +1,18 @@ +--- +stages: + - build + +include: + - local: .gitlab/common.yml + +retag-image: + extends: .docker_retag_image + stage: build + variables: + PROJECT: "datafed" + COMPONENT: "gcs-base" + GIT_STRATEGY: clone + DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count + BUILD_INTERMEDIATE: "FALSE" + tags: + - docker diff --git a/.gitlab/build/retag_gcs_image.yml b/.gitlab/build/retag_gcs_image.yml new file mode 100644 index 000000000..e3c6fec2b --- /dev/null +++ b/.gitlab/build/retag_gcs_image.yml @@ -0,0 +1,19 @@ +--- +stages: + - build + +include: + - local: .gitlab/common.yml + +retag-image: + extends: .docker_retag_image + stage: build + variables: + PROJECT: "datafed" + COMPONENT: "gcs" + GIT_STRATEGY: clone + DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count + BUILD_INTERMEDIATE: "FALSE" + tags: + - ci-datafed-globus + - docker diff --git a/.gitlab/build/retag_repo_image.yml b/.gitlab/build/retag_repo_image.yml new file mode 100644 index 000000000..e0e722a18 --- /dev/null +++ b/.gitlab/build/retag_repo_image.yml @@ -0,0 +1,18 @@ +--- +stages: + - build + +include: + - local: .gitlab/common.yml + +retag-image: + extends: .docker_retag_image + stage: build + variables: + PROJECT: "datafed" + COMPONENT: "repo" + GIT_STRATEGY: clone + DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count + BUILD_INTERMEDIATE: "FALSE" + tags: + - docker diff --git a/.gitlab/build/retag_ws_image.yml b/.gitlab/build/retag_ws_image.yml new file mode 100644 index 000000000..c97a6785b --- /dev/null +++ 
b/.gitlab/build/retag_ws_image.yml @@ -0,0 +1,20 @@ +--- +stages: + - build + +include: + - local: .gitlab/common.yml + +retag-image: + extends: .docker_retag_image + stage: build + variables: + PROJECT: "datafed" + COMPONENT: "ws" + GIT_STRATEGY: clone + DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count + BUILD_INTERMEDIATE: "TRUE" + INTERMEDIATE_TARGET: "ws-build" # Name of the layer in the dockerfile + INTERMEDIATE_LAYER_NAME: "build" + tags: + - docker diff --git a/.gitlab/common.yml b/.gitlab/common.yml index ee42bffbd..588a1990f 100644 --- a/.gitlab/common.yml +++ b/.gitlab/common.yml @@ -55,7 +55,6 @@ # # INTERMEDIATE_LAYER_NAME # This is used when tagging the image to help differentiate different layers - .image_check: tags: - docker @@ -64,30 +63,101 @@ BRANCH_LOWER=$(echo "$CI_COMMIT_REF_NAME" | tr '[:upper:]' '[:lower:]') docker login "${REGISTRY}" -u "${HARBOR_USER}" -p "${HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN}" FORCE_BUILD="FALSE" + REPO_NAME="${COMPONENT}-${BRANCH_LOWER}" + IMAGE_PATH="${REGISTRY}/${PROJECT}/${REPO_NAME}" + + # Check if image exists set +e - docker pull --quiet "${REGISTRY}/${PROJECT}/${COMPONENT}-${BRANCH_LOWER}:latest" - if [ $? -eq 0 ]; then echo "Image exists"; else FORCE_BUILD="TRUE"; fi; + docker pull --quiet "${IMAGE_PATH}:latest" + IMAGE_EXISTS=$? set -e - if [ "${BUILD_INTERMEDIATE}" == "TRUE" ] - then - set +e - docker pull --quiet "${REGISTRY}/${PROJECT}/${COMPONENT}-${INTERMEDIATE_LAYER_NAME}-${BRANCH_LOWER}:latest" - if [ $? 
-eq 0 ]; then echo "Image exists"; else FORCE_BUILD="TRUE"; fi; - set -e + + if [ $IMAGE_EXISTS -ne 0 ]; then + echo "Image does not exist, forcing build" + FORCE_BUILD="TRUE" + else + echo "Image exists, checking for changes in watched files" + + # Get commit SHA from latest image via Harbor API + HARBOR_API_RESPONSE=$(curl -sf -u "${HARBOR_USER}:${HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN}" \ + "https://${REGISTRY}/api/v2.0/projects/${PROJECT}/repositories/${REPO_NAME}/artifacts?page_size=1&sort=-push_time") + + if [ $? -ne 0 ] || [ -z "$HARBOR_API_RESPONSE" ]; then + echo "WARNING: Harbor API request failed, forcing build" + FORCE_BUILD="TRUE" + else + LATEST_IMAGE_SHA=$(echo "$HARBOR_API_RESPONSE" \ + | jq -r '.[0].tags // [] | .[].name' \ + | grep -E '^[a-f0-9]{40}$' \ + | head -1) + + if [ -z "$LATEST_IMAGE_SHA" ]; then + echo "WARNING: No commit SHA tag found on latest image, forcing build" + FORCE_BUILD="TRUE" + elif [ "$LATEST_IMAGE_SHA" == "$CI_COMMIT_SHA" ]; then + echo "Current commit matches latest image, no rebuild needed" + else + echo "Latest image built from: $LATEST_IMAGE_SHA" + echo "Current commit: $CI_COMMIT_SHA" + + # Fetch full history to ensure old commit is available + git fetch --unshallow 2>/dev/null || git fetch origin "$LATEST_IMAGE_SHA" 2>/dev/null || true + + # Verify commit exists in history + set +e + git cat-file -e "$LATEST_IMAGE_SHA" 2>/dev/null + COMMIT_EXISTS=$? 
+ set -e + + if [ $COMMIT_EXISTS -ne 0 ]; then + echo "WARNING: Commit $LATEST_IMAGE_SHA not in git history, forcing build" + FORCE_BUILD="TRUE" + else + if [ -z "$WATCHED_PATHS" ]; then + echo "ERROR: WATCHED_PATHS not defined" + exit 1 + fi + + DIFF_OUTPUT=$(git diff --name-only "$LATEST_IMAGE_SHA" "$CI_COMMIT_SHA" -- $WATCHED_PATHS) + + if [ -n "$DIFF_OUTPUT" ]; then + echo "Changes detected in watched files:" + echo "$DIFF_OUTPUT" + FORCE_BUILD="TRUE" + else + echo "No changes in watched files since $LATEST_IMAGE_SHA" + fi + fi + fi + fi fi - if [ "$FORCE_BUILD" == "TRUE" ] - then - cp .gitlab/build/force_build_${COMPONENT}_image.yml ${COMPONENT}_image.yml + + if [ "${BUILD_INTERMEDIATE}" == "TRUE" ] && [ "$FORCE_BUILD" == "FALSE" ]; then + set +e + docker pull --quiet "${REGISTRY}/${PROJECT}/${COMPONENT}-${INTERMEDIATE_LAYER_NAME}-${BRANCH_LOWER}:latest" + if [ $? -ne 0 ]; then + echo "Intermediate image missing, forcing build" + FORCE_BUILD="TRUE" + fi + set -e + fi + + # File names are all underscores + FILE_COMPONENT=$(echo "$COMPONENT" | tr '-' '_') + + if [ "$FORCE_BUILD" == "TRUE" ]; then + cp .gitlab/build/force_build_${FILE_COMPONENT}_image.yml ${FILE_COMPONENT}_image.yml else - cp .gitlab/build/build_${COMPONENT}_image.yml ${COMPONENT}_image.yml + cp .gitlab/build/retag_${FILE_COMPONENT}_image.yml ${FILE_COMPONENT}_image.yml fi + echo "REGISTRY=${REGISTRY}" >> build.env echo "HARBOR_USER=${HARBOR_USER}" >> build.env echo "HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN=${HARBOR_DATAFED_GITLAB_CI_REGISTRY_TOKEN}" >> build.env sed -i 's/\(HARBOR_USER=.*\)\$/\1$$/g' build.env artifacts: paths: - - ${COMPONENT}_image.yml + - "*_image.yml" reports: dotenv: build.env @@ -118,7 +188,7 @@ docker push "${REGISTRY}/${PROJECT}/${DATAFED_HARBOR_REPOSITORY}:$CI_COMMIT_SHA" if [ "$first_iteration" == "FALSE" ] then - sleep 5 # Optional: Add a sleep to avoid busy waiting + sleep 5 # Optional: Add a sleep to avoid busy waiting else first_iteration="FALSE" fi diff --git 
a/.gitlab/stage_image_check.yml b/.gitlab/stage_image_check.yml index 10028b407..d29b8209f 100644 --- a/.gitlab/stage_image_check.yml +++ b/.gitlab/stage_image_check.yml @@ -10,6 +10,7 @@ check-ws-image: COMPONENT: "ws" BUILD_INTERMEDIATE: "TRUE" INTERMEDIATE_LAYER_NAME: "build" + WATCHED_PATHS: "docker scripts web cmake common/proto .gitlab-ci.yml CMakeLists.txt" check-core-image: extends: .image_check @@ -18,6 +19,7 @@ check-core-image: PROJECT: "datafed" COMPONENT: "core" BUILD_INTERMEDIATE: "FALSE" + WATCHED_PATHS: "docker scripts core common CMakeLists.txt cmake .gitlab-ci.yml" check-repo-image: extends: .image_check @@ -26,14 +28,16 @@ check-repo-image: PROJECT: "datafed" COMPONENT: "repo" BUILD_INTERMEDIATE: "FALSE" + WATCHED_PATHS: "docker scripts common/proto .gitlab-ci.yml CMakeLists.txt cmake repository/CMakeLists.txt repository/server" check-gcs-base-image: extends: .image_check stage: image-check variables: PROJECT: "datafed" - COMPONENT: "gcs_base" + COMPONENT: "gcs-base" BUILD_INTERMEDIATE: "FALSE" + WATCHED_PATHS: "docker scripts common .gitlab-ci.yml CMakeLists.txt cmake" check-gcs-image: extends: .image_check @@ -42,6 +46,7 @@ check-gcs-image: PROJECT: "datafed" COMPONENT: "gcs" BUILD_INTERMEDIATE: "FALSE" + WATCHED_PATHS: "docker scripts common .gitlab-ci.yml .gitlab/build/build_gcs_base_image.sh .gitlab/build/build_gcs_image.sh .gitlab/stage_build_base.sh external/globus-connect-server CMakeLists.txt cmake repository/docker/entrypoint_authz.sh repository/docker/Dockerfile.gcs repository/CMakeLists.txt repository/gridftp" check-foxx-image: extends: .image_check @@ -50,3 +55,4 @@ check-foxx-image: PROJECT: "datafed" COMPONENT: "foxx" BUILD_INTERMEDIATE: "FALSE" + WATCHED_PATHS: "docker scripts cmake core/database core/CMakeLists.txt common/proto .gitlab-ci.yml .gitlab CMakeLists.txt" diff --git a/CMakeLists.txt b/CMakeLists.txt index e04ea9cb6..a7146cde2 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -36,7 +36,7 @@ building with static 
depencies is not completely possible because some system libraries must be shared libraries for DataFed to be interoperable. If this setting is turned on DataFed will build it's libraries as shared and try to link to shared libraries." OFF) -OPTION(ENABLE_END_TO_END_API_TESTS "Enable end-to-end API testing" FALSE) +OPTION(ENABLE_END_TO_END_API_TESTS "Enable end-to-end API testing" FALSE) OPTION(ENABLE_END_TO_END_WEB_TESTS "Enable end-to-end web testing with Playwright" FALSE) OPTION(ENABLE_FOXX_TESTS "Enable Foxx testing, off by default because it will overwrite the test database." FALSE) @@ -88,7 +88,7 @@ endif() set(CMAKE_PREFIX_PATH "${DEPENDENCY_INSTALL_PATH}") -if ( BUILD_TESTS ) +if ( BUILD_TESTS ) include(CTest) ENABLE_TESTING() set(CMAKE_CTEST_ARGUMENTS "--output-on-failure") @@ -112,25 +112,24 @@ if( NOT DEFINED ENABLE_END_TO_END_TESTS ) endif() if(CMAKE_MAKE_PROGRAM MATCHES "(make|gmake)") - add_definitions( -Wall -Wextra ) + add_definitions( -Wall -Wextra ) endif() -if ( BUILD_REPO_SERVER OR BUILD_CORE_SERVER OR BUILD_AUTHZ OR BUILD_COMMON OR BUILD_PYTHON_CLIENT OR BUILD_WEB_SERVER) - configure_file( - "${CMAKE_CURRENT_SOURCE_DIR}/common/proto/common/Version.proto.in" - "${CMAKE_CURRENT_SOURCE_DIR}/common/proto/common/Version.proto" - @ONLY) +if ( BUILD_REPO_SERVER OR BUILD_CORE_SERVER OR BUILD_AUTHZ OR BUILD_COMMON OR BUILD_PYTHON_CLIENT OR BUILD_WEB_SERVER) + # Create file glob here because need to be made visible here as well - file( GLOB ProtoFiles "${PROJECT_SOURCE_DIR}/common/proto/common/*.proto" ) + file(GLOB_RECURSE ProtoFiles "${PROJECT_SOURCE_DIR}/common/proto3/common/*.proto") include(./cmake/Protobuf.cmake) endif() if( BUILD_WEB_SERVER ) include(./cmake/Web.cmake) - file(COPY ${ProtoFiles} DESTINATION "${CMAKE_CURRENT_SOURCE_DIR}/web/") + + file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/common/proto3/common/" + DESTINATION "${CMAKE_CURRENT_SOURCE_DIR}/web/proto3/") if( ENABLE_UNIT_TESTS ) add_test(NAME unit_tests_web COMMAND 
"${DEPENDENCY_INSTALL_PATH}/nvm/versions/node/${LOCAL_NODE_VERSION}/bin/npm" "test") @@ -144,7 +143,7 @@ if( BUILD_AUTHZ_TESTS ) include(./cmake/GlobusCommon.cmake) endif() -if ( BUILD_REPO_SERVER OR BUILD_CORE_SERVER OR BUILD_AUTHZ OR BUILD_COMMON) +if ( BUILD_REPO_SERVER OR BUILD_CORE_SERVER OR BUILD_AUTHZ OR BUILD_COMMON) include_directories( "/usr/include/globus" ) @@ -172,7 +171,7 @@ if ( BUILD_REPO_SERVER OR BUILD_CORE_SERVER OR BUILD_AUTHZ OR BUILD_COMMON) endif() -if( BUILD_COMMON ) +if( BUILD_COMMON ) add_subdirectory( common ) endif() @@ -191,7 +190,7 @@ endif() # Must occur before building authz, location of mock keys are defined here # ENV DATAFED_MOCK_CORE_PUB_KEY if (ENABLE_INTEGRATION_TESTS OR ENABLE_END_TO_END_TESTS) - add_subdirectory(tests) + add_subdirectory(tests) endif() if( BUILD_REPO_SERVER OR BUILD_AUTHZ) @@ -200,7 +199,7 @@ endif() if( BUILD_PYTHON_CLIENT ) # make target = pydatafed - file(COPY ${PROJECT_SOURCE_DIR}/external/DataFedDependencies/python/datafed_pkg/requirements.txt DESTINATION ${PROJECT_SOURCE_DIR}/python/datafed_pkg/) + file(COPY ${PROJECT_SOURCE_DIR}/external/DataFedDependencies/python/datafed_pkg/requirements.txt DESTINATION ${PROJECT_SOURCE_DIR}/python/datafed_pkg) add_subdirectory( python EXCLUDE_FROM_ALL ) endif() @@ -228,13 +227,13 @@ if( INSTALL_CORE_SERVER ) endif() if( INSTALL_WEB_SERVER ) - install( FILES ${ProtoFiles} DESTINATION ${DATAFED_INSTALL_PATH}/web ) install( DIRECTORY ${PROJECT_SOURCE_DIR}/web/static DESTINATION ${DATAFED_INSTALL_PATH}/web ) install( DIRECTORY ${PROJECT_SOURCE_DIR}/web/views DESTINATION ${DATAFED_INSTALL_PATH}/web ) + install( DIRECTORY ${PROJECT_SOURCE_DIR}/web/proto3 DESTINATION ${DATAFED_INSTALL_PATH}/web ) install( FILES ${PROJECT_SOURCE_DIR}/web/version.js DESTINATION ${DATAFED_INSTALL_PATH}/web ) endif() -if( INSTALL_FOXX ) +if( INSTALL_FOXX ) install(CODE "execute_process(COMMAND ${DataFed_SOURCE_DIR}/scripts/install_foxx.sh OUTPUT_VARIABLE _out ERROR_VARIABLE _err diff --git 
a/cmake/Version.cmake b/cmake/Version.cmake index 6e5cdc7c5..fb094d3c2 100644 --- a/cmake/Version.cmake +++ b/cmake/Version.cmake @@ -1,44 +1,44 @@ -set(DATAFED_RELEASE_YEAR 2025) -set(DATAFED_RELEASE_MONTH 12) -set(DATAFED_RELEASE_DAY 9) +set(DATAFED_RELEASE_YEAR 2026) +set(DATAFED_RELEASE_MONTH 2) +set(DATAFED_RELEASE_DAY 24) set(DATAFED_RELEASE_HOUR 10) set(DATAFED_RELEASE_MINUTE 0) -set(DATAFED_COMMON_LIB_MAJOR 2) +set(DATAFED_COMMON_LIB_MAJOR 3) set(DATAFED_COMMON_LIB_MINOR 0) set(DATAFED_COMMON_LIB_PATCH 0) -set(DATAFED_COMMON_PROTOCOL_API_MAJOR 2) +set(DATAFED_COMMON_PROTOCOL_API_MAJOR 3) set(DATAFED_COMMON_PROTOCOL_API_MINOR 0) set(DATAFED_COMMON_PROTOCOL_API_PATCH 0) -set(DATAFED_CORE_MAJOR 2) +set(DATAFED_CORE_MAJOR 3) set(DATAFED_CORE_MINOR 0) set(DATAFED_CORE_PATCH 0) set(DATAFED_FOXX_MAJOR 1) -set(DATAFED_FOXX_MINOR 2) +set(DATAFED_FOXX_MINOR 3) set(DATAFED_FOXX_PATCH 0) set(DATAFED_FOXX_API_MAJOR 1) -set(DATAFED_FOXX_API_MINOR 2) +set(DATAFED_FOXX_API_MINOR 3) set(DATAFED_FOXX_API_PATCH 0) -set(DATAFED_WEB_MAJOR 2) +set(DATAFED_WEB_MAJOR 3) set(DATAFED_WEB_MINOR 0) set(DATAFED_WEB_PATCH 0) -set(DATAFED_REPO_MAJOR 2) +set(DATAFED_REPO_MAJOR 3) set(DATAFED_REPO_MINOR 0) set(DATAFED_REPO_PATCH 0) -set(DATAFED_AUTHZ_MAJOR 2) +set(DATAFED_AUTHZ_MAJOR 3) set(DATAFED_AUTHZ_MINOR 0) set(DATAFED_AUTHZ_PATCH 0) -set(DATAFED_PYTHON_CLIENT_MAJOR 4) +set(DATAFED_PYTHON_CLIENT_MAJOR 5) set(DATAFED_PYTHON_CLIENT_MINOR 0) set(DATAFED_PYTHON_CLIENT_PATCH 0) set(DATAFED_PYTHON_CLIENT_RELEASE_TYPE "") diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt index ab7244a97..3c00dd348 100644 --- a/common/CMakeLists.txt +++ b/common/CMakeLists.txt @@ -2,7 +2,7 @@ cmake_minimum_required (VERSION 3.17.0) # The below command will by default generate the proto files in this folder # we want to place them in the binary folder in /proto/common -add_subdirectory(proto/common) +add_subdirectory(proto3/common) if( BUILD_COMMON ) configure_file( diff --git 
a/common/include/common/IMessage.hpp b/common/include/common/IMessage.hpp index 7d455feca..71aa5bbc7 100644 --- a/common/include/common/IMessage.hpp +++ b/common/include/common/IMessage.hpp @@ -17,16 +17,45 @@ class Message; namespace SDMS { -enum class MessageType { GOOGLE_PROTOCOL_BUFFER, STRING }; +/** + * @enum MessageType + * @brief Discriminator for the payload type carried by an IMessage. + */ +enum class MessageType { + GOOGLE_PROTOCOL_BUFFER, ///< Payload is a google::protobuf::Message. + STRING ///< Payload is a plain std::string. +}; /** - * The message is on its way to a server this it is a REQUEST - * The message is on its way from a server then it is a RESPONSE - **/ + * @enum MessageState + * @brief Indicates the directional state of a message relative to a server. + * + * - @c REQUEST — the message is traveling *toward* a server. + * - @c RESPONSE — the message is traveling *from* a server. + */ enum class MessageState { REQUEST, RESPONSE }; -enum class MessageAttribute { ID, KEY, STATE, CORRELATION_ID }; +/** + * @enum MessageAttribute + * @brief Named keys for the core metadata attributes stored on a message. + */ +enum class MessageAttribute { + ID, ///< Unique message identifier. + KEY, ///< Routing or lookup key. + STATE, ///< Current MessageState (REQUEST or RESPONSE). + CORRELATION_ID ///< Identifier used to correlate requests with responses. +}; +/** + * @brief Convert a MessageAttribute to its human-readable string form. + * + * @param[in] attribute The attribute to stringify. + * @return A string representation, or @c "unsupported_toString_print" if + * the attribute does not have a defined conversion. + * + * @note Only ID and KEY are currently supported; all other values fall + * through to the unsupported case. 
+ */ inline const std::string toString(const MessageAttribute attribute) { if (attribute == MessageAttribute::ID) { return std::string("ID"); @@ -37,67 +66,186 @@ inline const std::string toString(const MessageAttribute attribute) { } } +/// @brief Well-known string constants used throughout the messaging layer. namespace constants { namespace message { namespace google { -/// Supported dynamic arguments +/// @brief Key for the serialized frame size of a protobuf message. const std::string FRAME_SIZE = "frame_size"; -const std::string PROTO_ID = "proto_id"; -const std::string MSG_ID = "msg_id"; +/// @brief Key for the integral message-type identifier (e.g. envelope field number). const std::string MSG_TYPE = "msg_type"; +/// @brief Key for the security/session context attached to a message. const std::string CONTEXT = "context"; } // namespace google } // namespace message } // namespace constants +/** + * @class IMessage + * @brief Abstract interface for all messages exchanged through the SDMS + * messaging infrastructure. + * + * IMessage defines a uniform contract for: + * - **Payload management** — carrying either a protobuf Message or a raw + * string, with ownership semantics enforced by the implementation. + * - **Routing** — an ordered list of route identifiers that determines + * how a message is forwarded through the system. + * - **Metadata** — core attributes (ID, KEY, STATE, CORRELATION_ID) plus + * arbitrary named attributes with small-integer values. + * + * Implementations are expected to own the payload and manage its lifetime. + * Callers retrieve payload data via non-owning raw pointers or copies. + */ class IMessage { public: - + /// @brief Virtual destructor. virtual ~IMessage() {}; - virtual bool exists(MessageAttribute) const = 0; - virtual bool exists(const std::string &) const = 0; /** + * @brief Check whether a core metadata attribute has been set. + * + * @param[in] attribute The attribute to query. 
+ * @return @c true if the attribute is present on this message. + */ + virtual bool exists(MessageAttribute attribute) const = 0; + + /** + * @brief Check whether a named (dynamic) attribute has been set. + * + * @param[in] attribute_name The name of the dynamic attribute to query. + * @return @c true if the named attribute is present on this message. + */ + virtual bool exists(const std::string &attribute_name) const = 0; + + /*--------------------------------------------------------------------- * Setters - **/ + *-------------------------------------------------------------------*/ + + /** + * @brief Set the message payload. + * + * The implementation must take ownership of the supplied payload. For + * protobuf payloads the unique_ptr is moved in; for string payloads the + * string is copied or moved. After this call the IMessage instance is + * solely responsible for the lifetime of the payload data. + * + * @param[in] payload A variant holding either a protobuf message + * (via @c std::unique_ptr) or a plain string. + */ + virtual void setPayload( + std::variant, + std::string> + payload) = 0; /** - * Adding a payload should make a copy and store internally. It should - * Imply ownership of the payload and it's memory management. - **/ - virtual void - setPayload(std::variant, - std::string>) = 0; + * @brief Append a single route to the end of the routing list. + * + * @param[in] route The route identifier to append. + */ virtual void addRoute(const std::string &route) = 0; + /** + * @brief Replace the entire routing list. + * + * @param[in] routes The new ordered list of route identifiers. + */ virtual void setRoutes(const std::list &routes) = 0; - virtual void set(MessageAttribute, const std::string &) = 0; - virtual void set(MessageAttribute, MessageState) = 0; + /** + * @brief Set a core metadata attribute to a string value. + * + * Applicable to attributes such as @c ID, @c KEY, and + * @c CORRELATION_ID. 
+ * + * @param[in] attribute The attribute to set. + * @param[in] value The string value to assign. + */ + virtual void set(MessageAttribute attribute, const std::string &value) = 0; - virtual void set(std::string attribute_name, - std::variant) = 0; /** + * @brief Set a core metadata attribute to a MessageState value. + * + * Intended for the @c STATE attribute. + * + * @param[in] attribute The attribute to set (expected: @c STATE). + * @param[in] state The MessageState value to assign. + */ + virtual void set(MessageAttribute attribute, MessageState state) = 0; + + /** + * @brief Set a named dynamic attribute to a small unsigned integer. + * + * Dynamic attributes are identified by string keys (e.g. the constants + * in @c SDMS::constants::message::google). + * + * @param[in] attribute_name The name of the dynamic attribute. + * @param[in] value The value, stored as one of uint8/16/32. + */ + virtual void set(std::string attribute_name, + std::variant value) = 0; + + /*--------------------------------------------------------------------- * Getters - **/ + *-------------------------------------------------------------------*/ /** - * The correlation ID is assigned to a message when it is created and is - *extremely important for tracing a message in the logs. - **/ + * @brief Retrieve a core metadata attribute. + * + * The returned variant holds a @c std::string for most attributes, or a + * @c MessageState when querying @c STATE. + * + * @param[in] attribute The attribute to retrieve. + * @return A variant containing either the string value or the + * MessageState, depending on the attribute. + */ virtual std::variant - get(MessageAttribute) const = 0; + get(MessageAttribute attribute) const = 0; + + /** + * @brief Get a const reference to the ordered routing list. + * + * @return Const reference to the internal route list. + */ virtual const std::list &getRoutes() const = 0; + + /** + * @brief Get a mutable reference to the ordered routing list. 
+ * + * @return Mutable reference to the internal route list. + */ virtual std::list &getRoutes() = 0; + + /** + * @brief Return the payload type carried by this message. + * + * @return The MessageType discriminator. + */ virtual MessageType type() const noexcept = 0; + + /** + * @brief Retrieve a named dynamic attribute. + * + * @param[in] attribute_name The name of the dynamic attribute. + * @return The value stored as a @c uint8_t, @c uint16_t, or @c uint32_t + * variant. + */ virtual std::variant get(const std::string &attribute_name) const = 0; - /// Note not returning a unique_ptr but a raw pointer because the message - // should stil have ownership of the object. + /** + * @brief Retrieve a non-owning handle to the message payload. + * + * Ownership remains with the IMessage instance. For protobuf payloads + * a raw pointer is returned (not a @c unique_ptr) to make this + * explicit. For string payloads the string is returned by value (copy). + * + * @return A variant holding either a non-owning + * @c google::protobuf::Message* or a @c std::string. 
+ */ virtual std::variant getPayload() = 0; }; } // namespace SDMS + #endif // MESSAGE_HPP diff --git a/common/include/common/IMessageMapper.hpp b/common/include/common/IMessageMapper.hpp index 20e44766e..38ef18a79 100644 --- a/common/include/common/IMessageMapper.hpp +++ b/common/include/common/IMessageMapper.hpp @@ -22,10 +22,8 @@ class IMessageMapper { }; public: - virtual uint16_t getMessageType(uint8_t a_proto_id, - const std::string &a_message_name) = 0; - - virtual uint8_t getProtocolID(MessageProtocol) const = 0; + virtual uint16_t getMessageType(const std::string& message_name) const = 0; + virtual bool requiresAuth(const std::string& msg_type) const = 0; }; } // namespace SDMS diff --git a/common/include/common/ProtoBufMap.hpp b/common/include/common/ProtoBufMap.hpp index 08d92b739..e30f62b90 100644 --- a/common/include/common/ProtoBufMap.hpp +++ b/common/include/common/ProtoBufMap.hpp @@ -1,9 +1,21 @@ +/** + * @file ProtoBufMap.hpp + * @brief Provides message type mapping and envelope wrap/unwrap for the SDMS + * protobuf messaging layer. + * + * ProtoBufMap maintains bidirectional mappings between envelope field numbers + * (used as stable message type identifiers) and protobuf Descriptor objects. + * It also handles serialization boundary concerns via the Envelope message + * pattern, wrapping outgoing messages and unwrapping incoming ones. + */ + #ifndef PROTOBUFMAP_HPP #define PROTOBUFMAP_HPP #pragma once // Public common includes #include "IMessageMapper.hpp" +#include "common/envelope.pb.h" // Third party includes #include @@ -16,34 +28,154 @@ namespace SDMS { +/** + * @class ProtoBufMap + * @brief Bidirectional message type registry and envelope serialization layer. + * + * Maps between uint16_t message type identifiers (derived from envelope field + * numbers) and protobuf Descriptor/FileDescriptor objects. Implements the + * IMessageMapper interface and provides envelope wrap/unwrap operations for + * wire-boundary serialization. 
+ * + * Message type identifiers are organized by category: + * - 10–29: Anonymous (no-auth) messages + * - 200–249: Record messages + * - etc. + * + * @see IMessageMapper + * @see SDMS::Envelope + */ class ProtoBufMap : public IMessageMapper { public: + /** @brief Maps a file index to its protobuf FileDescriptor. */ typedef std::map FileDescriptorMap; + + /** @brief Maps a message type ID (envelope field number) to its Descriptor. */ typedef std::map DescriptorMap; + + /** @brief Reverse mapping from Descriptor back to message type ID. */ typedef std::map MsgTypeMap; private: - FileDescriptorMap m_file_descriptor_map; - DescriptorMap m_descriptor_map; - MsgTypeMap m_msg_type_map; - std::unordered_map m_protocol_ids; + FileDescriptorMap m_file_descriptor_map; ///< Registered file descriptors. + DescriptorMap m_descriptor_map; ///< Type ID → Descriptor lookup. + MsgTypeMap m_msg_type_map; ///< Descriptor → type ID lookup. public: + /** + * @brief Constructs the map and populates all mappings from the Envelope + * descriptor's oneof fields. + * + * Iterates the Envelope message descriptor via reflection to register every + * message field, using each field's number as the stable type identifier. + */ ProtoBufMap(); + /** + * @brief Retrieves the Descriptor for a given message type ID. + * + * @param message_type Envelope field number identifying the message type. + * @return Pointer to the Descriptor, or nullptr if not found. + */ const ::google::protobuf::Descriptor * getDescriptorType(uint16_t message_type) const; - bool exists(uint16_t message_type) const { - return m_descriptor_map.count(message_type) > 0; - } - uint16_t getMessageType(::google::protobuf::Message &); + + /** + * @brief Checks whether a message type ID is registered. + * + * @param message_type Envelope field number to look up. + * @return True if the type is registered, false otherwise. 
+ */ + bool exists(uint16_t message_type) const; + + /** + * @brief Resolves the message type ID for a concrete protobuf message. + * + * @param msg A protobuf message instance. + * @return The envelope field number corresponding to the message's type. + * @throws std::runtime_error If the message type is not registered. + */ + uint16_t getMessageType(const ::google::protobuf::Message &msg) const; + + /** + * @brief Returns a human-readable name for a message type ID. + * + * @param MessageType Envelope field number identifying the message type. + * @return The full protobuf type name, or an error string if not found. + */ std::string toString(uint16_t MessageType) const; - virtual uint16_t getMessageType(uint8_t a_proto_id, - const std::string &a_message_name) final; - virtual uint8_t getProtocolID(MessageProtocol) const final; + + /** + * @brief Resolves a message type ID from a protobuf message type name string. + * + * @param message_name Fully-qualified or short protobuf message name. + * @return The corresponding envelope field number. + * @throws std::runtime_error If the name does not match any registered type. + */ + virtual uint16_t + getMessageType(const std::string &message_name) const final; + + /** + * @brief Wraps an inner message into an Envelope for wire transmission. + * + * Resolves the inner message's type ID via getMessageType(), then uses + * reflection to set the corresponding oneof field in a new Envelope by + * copying from the inner message. + * + * @param inner The message to wrap. + * @return A populated Envelope ready for serialization. + * @throws EC_INVALID_PARAM If the resolved field number does not exist + * in the Envelope descriptor. + */ + std::unique_ptr + wrapInEnvelope(const ::google::protobuf::Message &inner) const; + + /** + * @brief Extracts the inner message from a received Envelope. 
+ * + * Uses reflection to determine which oneof field is set, then calls + * ReleaseMessage() to transfer ownership of the inner message out of + * the envelope. After this call, the released field in @p envelope is + * cleared. + * + * @param envelope The received Envelope to unwrap. Modified in place — + * the released field is no longer owned by the envelope. + * @return The extracted inner message. Caller takes ownership. + * @throws EC_INVALID_PARAM If the envelope's message type cannot be + * resolved to a valid field. + */ + std::unique_ptr<::google::protobuf::Message> + unwrapFromEnvelope(SDMS::Envelope &envelope) const; + + /** + * @brief Determines whether a message type requires authentication. + * + * Messages in the anonymous set (e.g., version handshake, authentication + * requests) do not require a prior authenticated session. All other + * message types do. + * + * @param msg_type Short message type name (e.g., "VersionRequest"). + * @return True if the message requires an authenticated session, false if + * it is in the anonymous (no-auth) set. 
+ */ + virtual bool requiresAuth(const std::string &msg_type) const final { + static const std::unordered_set anon_types = { + "AckReply", + "NackReply", + "VersionRequest", + "VersionReply", + "GetAuthStatusRequest", + "AuthenticateByPasswordRequest", + "AuthenticateByTokenRequest", + "AuthStatusReply", + "DailyMessageRequest", + "DailyMessageReply"}; + return anon_types.count(msg_type) == 0; + } }; + } // namespace SDMS #endif // PROTOBUFMAP_HPP diff --git a/common/proto/common/CMakeLists.txt b/common/proto/common/CMakeLists.txt deleted file mode 100644 index 47d00e102..000000000 --- a/common/proto/common/CMakeLists.txt +++ /dev/null @@ -1,24 +0,0 @@ -cmake_minimum_required (VERSION 3.17.0) - -# Create the .cpp and .hpp files -protobuf_generate( - LANGUAGE cpp - PROTOS ${ProtoFiles} - IMPORT_DIRS "${DataFed_SOURCE_DIR}/common/proto/common" - OUT_VAR protobuf-generated-files -) - -add_custom_target(protobuf-gen-target DEPENDS ${protobuf-generated-files}) - -# make sure that datafed-protobuf is dependent on the cpp files when it compiles -if(BUILD_SHARED_LIBS) - add_library( datafed-protobuf SHARED ${protobuf-generated-files} ) -else() - add_library( datafed-protobuf STATIC ${protobuf-generated-files} ) -endif() -# The following command makes sure that the protobuf files are generated -# before attempting to compile with them. 
-add_dependencies(datafed-protobuf protobuf-gen-target) -set_target_properties(datafed-protobuf PROPERTIES POSITION_INDEPENDENT_CODE ON SOVERSION ${DATAFED_COMMON_PROTOCOL_API_MAJOR} VERSION ${DATAFED_COMMON_PROTOCOL_API_MAJOR}.${DATAFED_COMMON_PROTOCOL_API_MINOR}.${DATAFED_COMMON_PROTOCOL_API_PATCH} ) -target_link_libraries( datafed-protobuf protobuf::libprotobuf protobuf::libprotoc protobuf::libprotobuf-lite) -target_include_directories( datafed-protobuf INTERFACE ${PROJECT_BINARY_DIR}/common/proto) diff --git a/common/proto/common/SDMS.proto b/common/proto/common/SDMS.proto deleted file mode 100644 index 14c2c984a..000000000 --- a/common/proto/common/SDMS.proto +++ /dev/null @@ -1,448 +0,0 @@ -// ALERT: If messages in this proto file are ADDED or REMOVED, or if the -// definitions of existing message fields are CHANGED, then the VER_PROTO_MAJOR -// enum value in Version.proto must be incremented and backward compatibility -// will be lost. If fields are APPENDED to the end of existing message -// definitions, then VER_PROTO_MINOR must be incremented and backward -// compatibility will be maintained. Unused messages and parameters should be -// marked as deprecated, and these should be removed at the next major version -// change. - -// ALERT: Any LARGE INTEGER fields in messages are currently defined as DOUBLE -// instead of INT64 - This is because Javascript does not support 64 bit integers -// and uses doubles for both integer and floats. As long as an integer does not -// exceed 53 bits, this approach works. - -syntax = "proto2"; - -package SDMS; - - -// ============================ Enums - -// Used to return error categories. Likely to be deprecated. -enum ErrorCode -{ - ID_BAD_REQUEST = 1; - ID_INTERNAL_ERROR = 2; - ID_CLIENT_ERROR = 3; - ID_SERVICE_ERROR = 4; - ID_AUTHN_REQUIRED = 5; - ID_AUTHN_ERROR = 6; - ID_DEST_PATH_ERROR = 7; - ID_DEST_FILE_ERROR = 8; -} - -// Used to return status of backend services. Likely to be deprecated. 
-enum ServiceStatus -{ - SS_NORMAL = 0; - SS_DEGRADED = 1; - SS_FAILED = 2; - SS_OFFLINE = 3; -} - -// Used to specify data or collection search mode in queries -enum SearchMode -{ - SM_DATA = 0; - SM_COLLECTION = 1; -} - -// Used to specify dependency relatioships. Likely to be deprecated and replaced with expandable system. -enum DependencyType -{ - DEP_IS_DERIVED_FROM = 0; - DEP_IS_COMPONENT_OF = 1; - DEP_IS_NEW_VERSION_OF = 2; - DEP_TYPE_COUNT = 3; -} - -// Used to indicate dependency direction (IN = offspring, OUT = ancestor) -// Note that records only store outgoing dependencies to ancestors -enum DependencyDir -{ - DEP_IN = 0; - DEP_OUT = 1; -} - -// General record/colelction sorting options -enum SortOption -{ - SORT_ID = 0; - SORT_TITLE = 1; - SORT_OWNER = 2; - SORT_TIME_CREATE = 3; - SORT_TIME_UPDATE = 4; - SORT_RELEVANCE = 5; -} - -// Defines a users role in a project -// Note that project owners are treated as project admins -enum ProjectRole -{ - PROJ_NO_ROLE = 0; - PROJ_MEMBER = 1; - PROJ_MANAGER = 2; - PROJ_ADMIN = 3; -} - -// Used to specify type of access token contained within message -enum AccessTokenType { - GENERIC = 1; - GLOBUS = 2; - GLOBUS_AUTH = 3; - GLOBUS_TRANSFER = 4; - GLOBUS_DEFAULT = 5; - ACCESS_SENTINEL = 255; -} - -// Used to specify how the work is being done on the servers. 
-enum ExecutionMethod { - DIRECT = 1; - DEFERRED = 2; -} -// ============================ Data Structures - -// Allocation-specific statistics data -message AllocStatsData -{ - required string repo = 1; - required uint32 rec_count = 2; - required uint32 file_count = 3; - required uint64 data_size = 4; - repeated uint32 histogram = 5; -} - -message AllocData -{ - required string repo = 1; - required uint64 data_limit = 2; - required uint64 data_size = 3; - required uint32 rec_limit = 4; - required uint32 rec_count = 5; - optional string path = 6; - optional string id = 7; - optional bool is_def = 8; - optional AllocStatsData stats = 9; - optional string repo_type = 10; -} - -// For viewing dependencies -message DependencyData -{ - required string id = 1; - optional string alias = 2; - optional uint32 notes = 3; - required DependencyType type = 4; - required DependencyDir dir = 5; -} - -// For specifying dependencies -message DependencySpecData -{ - required string id = 1; - required DependencyType type = 2; -} - -message UserData -{ - required string uid = 1; - required string name_last = 2; - required string name_first = 3; - optional string email = 4; - optional string options = 5; - optional bool is_admin = 6; - optional bool is_repo_admin = 7; - repeated string ident = 8; - repeated AllocData alloc = 9; -} - -message ProjectData -{ - required string id = 1; - required string title = 2; - optional string desc = 3; - optional string owner = 4; - optional uint32 ct = 5; - optional uint32 ut = 6; - repeated string admin = 7; - repeated string member = 8; - repeated AllocData alloc = 9; -} - -message ListingData -{ - required string id = 1; - required string title = 2; - optional string alias = 3; - optional bool locked = 4; - optional string owner = 5; - optional string creator = 6; - optional double size = 7; - optional bool external = 8; - optional uint32 notes = 9; - // Needed for provenance graphing - optional int32 gen = 10; - optional bool deps_avail = 11; - 
repeated DependencyData dep = 12; - // Needed for catalog view - optional string owner_name = 13; - optional string desc = 14; -} - -/* -message CatItemInfoData -{ - required string id = 1; - required string title = 2; - required string owner_id = 3; - optional string owner_name = 4; - optional string alias = 5; - optional uint32 notes = 6; - optional string brief = 7; - optional double size = 8; -} -*/ - -message TagData -{ - required string name = 1; - required uint64 count = 2; -} - -message PathData -{ - repeated ListingData item = 1; -} - -message RecordData -{ - required string id = 1; - required string title = 2; - optional string alias = 3; - optional string desc = 4; - repeated string tags = 5; - optional string metadata = 6; - optional bool external = 7; - optional string source = 8; - optional string repo_id = 9; - optional double size = 10; - optional string ext = 11; - optional bool ext_auto = 12; - optional uint32 ct = 13; - optional uint32 ut = 14; - optional uint32 dt = 15; - optional string owner = 16; - optional string creator = 17; - optional bool locked = 18; - optional string parent_id = 19; - repeated DependencyData deps = 20; - optional uint32 notes = 21; - optional string md_err_msg = 22; - optional string sch_id = 23; - optional uint32 sch_ver = 24; -} - -// Fields required for a data repo to locate raw data -// TODO Path will be removed and replaced with owner ID -message RecordDataLocation -{ - required string id = 1; - required string path = 2; -} - -// A list of data locations associated with a specific repo -message RepoRecordDataLocations -{ - required string repo_id = 1; - repeated RecordDataLocation loc = 2; -} - -message RecordDataSize -{ - required string id = 1; - required double size = 2; -} - -message CollData -{ - required string id = 1; - required string title = 2; - optional string alias = 3; - optional string desc = 4; - repeated string tags = 5; - optional string topic = 6; - optional string owner = 7; - optional string 
creator = 8; - optional uint32 ct = 9; - optional uint32 ut = 10; - optional string parent_id = 11; - optional uint32 notes = 12; -} - -message GroupData -{ - required string uid = 1; - required string gid = 2; - optional string title = 3; - optional string desc = 4; - repeated string member = 5; -} - -message ACLRule -{ - required string id = 1; - optional uint32 grant = 2; - optional uint32 inhgrant = 3; -} - -message TopicData -{ - required string id = 1; - required string title = 2; - repeated TopicData path = 3; - optional string desc = 4; - optional string creator = 5; - optional bool admin = 6; - required uint32 coll_cnt = 7; - - //repeated TagMetrics tags = 7; - //repeated UserMetrics users = 8; - //repeated DateMetrics date = 9; -} - -message RepoData -{ - required string id = 1; - optional string title = 2; - optional string desc = 3; - optional uint64 capacity = 4; - optional string pub_key = 5; - optional string address = 6; - optional string endpoint = 7; - optional string path = 8; - optional string domain = 9; - optional string exp_path = 10; - repeated string admin = 11; - optional string type = 12; -} - - -// -------------------------------------------- ANNOTATION DEFINES - -// Defines annotation types -enum NoteType -{ - NOTE_QUESTION = 0; - NOTE_INFO = 1; - NOTE_WARN = 2; - NOTE_ERROR = 3; -} - -// Defines annotation state -enum NoteState -{ - NOTE_CLOSED = 0; - NOTE_OPEN = 1; - NOTE_ACTIVE = 2; -} - -// Defines annotation user comment data structure -message NoteComment -{ - required string user = 1; // User ID of comment author - required uint32 time = 2; // Timestamp comment was created - required string comment = 3; // Text content of comment - optional NoteType type = 4; // Note type change - optional NoteState state = 5; // Note state change -} - -// Defines annotation data structure -message NoteData -{ - required string id = 1; // Note ID - required NoteType type = 2; // Current note type - required NoteState state = 3; // Current note 
state - required string subject_id = 4; // Subject data record or collection ID - required string title = 5; // Title - repeated NoteComment comment = 6; // User comments in chronological order - required uint32 ct = 7; // Note create timestamp - required uint32 ut = 8; // Note update timestamp - optional string parent_id = 9; // Parent note ID (only if inherited) - optional bool has_child = 10; // Flag indicating presences of dependent note(s) -} - -// -------------------------------------------------- TASK DEFINES - -enum TaskType -{ - TT_DATA_GET = 0; - TT_DATA_PUT = 1; - TT_DATA_DEL = 2; - TT_REC_CHG_ALLOC = 3; - TT_REC_CHG_OWNER = 4; - TT_REC_DEL = 5; - TT_ALLOC_CREATE = 6; - TT_ALLOC_DEL = 7; - TT_USER_DEL = 8; - TT_PROJ_DEL = 9; -} - -enum TaskStatus -{ - TS_BLOCKED = 0; - TS_READY = 1; - TS_RUNNING = 2; - TS_SUCCEEDED = 3; - TS_FAILED = 4; -} - -enum TaskCommand -{ - TC_STOP = 0; - TC_RAW_DATA_TRANSFER = 1; - TC_RAW_DATA_DELETE = 2; - TC_RAW_DATA_UPDATE_SIZE = 3; - TC_ALLOC_CREATE = 4; - TC_ALLOC_DELETE = 5; -} - -enum Encryption -{ - ENCRYPT_NONE = 0; - ENCRYPT_AVAIL = 1; - ENCRYPT_FORCE = 2; -} - -message TaskData -{ - required string id = 1; - required TaskType type = 2; - required TaskStatus status = 3; - required string client = 4; - required uint32 step = 5; - required uint32 steps = 6; - required string msg = 7; - required uint32 ct = 8; - required uint32 ut = 9; - optional string source = 10; - optional string dest = 11; -} - -// -------------------------------------------------- TASK DEFINES - -message SchemaData -{ - required string id = 1; - required uint32 ver = 2; - optional bool depr = 3; - optional uint32 cnt = 4; - optional bool ref = 5; - optional string own_id = 6; - optional string own_nm = 7; - optional string desc = 8; - optional bool pub = 9; - optional string def = 10; - repeated SchemaData uses = 11; - repeated SchemaData used_by = 12; -} diff --git a/common/proto/common/SDMS_Anon.proto b/common/proto/common/SDMS_Anon.proto deleted 
file mode 100644 index ac916a653..000000000 --- a/common/proto/common/SDMS_Anon.proto +++ /dev/null @@ -1,106 +0,0 @@ -// ALERT: If messages in this proto file are ADDED or REMOVED, or if the -// definitions of existing message fields are CHANGED, then the VER_PROTO_MAJOR -// enum value in Version.proto must be incremented and backward compatibility -// will be lost. If fields are APPENDED to the end of existing message -// definitions, then VER_PROTO_MINOR must be incremented and backward -// compatibility will be maintained. Unused messages and parameters should be -// marked as deprecated, and these should be removed at the next major version -// change. - -syntax = "proto2"; - -import "SDMS.proto"; - -package SDMS.Anon; - -enum Protocol -{ - ID = 1; -} - - -// The AckReply is a simple positive acknowledgement for requests that do not -// need to reply with any data payload. -message AckReply -{ -} - -// NackReply is used to convey error information and can be returned from any -// request. -message NackReply -{ - required SDMS.ErrorCode err_code = 1; - optional string err_msg = 2; -} - -// Request to get system version information -// Reply: VersionReply on success, NackError on error -message VersionRequest -{ -} - -// Reply containing system version information. This information is compared -// to client/server local version information to determine if interoperability -// is possible. 
-message VersionReply -{ -// required uint32 major = 1; // System MAJOR version, no backward compatibility -// required uint32 mapi_major = 2; // Message API MAJOR version, no backward compatibility -// required uint32 mapi_minor = 3; // Message API MINOR version, backward compatible -// required uint32 core = 4; // Core server MINOR version, information only -// required uint32 web = 5; // Web server MINOR version, info/notification purposes -// required uint32 repo = 6; // Repo server MINOR version, info/notification purposes -// required uint32 client_py = 7; // Python client/api MINOR version, info/notification purposes - optional uint32 release_year = 1; - optional uint32 release_month = 2; - optional uint32 release_day = 3; - optional uint32 release_hour = 4; - optional uint32 release_minute = 5; - - optional uint32 api_major = 6; - optional uint32 api_minor = 7; - optional uint32 api_patch = 8; - - optional uint32 component_major = 9; - optional uint32 component_minor = 10; - optional uint32 component_patch = 11; -} - -// Request to get client authentication status -// Reply: AuthStatusReply on success, NackError on error -message GetAuthStatusRequest -{ -} - -// Request to authenticate by password -// Reply: AuthStatusReply on success, NackError on error -message AuthenticateByPasswordRequest -{ - required string uid = 1; // Local user ID - required string password = 2; // Password -} - -// Request to authenticate by Globus access token -// Reply: AuthStatusReply on success, NackError on error -message AuthenticateByTokenRequest -{ - required string token = 1; // Access token -} - -// Reply containing client authentication status -message AuthStatusReply -{ - required bool auth = 1; // Authenticated if true - optional string uid = 2; // DataFed user id -} - -// Get the system daily message -// Reply: DailyMessageReply -message DailyMessageRequest -{ -} - -message DailyMessageReply -{ - optional string message = 1; // Daily message, if set -} diff --git 
a/common/proto/common/SDMS_Auth.proto b/common/proto/common/SDMS_Auth.proto deleted file mode 100644 index 71fbf293c..000000000 --- a/common/proto/common/SDMS_Auth.proto +++ /dev/null @@ -1,1417 +0,0 @@ -// ALERT: If messages in this proto file are ADDED or REMOVED, or if the -// definitions of existing message fields are CHANGED, then the VER_PROTO_MAJOR -// enum value in Version.proto must be incremented and backward compatibility -// will be lost. If fields are APPENDED to the end of existing message -// definitions, then VER_PROTO_MINOR must be incremented and backward -// compatibility will be maintained. Unused messages and parameters should be -// marked as deprecated, and these should be removed at the next major version -// change. - -// NOTE: All messages in this protobuf file require an authorized client and will -// be rejected without processing if otherwise. - -syntax = "proto2"; - -import "SDMS.proto"; - -package SDMS.Auth; - -enum Protocol -{ - ID = 2; -} - -// ============================================================================ -// ----------- Setup/Config Messages ------------------------------------------ -// ============================================================================ - -// Request core server to generate/retrieve user security credentials -// Reply: GenerateCredentialsReply on success, NackReply on error -message GenerateCredentialsRequest -{ - optional string domain = 1; // Reserved for future use (Facility/org domain) - optional uint32 uid = 2; // Reserved for future use (POSIX uid) -} - -// Request core server to revoke (clear) user security credentials for the sending user -// This will prevent non-interactive login from ALL installed clients for the sending user -// Reply: AckReply on success, NackReply on error -message RevokeCredentialsRequest -{ -} - -// Reply containing user security credentials (ZeroMQ keys) that can be used for non-interactive login -// Note: these keys must be stored securely -message 
GenerateCredentialsReply -{ - required string pub_key = 1; // Public key - required string priv_key = 2; // Private key -} - - -// ============================================================================ -// ----------- Permissions Messages ------------------------------------------- -// ============================================================================ - -// Check if specified permission are granted on given resource -// Reply: CheckPermsReply on success, NackReply on error -message CheckPermsRequest -{ - required string id = 1; // Record, collection, or project ID - required uint32 perms = 2; // Requested access permissions -} - -// Reply indicating if all requested permissions are granted -message CheckPermsReply -{ - required bool granted = 1; // True if all permission granted, false otherwise -} - -// Request current user access permissions for given resource -// Reply: GetPermsReply on success, NackReply on error -message GetPermsRequest -{ - required string id = 1; // Record, collection, or project ID - optional uint32 perms = 2; // Optional bitmask for permissions to check, gets all if not given -} - -// Reply containing granted permissions for given user/resource -// If perms bitmask was specified, granted will only apply to requested permissions -message GetPermsReply -{ - required uint32 granted = 1; // Granted permissions -} - - -// ============================================================================ -// ----------- User Messages -------------------------------------------------- -// ============================================================================ - -// Request to vew user information. Details can only be requested for -// authenticate client or by system admins. 
-// Reply: UserDataReply on success, NackReply on error -message UserViewRequest -{ - required string uid = 1; // DataFed user ID - optional bool details = 2; // Include details -} - -// Reply containing user information -message UserDataReply -{ - repeated SDMS.UserData user = 1; // User data - optional uint32 offset = 2; // Offset of this result page - optional uint32 count = 3; // Count of this result page - optional uint32 total = 4; // Total number of results -} - - -// Set user access tokens (from Globus OAuth). This message must be called -// whenever a user logs in via Globus (web portal) in order to update the access -// and refresh tokens issued by Globus. -// Reply: AckReply on success, NackReply on error -message UserSetAccessTokenRequest -{ - required string access = 1; // Access token - required uint32 expires_in = 2; // Access expiration time in seconds - required string refresh = 3; // Refresh token (never expires) - optional AccessTokenType type = 4 [default = GLOBUS_DEFAULT]; - optional string other = 5; // Other information relevant to the set access token -} - -// Get user access token (from Globus OAuth). This token will allow client to -// access Globus APIs on behalf of the authenticated user. -// Reply: UserAccessTokenReply on success, NackReply on error -message UserGetAccessTokenRequest -{ - optional string collection_id = 1; - optional string collection_type = 2; // TODO: use enum -} - -// Reply containing the user Globus access token and expiration time (in -// seconds) for authenticated user. -message UserAccessTokenReply -{ - required string access = 1; // Globus access token - required uint32 expires_in = 2; // Access token expiration in seconds - optional bool needs_consent = 3; // Indicate requirement of consent flow -} - -// Request to create a new DataFed user account. -// Reply: AckReply on success, NackReply on error -// Note: Use of this message is restricted to system service. 
Any attempted use by non-admin -// users will be logged. -message UserCreateRequest -{ - required string uid = 1; // User's GlobusID UID (name portion only) - optional string password = 2; // User-specified DataFed CLI password - required string name = 3; // Full name as given by Globus - required string email = 4; // Email address - repeated string uuid = 5; // UUID for primary Globus account - optional string options = 6; // DataFed options (JSON string) - optional string secret = 7; // System secret -} - -// Request to find DataFed user by one or more Globus UUIDs -// Reply: UserDataReply on success, NackReply on failure -message UserFindByUUIDsRequest -{ - repeated string uuid = 1; // An identity UUID linked to user's Globus account -} - -// Request to find users matching partial name/UID string (ngram search) -// Reply: UserDataReply on success, NackReply on failure -message UserFindByNameUIDRequest -{ - required string name_uid = 1; // String with full/partial name or UID to search for - optional uint32 offset = 2; // Optional result offset - optional uint32 count = 3; // Optional result count -} - -// Request to update a user record -// Reply: UserDataReply on success, NackReply on failure -message UserUpdateRequest -{ - required string uid = 1; // DataFed UID of user record to update - optional string email = 2; // Optional new email address - optional string password = 3; // Optional new DataFed password - optional string options = 4; // Optional DataFed options (JSON string) -} - -// Request to list all registered users -// Reply: UserDataReply on success, NackReply on failure -message UserListAllRequest -{ - optional uint32 offset = 1; // Optional result offset - optional uint32 count = 2; // Optional result count -} - -// Request to list all users that are collaborators of auth user -// Collaborators are users on same project(s) and/or with ACLs set to/from auth user -// Reply: UserDataReply on success, NackReply on failure -message 
UserListCollabRequest -{ - optional uint32 offset = 1; // Optional result offset - optional uint32 count = 2; // Optional result count -} - -// Request to get the list of recent Globus endpoints used by auth user -// Reply: UserGetRecentEPReply on success, NackReply on failure -message UserGetRecentEPRequest -{ -} - -// Reply with list of recent Globus endpoints used by auth user -message UserGetRecentEPReply -{ - repeated string ep = 1; // Endpoint ID (legacy not UUID) -} - -// Request to set the list of recent Globus endpoints used by auth user -// This request will replace the stored endpoint list with the one provided -// Reply: AckReply on success, NackReply on error -message UserSetRecentEPRequest -{ - repeated string ep = 1; // Endpoint ID (legacy not UUID) -} - - -// ============================================================================ -// ----------- General Data/Collection Messages ------------------------------- -// ============================================================================ - -// Reply containing general list of items (data record, collections, users, -// projects, etc). Only common fields are returned. -message ListingReply -{ - repeated ListingData item = 1; // Items - optional uint32 offset = 2; // Offset of this result page - optional uint32 count = 3; // Count of this result page - optional uint32 total = 4; // Total number of results -} - - -// ============================================================================ -// ----------- Data Record Messages ------------------------------------------- -// ============================================================================ - -// Request to list all records associated with a given repository and user/subject. -// If the subject is provided sufficient privileges are required. 
-// Reply: ListingReply on success, NackReply on error -message RecordListByAllocRequest -{ - required string repo = 1; // Repository ID - optional string subject = 2; // Optional project or user ID - optional uint32 offset = 3; // Optional result offset - optional uint32 count = 4; // Optional result count -} - -// Request to view a data record. Requires READ_REC permissions. Metadata will be -// returned if client has READ_META permission. -// Reply: RecordDataReply on success, NackReply on error -message RecordViewRequest -{ - required string id = 1; // Record ID or alias - optional bool details = 2; // Reserved for future use -} - -// Reply containing details of one or more data records in response to original -// request. If original request resulted in side effects, impacted records will -// be listed in the update field. This is an optimization to reduce latency for -// graphical clients that may be displaying multiple/many records that need to -// be refreshed as a result of the original request. -message RecordDataReply -{ - repeated SDMS.RecordData data = 1; // Record details - repeated SDMS.ListingData update = 2; // Optional list of records impacted by side effects -} - -// Request to create a new data record. 
-// Reply: RecordDataReply on success, NackReply on error -message RecordCreateRequest -{ - optional string title = 1; // Record title (required) - optional string desc = 2; // Optional description (markdown supported) - repeated string tags = 3; // Optional list of tags - optional string alias = 4; // Optional alias - optional string metadata = 5; // Optional metadata (JSON string) - optional string parent_id = 6; // Optional parent collection ID or alias (default is root) - optional bool external = 7; // Optional flag for external (unmanaged) raw data - optional string source = 8; // Optional raw data source Globus path - optional string repo_id = 9; // Optional repository ID (default for user/project used if omitted) - optional string ext = 10; // Optional record extension - optional bool ext_auto = 11; // Optional flag for automatic extension (based on source file) - repeated DependencySpecData deps = 12; // Optional record dependencies - optional string sch_id = 13; // Optional metadata schema ID - optional bool sch_enforce = 14; // Optional flag to enforce schema validation (fails on non-compliance) -} - -// Request to create data records in batch. This message will be DEPRECATED. -// Reply: RecordDataReply on success, NackReply on error -message RecordCreateBatchRequest -{ - required string records = 1; // JSON array containing records following DB record create schema -} - -// Request to update an existing data record. 
-// Reply: RecordDataReply on success, NackReply on error -message RecordUpdateRequest -{ - required string id = 1; // Record ID or alias - optional string title = 2; // Optional title - optional string desc = 3; // Optional description (markdown supported) - repeated string tags = 4; // Optional list of tags to add - optional bool tags_clear = 5; // Optional flag to clear existing tags - optional string alias = 6; // Optional alias - optional string metadata = 7; // Optional metadata (JSON string) - optional bool mdset = 8; // Optional flag to set (replace) metadata if true, or merge if false - optional string sch_id = 9; // Optional metadata schema ID - optional bool sch_enforce = 10; // Optional flag to enforce schema validation (fails on non-compliance) - optional string source = 11; // Optional raw data source Globus path - optional string ext = 12; // Optional record extension - optional bool ext_auto = 13; // Optional flag for automatic extension (based on source file) - //optional uint64 size = 14; - //optional uint32 dt = 15; - repeated DependencySpecData dep_add = 16; // Optional new dependencies to add - repeated DependencySpecData dep_rem = 17; // Optional existing dependencies to remove -} - -// Request to update data records in batch. This message will be DEPRECATED. -// Reply: RecordDataReply on success, NackReply on error -message RecordUpdateBatchRequest -{ - required string records = 1; // JSON array containing records following DB record update schema -} - -// Request to export data records in batch. This message may be DEPRECATED. -// Reply: RecordExportReply on success, NackReply on error -message RecordExportRequest -{ - repeated string id = 1; -} - -// Reply containing JSON representation of data records. This message may be DEPRECATED. 
-message RecordExportReply -{ - repeated string record = 1; // JSON string containing data record -} - -// Not used currently -message RecordLockRequest -{ - repeated string id = 1; - required bool lock = 2; -} - -// Request to delete one or more data records. Deletions are handle by background tasks -// thus a TaskDataReply is returned containing the TaskID of the delete operation. -// Reply: TaskDataReply on success, NackReply on error -message RecordDeleteRequest -{ - repeated string id = 1; // ID/alias of record(s) to delete -} - -// Request to get the dependency graph for a given data record. Returns all ancestor -// and descendent records that are related by derivation linkage. Lateral provenance -// relationships of related records are not returned. -// Reply: ListingReply on success, NackReply on error -message RecordGetDependencyGraphRequest -{ - required string id = 1; // ID/alias of data record of interest -} - -// Request to change tha allocation of specified records. All records must be owned by -// the auth user or the specified project. Any records already on destination repo will -// be ignored. On success, a background task is created to perform the actual allocation -// change (requires moving raw data and updating records). -// Reply: RecordAllocChangeReply on success, NackReply on error -message RecordAllocChangeRequest -{ - repeated string id = 1; // ID/alias of data records and or collections - required string repo_id = 2; // Destination repository ID - optional string proj_id = 3; // Optional project ID if data belongs to a project - optional bool check = 4; // Optional flag to perform initial validation only if true -} - -// Reply containing information about record allocation change, including -// associated background task. 
-message RecordAllocChangeReply -{ - required uint32 act_cnt = 1; // Actual count of records to changed - required uint64 act_size = 2; // Actual data size (bytes) that will be moved - required uint32 tot_cnt = 3; // Total number of records in request (including those already on destination repo) - required uint64 data_limit = 4; // Dest repo data size limit (bytes) - required uint64 data_size = 5; // Dest repo current data size (bytes) - required uint32 rec_limit = 6; // Dest repo record count limit - required uint32 rec_count = 7; // Dest repo current record count - optional SDMS.TaskData task = 8; // Background task information -} - -// Request to change tha ownership of specified records. All records must be owned by -// the auth user or the specified project. Auth user must have CREATE permission on -// destination collection. On success, a background task is created to perform the -// actual ownership change (requires moving raw data and updating records). -// Reply: RecordOwnerChangeReply on success, NackReply on error -message RecordOwnerChangeRequest -{ - repeated string id = 1; // ID/alias of data records and or collections - required string coll_id = 2; // Destination collection ID/alias - optional string repo_id = 3; // Optional destination repo ID (default will be used if omitted) - optional string proj_id = 4; // DEPRECATED - optional bool check = 5; // Optional flag to perform initial validation only if true -} - -// Reply containing information about record ownership change, including -// associated background task. 
-message RecordOwnerChangeReply -{ - required uint32 act_cnt = 1; // Actual count of records to changed - required uint64 act_size = 2; // Actual data size (bytes) that will be moved - required uint32 tot_cnt = 3; // Total number of records in request (including those already owned by destination account) - repeated AllocData alloc = 4; // List of available allocations (only if check option set to true) - optional SDMS.TaskData task = 8; // Background task information -} - - -// ============================================================================ -// ----------- Raw Data Messages ---------------------------------------------- -// ============================================================================ - -// Request to download raw data for one or more records. Auth user must have READ_DATA -// permission on all requested records. If the orig_fname option is specified, a check -// will be performed to ensure there are no duplicate filenames, and the request will -// fail if there are. On success, a background task is created to perform the actual -// data transfer. -// Reply: DataGetReply on success, NackReply on error -message DataGetRequest -{ - repeated string id = 1; // ID/alias of data records and or collections - optional string path = 2; // Globus path to download to (not required if check option is set) - optional Encryption encrypt = 3; // Optional encryption mode (none, if available, required) - optional bool orig_fname = 4; // Optional flag to download to original filenames - optional bool check = 5; // Optional flag to perform initial validation only if true - optional string collection_id = 6; // Globus Collection ID - optional string collection_type = 7; // Globus Collection Type -} - -// Request to upload raw data to a data records. Auth user must have WRITE_DATA -// permission on the requested record. On success, a background task is created to -// perform the actual data transfer. 
-// Reply: DataPutReply on success, NackReply on error -message DataPutRequest -{ - required string id = 1; // ID/alias of data record - optional string path = 2; // Globus path to upload from - optional Encryption encrypt = 3; // Optional encryption mode (none, if available, required) - optional string ext = 4; // Optional extension override - optional bool check = 5; // Optional flag to perform initial validation only if true - optional string collection_id = 6; // Globus Collection ID - optional string collection_type = 7; // Globus Collection Type -} - -// Reply containing data download information, including associated background task. -message DataGetReply -{ - repeated ListingData item = 1; // Basic data for records to be downloaded - optional SDMS.TaskData task = 2; // Background task information -} - -// Reply containing data upload information, including associated background task. -message DataPutReply -{ - required RecordData item = 1; // Basic data for record to be uploaded - optional SDMS.TaskData task = 2; // Background task information -} - -// Not currently used (delete raw data only) -message DataDeleteRequest -{ - repeated string id = 1; -} - -// Not currently used (get domain-local path to raw data of a record for direct access) -message DataPathRequest -{ - required string id = 1; - required string domain = 2; -} - -// Not currently used (domain-local path to raw data of a record for direct access) -message DataPathReply -{ - required string path = 1; -} - - -// ============================================================================ -// ----------- Search Messages ------------------------------------------------ -// ============================================================================ - -// Request to search for data records or collection. 
-// Reply: ListingReply on success, NackReply on error -message SearchRequest -{ - required SDMS.SearchMode mode = 1; // Mode is data or collections - optional bool published = 2; // If true, searches public catalog - - // Data and Collections - optional string id = 3; // Partial ID or alias match (wildcard) - optional string text = 4; // Words or phrases (english root word match) - repeated string tags = 5; // Tags - repeated string cat_tags = 6; // Catalog tags used internally for catalog topics - optional uint32 from = 7; // Matches from updated date/time - optional uint32 to = 8; // Matches to updated date/time - optional string owner = 9; // User/project ID of owner - optional string creator = 10; // User ID of creator - repeated string coll = 11; // Collections to search - - // Data scope only - optional string sch_id = 12; // Metadata schema ID - optional string meta = 13; // Metadata expression - optional bool meta_err = 14; // Match records with metadata validation errors - - optional SDMS.SortOption sort = 15; // Sort option (title, ID, etc) - optional bool sort_rev = 16; // Reverse sort order - optional uint32 offset = 17; // Result offset - optional uint32 count = 18; // Result count -} - - -// ============================================================================ -// ----------- Collection Messages -------------------------------------------- -// ============================================================================ - -// Request to view details of a collection. Requires READ_REC permission. -// Reply: CollDataReply on success, NackReply on error -message CollViewRequest -{ - required string id = 1; // Collection ID/alias -} - -// Reply containing details of one or more collections. If original request -// resulted in side effects, impacted collections will be listed in the update field. 
-// This is an optimization to reduce latency for graphical clients that may be -// displaying multiple/many collections that need to be refreshed as a result -// of the original request. -message CollDataReply -{ - repeated SDMS.CollData coll = 1; // Collection data - repeated SDMS.ListingData update = 2; // Optional list of records/collections impacted by side effects -} - -// Request to read (list) contents of collection (records, child collections). Requires -// LIST permission. -// Reply: ListingReply on success, NackError on error -message CollReadRequest -{ - required string id = 1; // ID/alias of collection to read - optional bool details = 3; // DEPRECATED - optional uint32 offset = 4; // Result offset - optional uint32 count = 5; // Result count -} - -// Request to create a new collection. Requires CREATE permission in parent collection. -// Reply: CollDataReply on success, NackError on error -message CollCreateRequest -{ - optional string title = 1; // Title of collection - optional string desc = 2; // Optional description (markdown supported) - optional string alias = 3; // Optional alias - optional string parent_id = 4; // Optional parent collection ID/alias (root is default) - optional string topic = 6; // Optional topic for public collection - repeated string tags = 7; // Optional tags -} - -// Request to update an existing collection. Requires WRITE_REC permission -// Reply: CollDataReply on success, NackError on error -message CollUpdateRequest -{ - required string id = 1; // ID / alias of collection to update - optional string title = 2; // Optional title - optional string desc = 3; // Optional description (markdown supported) - optional string alias = 4; // Optional alias - optional string topic = 6; // Optional topic for public collection - repeated string tags = 7; // Optional tags to add - optional bool tags_clear = 8; // Optional flag to clear existing tags if true -} - -// Request to delete a collection. Requires DELETE permission. 
All contained collections -// will be deleted, as well as any data records that are not linked to other collections. -// On success, a background task will be started to perform the actual delete operation. -// Reply: TaskDataReply on success, NackError on error -message CollDeleteRequest -{ - repeated string id = 1; // ID/alias of collection to delete -} - -// Request to write (add/remove) items to a collection. Requires LINK permission. -// This request does not unlink data records being added to the collection. Any -// items removed that are not linked elsewhere will be added to the root collection -// and returned in the update section of the reply. -// Reply: ListingReply on success, NackError on error -message CollWriteRequest -{ - required string id = 1; // ID/alias of collection to write to - repeated string add = 2; // ID/alias of records/collections to add - repeated string rem = 3; // ID/alias of records/collections to remove - optional bool rem_all = 4; // DEPRECATED -} - -// Request to move records and child collections from a source collection to a -// destination collection. Requires LINK permission and same owners for both source -// and destination collections. -// Reply: AckReply on success, NackError on error -message CollMoveRequest -{ - required string src_id = 1; // ID/alias of source collection - required string dst_id = 2; // ID/alias of destination collection - repeated string item = 3; // ID/alias of items in source collection to move -} - -// Request to get the parent collection path of a given data record or collection. -// Data records will have multiple paths if linked to multiple collections. 
-// Reply: CollPathReply on success, NackError on error -message CollGetParentsRequest -{ - required string id = 1; // ID/alias of data record or collection - optional bool inclusive = 2; // Optional flag to include specified id in path -} - -// Reply containing one or more collection paths -message CollPathReply -{ - repeated SDMS.PathData path = 1; // List of Collection paths -} - -// Request to get the page of an item in a collection. -// Reply: CollGetOffsetReply on success, NackError on error -// TODO: This request should simply return offset not page number -message CollGetOffsetRequest -{ - required string id = 1; // ID/alias of containing collection - required string item = 2; // ID/alias of child data record or collection - required uint32 page_sz = 3; // Page size to use for calculation -} - -// Reply containing page number of item in collection -message CollGetOffsetReply -{ - required string id = 1; // ID/alias of containing collection - required string item = 2; // ID/alias of child data record or collection - required uint32 offset = 3; // Page number of item -} - -// Request to list all published collections of user or project. -// Reply: ListingReply on success, NackError on error -message CollListPublishedRequest -{ - optional string subject = 1; // Optional user/project ID (auth user is default) - optional uint32 offset = 2; // Result offset - optional uint32 count = 3; // Result count -} - - -// ============================================================================ -// ----------- Group Messages ------------------------------------------------- -// ============================================================================ - -// Request to create a new group. -// Reply: GroupDataReply on success, NackError on error -message GroupCreateRequest -{ - required SDMS.GroupData group = 1; // Group data -} - -// Request to update an existing group. 
-// Reply: GroupDataReply on success, NackError on error -message GroupUpdateRequest -{ - required string uid = 1; // User/project ID of group - required string gid = 2; // Group ID - optional string title = 3; // Optional title - optional string desc = 4; // Optional description - repeated string add_uid = 5; // User IDs to add to group - repeated string rem_uid = 6; // User IDs to remove from group -} - -// Reply containing group details -message GroupDataReply -{ - repeated SDMS.GroupData group = 1; // Group details -} - -// Request to delete a group belonging to auth user or project. Deleting a project -// group requires manager permission for associated project. -// Reply: AckReply on success, NackError on error -// TODO: uid should be optional and changed to proj_id -message GroupDeleteRequest -{ - required string uid = 1; // User/project ID of group owner - required string gid = 2; // Group ID of group -} - -// Request to list all groups belonging to user or project. Listing project groups -// requires membership in associated project. -// Reply: GroupDataReply on success, NackError on error -// TODO: uid should be optional and changed to proj_id -message GroupListRequest -{ - required string uid = 1; // User/project ID of group owner -} - -// Request to view details of specified group. Viewing project group requires -// membership in associated project. -// Reply: GroupDataReply on success, NackError on error -// TODO: uid should be optional and changed to proj_id -message GroupViewRequest -{ - required string uid = 1; // User/project ID of group owner - required string gid = 2; // Group ID of group -} - - -// ============================================================================ -// ----------- Access Control List (ACL) Messages ----------------------------- -// ============================================================================ - -// Request to view all ACL rules on a given data record or collection. 
-// Reply: ACLDataReply on success, NackError on error -message ACLViewRequest -{ - required string id = 1; // ID/alias of data record or collection -} - -// Request to update (replace) ACLs on a data record or collection. ACLs are -// set using a JSON payload containing an array of rule objects containing -// the following fields: -// id : User or group ID (required) -// grant : bitmask for local permission grants -// inhgrant : bitmask for inherited permission grants (collections only) -// Reply: ACLDataReply on success, NackError on error -// TODO: Why is this not using the ACLRule struct for rules? -message ACLUpdateRequest -{ - required string id = 1; // ID/alias of data record or collection - optional string rules = 2; // New ACL rules as JSON -} - -// Request to list users/projects owning any ACLs set for client -// Reply: ListingReply on success, NackError on error -message ACLSharedListRequest -{ - optional bool inc_users = 2; // Include user ACLs - optional bool inc_projects = 3; // Include project ACLs -} - -// List data records and/or collections that are share with client by the specified -// owner (user or project). -// Reply: ListingReply on success, NackError on error -message ACLSharedListItemsRequest -{ - required string owner = 2; // User/project ID that is sharing data -} - -// Reply containing ACL information -message ACLDataReply -{ - repeated ACLRule rule = 1; // ACL rules -} - - -// ============================================================================ -// ----------- Project Messages ----------------------------------------------- -// ============================================================================ - -// Request to view project details. Requires client to be associated with the -// project, or be an admin. -// Reply: ProjectDataReply on success, NackError on error -message ProjectViewRequest -{ - required string id = 1; // Project ID -} - -// Reply containing detailed information for one or more projects. 
-message ProjectDataReply -{ - repeated ProjectData proj = 1; // Project data -} - -// Request to create a new project. Requires client to be a repository administrator. -// The creator becomes the owner of the project. -// Reply: ProjectDataReply on success, NackError on error -message ProjectCreateRequest -{ - required string id = 1; // Project OD - optional string title = 2; // Title - optional string desc = 3; // Description - repeated string admin = 4; // List of project admins (managers) - repeated string member = 5; // List of project members -} - -// Request to update an existing project. Requires client to be either the owner -// or an admin of the specified project. Only the project owner can alter the admin -// list. -// Reply: ProjectDataReply on success, NackError on error -message ProjectUpdateRequest -{ - required string id = 1; // Project ID - optional string title = 2; // New title - optional string desc = 3; // New description - optional bool admin_set = 4 [default = false]; // Clear existing admin list - repeated string admin = 5; // New admins (user IDs) - optional bool member_set = 6 [default = false]; // Clear existing member list - repeated string member = 7; // New members (user IDs) -} - -// Request to delete a project. Client must be the project owner or a -// system admin. On success a background task is started to perform the -// actual delete operation. -// Reply: TaskDataReply on success, NackError on error -message ProjectDeleteRequest -{ - repeated string id = 1; // Project ID to delete -} - -// Request to list all projects associated with client. 
-// Reply: ListingReply on success, NackError on error -message ProjectListRequest -{ - optional string subject = 1; // DEPRECATED - optional bool as_owner = 2; // Flag to include owned projects - optional bool as_admin = 3; // Flag to include managed projects - optional bool as_member = 4; // Flag to include member projects - optional SortOption sort = 5; // Sort option - optional bool sort_rev = 6; // Reverse sort order - optional uint32 offset = 7; // Result offset - optional uint32 count = 8; // Result count -} - -// Not currently used -message ProjectSearchRequest -{ - required string text_query = 1; - repeated string scope = 2; -} - -// Request to get the project role of client or specified user -// Reply: ProjectGetRoleReply on success, NackError on error -message ProjectGetRoleRequest -{ - required string id = 1; // Project ID - optional string subject = 2; // Optional user ID -} - -// Reply containing the project role of a user -message ProjectGetRoleReply -{ - required ProjectRole role = 1; -} - - -// ============================================================================ -// ----------- Repository Messages (Repo) ------------------------------------- -// ============================================================================ - -// NOTE: The following messages are use by Core and Repo servers only - -// Request to delete the raw data of onr or more records from repo -// Reply: AckReply on success, NackError on error -message RepoDataDeleteRequest -{ - repeated RecordDataLocation loc = 1; // Record ID and file path -} - -// Request to get the file size of one or more data records from repo -// Reply: RepoDataSizeReply on success, NackError on error -message RepoDataGetSizeRequest -{ - repeated RecordDataLocation loc = 1; // Record ID and file path -} - -// Reply to hold raw data size of one or more dat records -message RepoDataSizeReply -{ - repeated RecordDataSize size = 1; // Record size information -} - -// Request to create a data storage 
path on a repo -// Reply: AckReply on success, NackError on error -message RepoPathCreateRequest -{ - required string path = 1; // Path to raw data storage directory -} - -// Request to delete a data storage path on a repo -// Reply: AckReply on success, NackError on error -message RepoPathDeleteRequest -{ - required string path = 1; // Path to raw data storage directory -} - - -// ============================================================================ -// ----------- Repository Messages (Core) ------------------------------------- -// ============================================================================ - -// Request to list repositories -// Reply: RepoDataReply on success, NackError on error -message RepoListRequest -{ - optional bool details = 1; // Flag to include detailed repo information - optional bool all = 2; // Return all repos if true, otherwise only those administered by client -} - -// Request to view details of a repository -// Reply: RepoDataReply on success, NackError on error -message RepoViewRequest -{ - required string id = 1; // Repo ID -} - -// Request to create a new repository. Only system admins may send this request. -// Reply: RepoDataReply on success, NackError on error -message RepoCreateRequest -{ - required string id = 1; // ID of repo - required string title = 2; // Title - optional string desc = 3; // Description - optional string domain = 5; // RESERVED - optional string path = 6; // Path to storage directories - optional string exp_path = 7; // RESERVED - optional string address = 8; // Repo server address - optional string endpoint = 9; // Globus endpoint UUID or legacy name - optional string pub_key = 10; // Public encryption key - required uint64 capacity = 11; // Total data capacity - repeated string admin = 12; // Repo admin(s) - optional string type = 13; // Repository type (defaults to "globus") -} - -// Request to update an existing repository. Only system or repos admins may -// send this request. 
NOTE: Changing the storage path will not automatically -// move raw data files - this must be done manually. -// Reply: RepoDataReply on success, NackError on error -message RepoUpdateRequest -{ - required string id = 1; // ID of repo - optional string title = 2; // Title - optional string desc = 3; // Description - optional string domain = 5; // RESERVED - optional string path = 6; // Path to storage directories - optional string exp_path = 7; // RESERVED - optional string address = 8; // Repo server address - optional string endpoint = 9; // Globus endpoint UUID or legacy name - optional string pub_key = 10; // Public encryption key - optional uint64 capacity = 11; // Total data capacity - repeated string admin = 12; // Repo admin(s) - optional string type = 13; // Repository type -} - -// Request to delete a repository. Only system or repos admins may send this -// request. NOTE: All data records for all allocations must be moved/deleted -// first or this request will fail. -// Reply: AckReply on success, NackError on error -message RepoDeleteRequest -{ - required string id = 1; // Repo ID -} - -// Reply containing details of one or more repositories -message RepoDataReply -{ - repeated SDMS.RepoData repo = 1; // Repo details -} - -// Request to calculate the count and size of data stored in onr or more collections. -// Reply: RepoCalcSizeReply on success, NackError on error -// TODO: This request should be in Collection category -message RepoCalcSizeRequest -{ - required bool recurse = 1; // Recursive flag - repeated string item = 2; // Data / collection IDs -} - -// Reply containing data size information -message RepoCalcSizeReply -{ - repeated AllocStatsData stats = 1; // Data size info -} - -// Request to list all allocations on a repo administered by client. -// Reply: RepoAllocationsReply on success, NackError on error -message RepoListAllocationsRequest -{ - required string id = 1; // Repo ID -} - -// Request to list allocations of client or project. 
-// Reply: RepoAllocationsReply on success, NackError on error -message RepoListSubjectAllocationsRequest -{ - optional string subject = 1; // Optional project ID - optional bool stats = 2; // Flag to include allocation statistics -} - -// Request to list allocations belonging to owner of data record or collection. -// Client must have CREATE permission on collection. -// Reply: RepoAllocationsReply on success, NackError on error -message RepoListObjectAllocationsRequest -{ - required string id = 1; // Collection ID/alias -} - -// Request to view an allocation of a client or project on a given repo. If a -// project is specified, client must be associated with the project. -// Reply: RepoAllocationsReply on success, NackError on error -message RepoViewAllocationRequest -{ - required string repo = 1; // Repo ID - required string subject = 2; // Optional project ID -} - -// Reply containing allocation information -message RepoAllocationsReply -{ - repeated AllocData alloc = 1; // Allocation information -} - -// Request to calculate allocation statistics of a user or project on a given -// repo. Only repo admins may make this request. -// Reply: RepoAllocationStatsReply on success, NackError on error -message RepoAllocationStatsRequest -{ - required string repo = 1; // Repo ID - optional string subject = 2; // Optional user/project ID -} - -// Reply containing allocation statistics. -message RepoAllocationStatsReply -{ - required AllocStatsData alloc = 1; // Allocation stats -} - -// Request to create a new allocation on a repo for a user or project. -// Only repo admins may make this request. On success, a background task is -// started to create the allocation if it is a DataFed managed globus repo, -// for a metadata only repo it is created immediatly. 
-// Reply: TaskDataReply (to be superseded by RepoAllocationCreateResponse) -// on success, NackError on error -message RepoAllocationCreateRequest -{ - required string repo = 1; // Repo ID - required string subject = 2; // User/project ID - required uint64 data_limit = 3; // Data size limit (bytes) - required uint32 rec_limit = 4; // Data record limit(count) -} - -message RepoAllocationCreateResponse -{ - required SDMS.ExecutionMethod execution_method = 1; // The execution method that was used to create the allocation - optional SDMS.TaskData task = 2; // The task data if deferred execution - optional SDMS.AllocData result = 3; // The Allocation data if direct execution -} - -// Request to update an existing allocation. Only repo admins may make this -// request. -// Reply: AckReply on success, NackError on error -// TODO: Rename this message to RepoAllocationUpdateRequest -message RepoAllocationSetRequest -{ - required string repo = 1; // Repo ID - required string subject = 2; // User/project ID - required uint64 data_limit = 3; // New data size limit (bytes) - required uint32 rec_limit = 4; // New data record limit (count) -} - -// Request to set the default allocation for a user or project. If a -// project is specified, client must be associated with the project. -// Reply: AckReply on success, NackError on error -message RepoAllocationSetDefaultRequest -{ - required string repo = 1; // Repo ID - optional string subject = 2; // User/project ID -} - -// Request to delete an existing allocation. Only repo admins may make this -// request. On success, a background task is started to create the allocation. -// Reply: TaskDataReply on success, NackError on error -message RepoAllocationDeleteRequest -{ - required string repo = 1; // Repo ID - required string subject = 2; // User/project ID -} - -// Request to check authorization for GridFTP action on file in a repo. 
-// Reply: AckReply on success, NackError on error -message RepoAuthzRequest -{ - required string repo = 1; // Repo ID - required string client = 2; // Client ID - required string file = 3; // Path to file in repo - required string action = 4; // GridFTP action -} - -// ============================================================================ -// ----------- Saved Query Messages ------------------------------------------- -// ============================================================================ - -// NOTE: All saved query requests are restricted to authenticated client. - -// Request to create a new saved query. -// Reply: QueryDataReply on success, NackError on error -message QueryCreateRequest -{ - required string title = 1; // Query title - required SearchRequest query = 2; // Search parameters -} - -// Request to update an existing saved query. -// Reply: QueryDataReply on success, NackError on error -message QueryUpdateRequest -{ - required string id = 1; // Query ID - optional string title = 2; // Optional new title - optional SearchRequest query = 3; // Optional new search parameters -} - -// Request to -// Reply: AckReply on success, NackError on error -message QueryDeleteRequest -{ - repeated string id = 1; // Query ID -} - -// Request to list saved queries. -// Reply: ListingReply on success, NackError on error -message QueryListRequest -{ - optional uint32 offset = 1; // Result offset - optional uint32 count = 2; // Result count -} - -// Request to view a saved query information. -// Reply: QueryDataReply on success, NackError on error -message QueryViewRequest -{ - required string id = 1; // Query ID -} - -// Request to execute the specified saved query. 
-// Reply: ListingReply on success, NackError on error -message QueryExecRequest -{ - required string id = 1; // Query ID - optional uint32 offset = 2; // Results offset - optional uint32 count = 3; // Results count -} - -// Reply containing saved query data -message QueryDataReply -{ - required string id = 1; // Query ID - required string title = 2; // Title - required string owner = 4; // Owner user ID - required uint32 ct = 5; // Query create timestamp - required uint32 ut = 6; // Query update timestamp - required SearchRequest query = 7; // Search parameters -} - - -// ============================================================================ -// ----------- Annotation Messages -------------------------------------------- -// ============================================================================ - -// Request to list annotations associated with a data record or collection. -// Reply: NoteDataReply on success, NackError on error -message NoteListBySubjectRequest -{ - required string subject = 1; // Data record or collection ID/alias -} - -// Request to view details of an annotation -// Reply: NoteDataReply on success, NackError on error -message NoteViewRequest -{ - required string id = 1; // Note ID -} - -// Request to create a new annotation and associate with a data record -// or collection. Creator must have READ_REC permission to create an -// annotation, and must be the owner/creator of the subject to activate -// on note creation. -// Reply: NoteDataReply on success, NackError on error -message NoteCreateRequest -{ - required SDMS.NoteType type = 1; // Note type - required string subject = 2; // Subject data record or collection ID/alias - required string title = 3; // Note title - required string comment = 4; // Note comments (text) - required bool activate = 5; // Flag to activate on creation -} - -// Request to update an existing annotation. Only owner/creator of note -// subject may change note title, type, or state. 
-// Reply: NoteDataReply on success, NackError on error -message NoteUpdateRequest -{ - required string id = 1; // Note ID - required string comment = 2; // Comments about update - optional SDMS.NoteType new_type = 3; // Optional new note type - optional SDMS.NoteState new_state = 4; // Optional new note state - optional string new_title = 5; // Optional new note title -} - -// Request to edit an existing comment within a note. Only the author of -// the original comment may update the comment. -// Reply: NoteDataReply on success, NackError on error -message NoteCommentEditRequest -{ - required string id = 1; // Note ID - required uint32 comment_idx = 2; // Index of comment to edit - required string comment = 3; // New comment text -} - -// Reply containing note information and any record/collection updates due to -// side effects. -message NoteDataReply -{ - repeated SDMS.NoteData note = 1; // Note information - repeated SDMS.ListingData update = 2; // Updated data records / collections -} - -// ============================================================================ -// ----------- Task Messages -------------------------------------------------- -// ============================================================================ - -// Request to view a specific task. -// Reply: TaskDataReply on success, NackError on error -message TaskViewRequest -{ - required string task_id = 1; // Task ID -} - -// Request to list tasks for auth client. The 'since' and 'from'/'to' parameters -// are mutually exclusive. 
-// Reply: TaskDataReply on success, NackError on error -message TaskListRequest -{ - optional uint32 since = 1; // Tasks updated since given seconds - optional uint32 from = 2; // Tasks updated starting from timestamp - optional uint32 to = 3; // Tasks updated until timestamp - repeated SDMS.TaskStatus status = 4; // List of status types to return - optional uint32 offset = 5; // Result offset - optional uint32 count = 6; // Result count -} - -// Reply containing detailed information for one or more tasks. -message TaskDataReply -{ - repeated SDMS.TaskData task = 1; // Task information - //optional uint32 offset = 2; // Offset of this result page - //optional uint32 count = 3; // Count of this result page - //optional uint32 total = 4; // Total number of results -} - - -// ============================================================================ -// ----------- Tag Messages --------------------------------------------------- -// ============================================================================ - -// Request to search for a tag against an ngram index of existing tag names. -// Reply: TagDataReply on success, NackError on error -message TagSearchRequest -{ - optional string name = 1; // Partial/full tag name - optional uint32 offset = 2; // Result offset - optional uint32 count = 3; // Result count -} - -// Request to list all existing tags sorted by reference count. 
-// Reply: TagDataReply on success, NackError on error -message TagListByCountRequest -{ - optional uint32 offset = 1; // Result offset - optional uint32 count = 2; // Result count -} - -// Reply containing statistics -message TagDataReply -{ - repeated SDMS.TagData tag = 1; // Tag information - optional uint32 offset = 2; // Offset of this result page - optional uint32 count = 3; // Count of this result page - optional uint32 total = 4; // Total number of results -} - - -// ============================================================================ -// ----------- Schema / Validation Messages ----------------------------------- -// ============================================================================ - -// Request to validate metadata against a given metadata schema. -// Reply: MetadataValidateReply on success, NackError on error -message MetadataValidateRequest -{ - required string metadata = 1; // Metadata as a JSON string - required string sch_id = 2; // Schema ID to validate against -} - -// Reply containing metadata errors -message MetadataValidateReply -{ - optional string errors = 1; // Validation error message -} - -// Request to view a schema. If the resolve option is true, all schema references -// will be loaded and stored in the '_refs' field of the specified schema. -// Reply: SchemaDataReply on success, NackError on error -message SchemaViewRequest -{ - required string id = 1; // Schema ID - optional bool resolve = 2; // Flag to resolve references -} - -// Request to search for schemas. 
-// Reply: SchemaDataReply on success, NackError on error -message SchemaSearchRequest -{ - optional string id = 1; // Partial schema ID (ngram index) - optional string text = 2; // Word or phrase to match in description - optional string owner = 3; // Owner of schema - optional SDMS.SortOption sort = 4; // Sort order - optional bool sort_rev = 5; // Flag to reverse sort order - optional uint32 offset = 6; // Result offset - optional uint32 count = 7; // Result count -} - -// Reply containing detailed information for one or more schemas. -message SchemaDataReply -{ - repeated SDMS.SchemaData schema = 1; // Schema details - optional uint32 offset = 2; // Offset of this result page - optional uint32 count = 3; // Count of this result page - optional uint32 total = 4; // Total number of results -} - -// Request to create a new schema. Only system admins may set the system flag. -// Reply: AckReply on success, NackError on error -message SchemaCreateRequest -{ - required string id = 1; // Schema ID - required string desc = 2; // Description - required bool pub = 3; // Public flag - required bool sys = 4; // System flag - required string def = 5; // Schema definition (JSON schema specification) -} - -// Request to update an existing schema. Updates are not allowed for schemas that -// are in use or referenced by other schemas. -// Reply: AckReply on success, NackError on error -message SchemaUpdateRequest -{ - required string id = 1; // Schema ID - optional string id_new = 2; // Optional new schema ID - optional string desc = 3; // Optional new description - optional bool pub = 4; // Optional new public flag - optional bool sys = 5; // Optional new system flag - optional string def = 6; // Optional new definition -} - -// Request to revise an existing schema. Revision creates a copy of the original -// schema with a new revision number and updated fields. 
-// Reply: AckReply on success, NackError on error -message SchemaReviseRequest -{ - required string id = 1; // Schema ID - optional string desc = 2; // Optional new description - optional bool pub = 3; // Optional new public flag - optional bool sys = 4; // Optional new system flag - optional string def = 5; // Optional new definition -} - - -// Request to delete an existing schema. Deletion is not allowed for schema that -// are in use or are referenced by other schemas. -// Reply: AckReply on success, NackError on error -message SchemaDeleteRequest -{ - required string id = 1; // Schema ID -} - - -// ============================================================================ -// ----------- Catalog / Topic Messages --------------------------------------- -// ============================================================================ - -// Request to list catalog topics. If topic_id is specified, only child topics -// of that topic are returned; otherwise all top-level topics are returned. -// Reply: TopicDataReply on success, NackError on error -message TopicListTopicsRequest -{ - optional string topic_id = 1; // Optional parent topic - optional uint32 offset = 2; // Result offset - optional uint32 count = 3; // Result count -} - -// Request to view a specific topic. -// Reply: TopicDataReply on success, NackError on error -message TopicViewRequest -{ - required string id = 1; // Topic ID -} - -// Request to search for topics by category words/phrase. -// Reply: TopicDataReply on success, NackError on error -message TopicSearchRequest -{ - optional string phrase = 1; // Word/phrase to match -} - -// Reply containing details of one or more topics. 
-message TopicDataReply -{ - repeated TopicData topic = 1; // Topic details - optional uint32 offset = 2; // Offset of this result page - optional uint32 count = 3; // Count of this result page - optional uint32 total = 4; // Total number of results -} - diff --git a/common/proto/common/Version.proto.in b/common/proto/common/Version.proto.in deleted file mode 100644 index b9ebb4c15..000000000 --- a/common/proto/common/Version.proto.in +++ /dev/null @@ -1,35 +0,0 @@ -syntax = "proto2"; -// WARNING the Version.proto file is generated by CMAKE -// -// DataFed version numbers must be manually updated as needed. -// All minor version numbers should be reset when VER_MAJOR is incremented. -// VER_MAPI_MINOR should be reset to 0 when VER_MAPI_MAJOR is incremented. -// Servers and clients must be interoperable as long as VER_MAJOR and VER_MAPI_MAJOR match - -// Minor version are used to notify admins when local servers need to be updated. -// This is accomplished by sending a version request to the Core server on startup -// and comparing to the local version numbers. 
- -enum Version -{ - option allow_alias = true; // Must be commented out if there are no duplicate values - -// VER_MAJOR = 1; // System MAJOR version, no backward compatibility -// VER_MAPI_MAJOR = 4; // Message API MAJOR version, no backward compatibility -// VER_MAPI_MINOR = 1; // Message API MINOR version, backward compatible -// VER_CORE = 0; // Core server MINOR version, information only -// VER_WEB = 0; // Web server MINOR version, info/notification purposes -// VER_REPO = 0; // Repo server MINOR version, info/notification purposes -// VER_CLIENT_PY = 0; // Python client/api MINOR version, info/notification purposes - - DATAFED_RELEASE_YEAR = @DATAFED_RELEASE_YEAR@; - DATAFED_RELEASE_MONTH = @DATAFED_RELEASE_MONTH@; - DATAFED_RELEASE_DAY = @DATAFED_RELEASE_DAY@; - DATAFED_RELEASE_HOUR = @DATAFED_RELEASE_HOUR@; - DATAFED_RELEASE_MINUTE = @DATAFED_RELEASE_MINUTE@; - - DATAFED_COMMON_PROTOCOL_API_MAJOR = @DATAFED_COMMON_PROTOCOL_API_MAJOR@; - DATAFED_COMMON_PROTOCOL_API_MINOR = @DATAFED_COMMON_PROTOCOL_API_MINOR@; - DATAFED_COMMON_PROTOCOL_API_PATCH = @DATAFED_COMMON_PROTOCOL_API_PATCH@; -} - diff --git a/common/proto3/common/CMakeLists.txt b/common/proto3/common/CMakeLists.txt new file mode 100644 index 000000000..7c54ebb52 --- /dev/null +++ b/common/proto3/common/CMakeLists.txt @@ -0,0 +1,44 @@ +cmake_minimum_required(VERSION 3.17.0) + +# Collect proto files from the new 1-1-1 structure +file(GLOB_RECURSE ProtoFiles + "${CMAKE_CURRENT_SOURCE_DIR}/enums/*.proto" + "${CMAKE_CURRENT_SOURCE_DIR}/messages/*.proto" + "${CMAKE_CURRENT_SOURCE_DIR}/anon/*.proto" + "${CMAKE_CURRENT_SOURCE_DIR}/auth/*.proto" + "${CMAKE_CURRENT_SOURCE_DIR}/envelope.proto" +) + +# Generate C++ from protos +protobuf_generate( + LANGUAGE cpp + PROTOS ${ProtoFiles} + IMPORT_DIRS "${CMAKE_CURRENT_SOURCE_DIR}" + OUT_VAR protobuf-generated-files +) + +add_custom_target(protobuf-gen-target DEPENDS ${protobuf-generated-files}) + +# Create library (respects parent's BUILD_SHARED_LIBS setting) 
+if(BUILD_SHARED_LIBS) + add_library(datafed-protobuf SHARED ${protobuf-generated-files}) +else() + add_library(datafed-protobuf STATIC ${protobuf-generated-files}) +endif() + +add_dependencies(datafed-protobuf protobuf-gen-target) + +set_target_properties(datafed-protobuf PROPERTIES + POSITION_INDEPENDENT_CODE ON + SOVERSION ${DATAFED_COMMON_PROTOCOL_API_MAJOR} + VERSION ${DATAFED_COMMON_PROTOCOL_API_MAJOR}.${DATAFED_COMMON_PROTOCOL_API_MINOR}.${DATAFED_COMMON_PROTOCOL_API_PATCH} +) + +target_link_libraries(datafed-protobuf + PUBLIC protobuf::libprotobuf +) + +target_include_directories(datafed-protobuf + PUBLIC ${CMAKE_CURRENT_BINARY_DIR} # Where generated files go + INTERFACE ${PROJECT_BINARY_DIR}/common/proto3 +) diff --git a/common/proto3/common/anon/ack_reply.proto b/common/proto3/common/anon/ack_reply.proto new file mode 100644 index 000000000..1cc67cf67 --- /dev/null +++ b/common/proto3/common/anon/ack_reply.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +// Simple positive acknowledgement +message AckReply { +} diff --git a/common/proto3/common/anon/auth_status_reply.proto b/common/proto3/common/anon/auth_status_reply.proto new file mode 100644 index 000000000..99e2e9742 --- /dev/null +++ b/common/proto3/common/anon/auth_status_reply.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message AuthStatusReply { + bool auth = 1; + string uid = 2; +} diff --git a/common/proto3/common/anon/authenticate_by_password_request.proto b/common/proto3/common/anon/authenticate_by_password_request.proto new file mode 100644 index 000000000..22c569c98 --- /dev/null +++ b/common/proto3/common/anon/authenticate_by_password_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message AuthenticateByPasswordRequest { + string uid = 1; + string password = 2; +} diff --git 
a/common/proto3/common/anon/authenticate_by_token_request.proto b/common/proto3/common/anon/authenticate_by_token_request.proto new file mode 100644 index 000000000..55af17724 --- /dev/null +++ b/common/proto3/common/anon/authenticate_by_token_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message AuthenticateByTokenRequest { + string token = 1; +} diff --git a/common/proto3/common/anon/daily_message_reply.proto b/common/proto3/common/anon/daily_message_reply.proto new file mode 100644 index 000000000..8ae2ad11b --- /dev/null +++ b/common/proto3/common/anon/daily_message_reply.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message DailyMessageReply { + string message = 1; +} diff --git a/common/proto3/common/anon/daily_message_request.proto b/common/proto3/common/anon/daily_message_request.proto new file mode 100644 index 000000000..3060a7a2a --- /dev/null +++ b/common/proto3/common/anon/daily_message_request.proto @@ -0,0 +1,8 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message DailyMessageRequest { +} diff --git a/common/proto3/common/anon/get_auth_status_request.proto b/common/proto3/common/anon/get_auth_status_request.proto new file mode 100644 index 000000000..c573803a6 --- /dev/null +++ b/common/proto3/common/anon/get_auth_status_request.proto @@ -0,0 +1,8 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message GetAuthStatusRequest { +} diff --git a/common/proto3/common/anon/nack_reply.proto b/common/proto3/common/anon/nack_reply.proto new file mode 100644 index 000000000..8cb856ac3 --- /dev/null +++ b/common/proto3/common/anon/nack_reply.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/error_code.proto"; + +option cc_enable_arenas = true; + +// Error response +message NackReply { + ErrorCode err_code = 1; + optional string err_msg = 2; +} diff --git 
a/common/proto3/common/anon/version_reply.proto b/common/proto3/common/anon/version_reply.proto new file mode 100644 index 000000000..d921394c5 --- /dev/null +++ b/common/proto3/common/anon/version_reply.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message VersionReply { + uint32 release_year = 1; + uint32 release_month = 2; + uint32 release_day = 3; + uint32 release_hour = 4; + uint32 release_minute = 5; + uint32 api_major = 6; + uint32 api_minor = 7; + uint32 api_patch = 8; + uint32 component_major = 9; + uint32 component_minor = 10; + uint32 component_patch = 11; +} diff --git a/common/proto3/common/anon/version_request.proto b/common/proto3/common/anon/version_request.proto new file mode 100644 index 000000000..f5843c779 --- /dev/null +++ b/common/proto3/common/anon/version_request.proto @@ -0,0 +1,8 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message VersionRequest { +} diff --git a/common/proto3/common/auth/acl_data_reply.proto b/common/proto3/common/auth/acl_data_reply.proto new file mode 100644 index 000000000..8b07ff7cf --- /dev/null +++ b/common/proto3/common/auth/acl_data_reply.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/acl_rule.proto"; + +option cc_enable_arenas = true; + +message ACLDataReply { + repeated ACLRule rule = 1; +} diff --git a/common/proto3/common/auth/acl_shared_list_items_request.proto b/common/proto3/common/auth/acl_shared_list_items_request.proto new file mode 100644 index 000000000..12a6198a0 --- /dev/null +++ b/common/proto3/common/auth/acl_shared_list_items_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message ACLSharedListItemsRequest { + string owner = 2; +} diff --git a/common/proto3/common/auth/acl_shared_list_request.proto b/common/proto3/common/auth/acl_shared_list_request.proto new file mode 100644 index 000000000..18867890f --- /dev/null +++ 
b/common/proto3/common/auth/acl_shared_list_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message ACLSharedListRequest { + optional bool inc_users = 2; + optional bool inc_projects = 3; +} diff --git a/common/proto3/common/auth/acl_update_request.proto b/common/proto3/common/auth/acl_update_request.proto new file mode 100644 index 000000000..dc63aeb8f --- /dev/null +++ b/common/proto3/common/auth/acl_update_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message ACLUpdateRequest { + string id = 1; + optional string rules = 2; +} diff --git a/common/proto3/common/auth/acl_view_request.proto b/common/proto3/common/auth/acl_view_request.proto new file mode 100644 index 000000000..6acb5e4bf --- /dev/null +++ b/common/proto3/common/auth/acl_view_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message ACLViewRequest { + string id = 1; +} diff --git a/common/proto3/common/auth/check_perms_reply.proto b/common/proto3/common/auth/check_perms_reply.proto new file mode 100644 index 000000000..1a1aa1f44 --- /dev/null +++ b/common/proto3/common/auth/check_perms_reply.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message CheckPermsReply { + bool granted = 1; +} diff --git a/common/proto3/common/auth/check_perms_request.proto b/common/proto3/common/auth/check_perms_request.proto new file mode 100644 index 000000000..93eae3b48 --- /dev/null +++ b/common/proto3/common/auth/check_perms_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message CheckPermsRequest { + string id = 1; + optional uint32 perms = 2; +} diff --git a/common/proto3/common/auth/coll_create_request.proto b/common/proto3/common/auth/coll_create_request.proto new file mode 100644 index 000000000..b599e3be5 --- /dev/null +++ 
b/common/proto3/common/auth/coll_create_request.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message CollCreateRequest { + string title = 1; + optional string desc = 2; + optional string alias = 3; + optional string parent_id = 4; + optional string topic = 6; + repeated string tags = 7; +} diff --git a/common/proto3/common/auth/coll_data_reply.proto b/common/proto3/common/auth/coll_data_reply.proto new file mode 100644 index 000000000..27592d05b --- /dev/null +++ b/common/proto3/common/auth/coll_data_reply.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/coll_data.proto"; +import "messages/listing_data.proto"; + +option cc_enable_arenas = true; + +message CollDataReply { + repeated CollData coll = 1; + repeated ListingData update = 2; +} diff --git a/common/proto3/common/auth/coll_delete_request.proto b/common/proto3/common/auth/coll_delete_request.proto new file mode 100644 index 000000000..437626405 --- /dev/null +++ b/common/proto3/common/auth/coll_delete_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message CollDeleteRequest { + repeated string id = 1; +} diff --git a/common/proto3/common/auth/coll_get_offset_reply.proto b/common/proto3/common/auth/coll_get_offset_reply.proto new file mode 100644 index 000000000..594037161 --- /dev/null +++ b/common/proto3/common/auth/coll_get_offset_reply.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message CollGetOffsetReply { + string id = 1; + string item = 2; + uint32 offset = 3; +} diff --git a/common/proto3/common/auth/coll_get_offset_request.proto b/common/proto3/common/auth/coll_get_offset_request.proto new file mode 100644 index 000000000..e25538065 --- /dev/null +++ b/common/proto3/common/auth/coll_get_offset_request.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message 
CollGetOffsetRequest { + string id = 1; + string item = 2; + uint32 page_sz = 3; +} diff --git a/common/proto3/common/auth/coll_get_parents_request.proto b/common/proto3/common/auth/coll_get_parents_request.proto new file mode 100644 index 000000000..7750dc3ff --- /dev/null +++ b/common/proto3/common/auth/coll_get_parents_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message CollGetParentsRequest { + string id = 1; + optional bool inclusive = 2; +} diff --git a/common/proto3/common/auth/coll_list_published_request.proto b/common/proto3/common/auth/coll_list_published_request.proto new file mode 100644 index 000000000..0b2d06895 --- /dev/null +++ b/common/proto3/common/auth/coll_list_published_request.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message CollListPublishedRequest { + optional string subject = 1; + optional uint32 offset = 2; + optional uint32 count = 3; +} diff --git a/common/proto3/common/auth/coll_move_request.proto b/common/proto3/common/auth/coll_move_request.proto new file mode 100644 index 000000000..c4de4badd --- /dev/null +++ b/common/proto3/common/auth/coll_move_request.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message CollMoveRequest { + string src_id = 1; + string dst_id = 2; + repeated string item = 3; +} diff --git a/common/proto3/common/auth/coll_path_reply.proto b/common/proto3/common/auth/coll_path_reply.proto new file mode 100644 index 000000000..96c708dd8 --- /dev/null +++ b/common/proto3/common/auth/coll_path_reply.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/path_data.proto"; + +option cc_enable_arenas = true; + +message CollPathReply { + repeated PathData path = 1; +} diff --git a/common/proto3/common/auth/coll_read_request.proto b/common/proto3/common/auth/coll_read_request.proto new file mode 100644 index 000000000..89b51905a 
--- /dev/null +++ b/common/proto3/common/auth/coll_read_request.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message CollReadRequest { + string id = 1; + bool details = 3; + optional uint32 offset = 4; + optional uint32 count = 5; +} diff --git a/common/proto3/common/auth/coll_update_request.proto b/common/proto3/common/auth/coll_update_request.proto new file mode 100644 index 000000000..901116e42 --- /dev/null +++ b/common/proto3/common/auth/coll_update_request.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message CollUpdateRequest { + string id = 1; + optional string title = 2; + optional string desc = 3; + optional string alias = 4; + optional string topic = 6; + repeated string tags = 7; + optional bool tags_clear = 8; +} diff --git a/common/proto3/common/auth/coll_view_request.proto b/common/proto3/common/auth/coll_view_request.proto new file mode 100644 index 000000000..93493b8cb --- /dev/null +++ b/common/proto3/common/auth/coll_view_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message CollViewRequest { + string id = 1; +} diff --git a/common/proto3/common/auth/coll_write_request.proto b/common/proto3/common/auth/coll_write_request.proto new file mode 100644 index 000000000..e8fe8c93f --- /dev/null +++ b/common/proto3/common/auth/coll_write_request.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message CollWriteRequest { + string id = 1; + repeated string add = 2; + repeated string rem = 3; + bool rem_all = 4; +} diff --git a/common/proto3/common/auth/data_delete_request.proto b/common/proto3/common/auth/data_delete_request.proto new file mode 100644 index 000000000..073051292 --- /dev/null +++ b/common/proto3/common/auth/data_delete_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message 
DataDeleteRequest { + repeated string id = 1; +} diff --git a/common/proto3/common/auth/data_get_reply.proto b/common/proto3/common/auth/data_get_reply.proto new file mode 100644 index 000000000..c2f574971 --- /dev/null +++ b/common/proto3/common/auth/data_get_reply.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/listing_data.proto"; +import "messages/task_data.proto"; + +option cc_enable_arenas = true; + +message DataGetReply { + repeated ListingData item = 1; + TaskData task = 2; +} diff --git a/common/proto3/common/auth/data_get_request.proto b/common/proto3/common/auth/data_get_request.proto new file mode 100644 index 000000000..44702f797 --- /dev/null +++ b/common/proto3/common/auth/data_get_request.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/encryption.proto"; + +option cc_enable_arenas = true; + +message DataGetRequest { + repeated string id = 1; + optional string path = 2; + optional Encryption encrypt = 3; + optional bool orig_fname = 4; + optional bool check = 5; + optional string collection_id = 6; + optional string collection_type = 7; +} diff --git a/common/proto3/common/auth/data_path_reply.proto b/common/proto3/common/auth/data_path_reply.proto new file mode 100644 index 000000000..21764fb38 --- /dev/null +++ b/common/proto3/common/auth/data_path_reply.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message DataPathReply { + string path = 1; +} diff --git a/common/proto3/common/auth/data_path_request.proto b/common/proto3/common/auth/data_path_request.proto new file mode 100644 index 000000000..d0533abaa --- /dev/null +++ b/common/proto3/common/auth/data_path_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message DataPathRequest { + string id = 1; + string domain = 2; +} diff --git a/common/proto3/common/auth/data_put_reply.proto b/common/proto3/common/auth/data_put_reply.proto 
new file mode 100644 index 000000000..f5bcbbe21 --- /dev/null +++ b/common/proto3/common/auth/data_put_reply.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/record_data.proto"; +import "messages/task_data.proto"; + +option cc_enable_arenas = true; + +message DataPutReply { + RecordData item = 1; + TaskData task = 2; +} diff --git a/common/proto3/common/auth/data_put_request.proto b/common/proto3/common/auth/data_put_request.proto new file mode 100644 index 000000000..bbc6e36f5 --- /dev/null +++ b/common/proto3/common/auth/data_put_request.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/encryption.proto"; + +option cc_enable_arenas = true; + +message DataPutRequest { + string id = 1; + optional string path = 2; + optional Encryption encrypt = 3; + optional string ext = 4; + optional bool check = 5; + optional string collection_id = 6; + optional string collection_type = 7; +} diff --git a/common/proto3/common/auth/generate_credentials_reply.proto b/common/proto3/common/auth/generate_credentials_reply.proto new file mode 100644 index 000000000..f50d5204b --- /dev/null +++ b/common/proto3/common/auth/generate_credentials_reply.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message GenerateCredentialsReply { + string pub_key = 1; + string priv_key = 2; +} diff --git a/common/proto3/common/auth/generate_credentials_request.proto b/common/proto3/common/auth/generate_credentials_request.proto new file mode 100644 index 000000000..302d50743 --- /dev/null +++ b/common/proto3/common/auth/generate_credentials_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message GenerateCredentialsRequest { + optional string domain = 1; + optional uint32 uid = 2; +} diff --git a/common/proto3/common/auth/get_perms_reply.proto b/common/proto3/common/auth/get_perms_reply.proto new file mode 100644 index 000000000..dd5e4b8c6 
--- /dev/null +++ b/common/proto3/common/auth/get_perms_reply.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message GetPermsReply { + uint32 granted = 1; +} diff --git a/common/proto3/common/auth/get_perms_request.proto b/common/proto3/common/auth/get_perms_request.proto new file mode 100644 index 000000000..ec82b26be --- /dev/null +++ b/common/proto3/common/auth/get_perms_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message GetPermsRequest { + string id = 1; + optional uint32 perms = 2; +} diff --git a/common/proto3/common/auth/group_create_request.proto b/common/proto3/common/auth/group_create_request.proto new file mode 100644 index 000000000..d97434720 --- /dev/null +++ b/common/proto3/common/auth/group_create_request.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/group_data.proto"; + +option cc_enable_arenas = true; + +message GroupCreateRequest { + GroupData group = 1; +} diff --git a/common/proto3/common/auth/group_data_reply.proto b/common/proto3/common/auth/group_data_reply.proto new file mode 100644 index 000000000..7f2416e50 --- /dev/null +++ b/common/proto3/common/auth/group_data_reply.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/group_data.proto"; + +option cc_enable_arenas = true; + +message GroupDataReply { + repeated GroupData group = 1; +} diff --git a/common/proto3/common/auth/group_delete_request.proto b/common/proto3/common/auth/group_delete_request.proto new file mode 100644 index 000000000..1cd8d3c86 --- /dev/null +++ b/common/proto3/common/auth/group_delete_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message GroupDeleteRequest { + string uid = 1; + string gid = 2; +} diff --git a/common/proto3/common/auth/group_list_request.proto b/common/proto3/common/auth/group_list_request.proto new file mode 100644 
index 000000000..453dde364 --- /dev/null +++ b/common/proto3/common/auth/group_list_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message GroupListRequest { + string uid = 1; +} diff --git a/common/proto3/common/auth/group_update_request.proto b/common/proto3/common/auth/group_update_request.proto new file mode 100644 index 000000000..aea7602b7 --- /dev/null +++ b/common/proto3/common/auth/group_update_request.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message GroupUpdateRequest { + string uid = 1; + string gid = 2; + optional string title = 3; + optional string desc = 4; + repeated string add_uid = 5; + repeated string rem_uid = 6; +} diff --git a/common/proto3/common/auth/group_view_request.proto b/common/proto3/common/auth/group_view_request.proto new file mode 100644 index 000000000..b7480ea61 --- /dev/null +++ b/common/proto3/common/auth/group_view_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message GroupViewRequest { + string uid = 1; + string gid = 2; +} diff --git a/common/proto3/common/auth/listing_reply.proto b/common/proto3/common/auth/listing_reply.proto new file mode 100644 index 000000000..c45e4dcc6 --- /dev/null +++ b/common/proto3/common/auth/listing_reply.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/listing_data.proto"; + +option cc_enable_arenas = true; + +message ListingReply { + repeated ListingData item = 1; + uint32 offset = 2; + uint32 count = 3; + uint32 total = 4; +} diff --git a/common/proto3/common/auth/metadata_validate_reply.proto b/common/proto3/common/auth/metadata_validate_reply.proto new file mode 100644 index 000000000..a3ce92e42 --- /dev/null +++ b/common/proto3/common/auth/metadata_validate_reply.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message MetadataValidateReply { 
+ string errors = 1; +} diff --git a/common/proto3/common/auth/metadata_validate_request.proto b/common/proto3/common/auth/metadata_validate_request.proto new file mode 100644 index 000000000..2ccfcc9e4 --- /dev/null +++ b/common/proto3/common/auth/metadata_validate_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message MetadataValidateRequest { + string metadata = 1; + string sch_id = 2; +} diff --git a/common/proto3/common/auth/note_comment_edit_request.proto b/common/proto3/common/auth/note_comment_edit_request.proto new file mode 100644 index 000000000..cde74c660 --- /dev/null +++ b/common/proto3/common/auth/note_comment_edit_request.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message NoteCommentEditRequest { + string id = 1; + uint32 comment_idx = 2; + string comment = 3; +} diff --git a/common/proto3/common/auth/note_create_request.proto b/common/proto3/common/auth/note_create_request.proto new file mode 100644 index 000000000..3e18e0ed7 --- /dev/null +++ b/common/proto3/common/auth/note_create_request.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/note_type.proto"; + +option cc_enable_arenas = true; + +message NoteCreateRequest { + NoteType type = 1; + string subject = 2; + string title = 3; + string comment = 4; + bool activate = 5; +} diff --git a/common/proto3/common/auth/note_data_reply.proto b/common/proto3/common/auth/note_data_reply.proto new file mode 100644 index 000000000..3e1c6b1ba --- /dev/null +++ b/common/proto3/common/auth/note_data_reply.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/note_data.proto"; +import "messages/listing_data.proto"; + +option cc_enable_arenas = true; + +message NoteDataReply { + repeated NoteData note = 1; + repeated ListingData update = 2; +} diff --git a/common/proto3/common/auth/note_list_by_subject_request.proto 
b/common/proto3/common/auth/note_list_by_subject_request.proto new file mode 100644 index 000000000..2655e5b8f --- /dev/null +++ b/common/proto3/common/auth/note_list_by_subject_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message NoteListBySubjectRequest { + string subject = 1; +} diff --git a/common/proto3/common/auth/note_update_request.proto b/common/proto3/common/auth/note_update_request.proto new file mode 100644 index 000000000..d02b86901 --- /dev/null +++ b/common/proto3/common/auth/note_update_request.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/note_type.proto"; +import "enums/note_state.proto"; + +option cc_enable_arenas = true; + +message NoteUpdateRequest { + string id = 1; + string comment = 2; + optional NoteType new_type = 3; + optional NoteState new_state = 4; + optional string new_title = 5; +} diff --git a/common/proto3/common/auth/note_view_request.proto b/common/proto3/common/auth/note_view_request.proto new file mode 100644 index 000000000..1bfed1c3e --- /dev/null +++ b/common/proto3/common/auth/note_view_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message NoteViewRequest { + string id = 1; +} diff --git a/common/proto3/common/auth/project_create_request.proto b/common/proto3/common/auth/project_create_request.proto new file mode 100644 index 000000000..71ed5f4cd --- /dev/null +++ b/common/proto3/common/auth/project_create_request.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message ProjectCreateRequest { + string id = 1; + string title = 2; + optional string desc = 3; + repeated string admin = 4; + repeated string member = 5; +} diff --git a/common/proto3/common/auth/project_data_reply.proto b/common/proto3/common/auth/project_data_reply.proto new file mode 100644 index 000000000..66d0ed0cc --- /dev/null +++ 
b/common/proto3/common/auth/project_data_reply.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/project_data.proto"; + +option cc_enable_arenas = true; + +message ProjectDataReply { + repeated ProjectData proj = 1; +} diff --git a/common/proto3/common/auth/project_delete_request.proto b/common/proto3/common/auth/project_delete_request.proto new file mode 100644 index 000000000..faeb1d1f9 --- /dev/null +++ b/common/proto3/common/auth/project_delete_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message ProjectDeleteRequest { + repeated string id = 1; +} diff --git a/common/proto3/common/auth/project_get_role_reply.proto b/common/proto3/common/auth/project_get_role_reply.proto new file mode 100644 index 000000000..311871326 --- /dev/null +++ b/common/proto3/common/auth/project_get_role_reply.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/project_role.proto"; + +option cc_enable_arenas = true; + +message ProjectGetRoleReply { + ProjectRole role = 1; +} diff --git a/common/proto3/common/auth/project_get_role_request.proto b/common/proto3/common/auth/project_get_role_request.proto new file mode 100644 index 000000000..8854865ea --- /dev/null +++ b/common/proto3/common/auth/project_get_role_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message ProjectGetRoleRequest { + string id = 1; + optional string subject = 2; +} diff --git a/common/proto3/common/auth/project_list_request.proto b/common/proto3/common/auth/project_list_request.proto new file mode 100644 index 000000000..0c38a5b7c --- /dev/null +++ b/common/proto3/common/auth/project_list_request.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/sort_option.proto"; + +option cc_enable_arenas = true; + +message ProjectListRequest { + optional string subject = 1; + optional bool as_owner = 2; + optional bool as_admin = 
3; + optional bool as_member = 4; + optional SortOption sort = 5; + optional bool sort_rev = 6; + optional uint32 offset = 7; + optional uint32 count = 8; +} diff --git a/common/proto3/common/auth/project_search_request.proto b/common/proto3/common/auth/project_search_request.proto new file mode 100644 index 000000000..4862a16ee --- /dev/null +++ b/common/proto3/common/auth/project_search_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message ProjectSearchRequest { + string text_query = 1; + repeated string scope = 2; +} diff --git a/common/proto3/common/auth/project_update_request.proto b/common/proto3/common/auth/project_update_request.proto new file mode 100644 index 000000000..1573271c7 --- /dev/null +++ b/common/proto3/common/auth/project_update_request.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message ProjectUpdateRequest { + string id = 1; + optional string title = 2; + optional string desc = 3; + bool admin_set = 4; + repeated string admin = 5; + bool member_set = 6; + repeated string member = 7; +} diff --git a/common/proto3/common/auth/project_view_request.proto b/common/proto3/common/auth/project_view_request.proto new file mode 100644 index 000000000..07432ec57 --- /dev/null +++ b/common/proto3/common/auth/project_view_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message ProjectViewRequest { + string id = 1; +} diff --git a/common/proto3/common/auth/query_create_request.proto b/common/proto3/common/auth/query_create_request.proto new file mode 100644 index 000000000..0b52c65e5 --- /dev/null +++ b/common/proto3/common/auth/query_create_request.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package SDMS; + +import "auth/search_request.proto"; + +option cc_enable_arenas = true; + +message QueryCreateRequest { + string title = 1; + SearchRequest query = 2; +} diff --git 
a/common/proto3/common/auth/query_data_reply.proto b/common/proto3/common/auth/query_data_reply.proto new file mode 100644 index 000000000..371a631f9 --- /dev/null +++ b/common/proto3/common/auth/query_data_reply.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; + +package SDMS; + +import "auth/search_request.proto"; + +option cc_enable_arenas = true; + +message QueryDataReply { + string id = 1; + string title = 2; + string owner = 4; + uint32 ct = 5; + uint32 ut = 6; + SearchRequest query = 7; +} diff --git a/common/proto3/common/auth/query_delete_request.proto b/common/proto3/common/auth/query_delete_request.proto new file mode 100644 index 000000000..0bcada00d --- /dev/null +++ b/common/proto3/common/auth/query_delete_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message QueryDeleteRequest { + repeated string id = 1; +} diff --git a/common/proto3/common/auth/query_exec_request.proto b/common/proto3/common/auth/query_exec_request.proto new file mode 100644 index 000000000..a7f62e965 --- /dev/null +++ b/common/proto3/common/auth/query_exec_request.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message QueryExecRequest { + string id = 1; + optional uint32 offset = 2; + optional uint32 count = 3; +} diff --git a/common/proto3/common/auth/query_list_request.proto b/common/proto3/common/auth/query_list_request.proto new file mode 100644 index 000000000..a1b0b3c34 --- /dev/null +++ b/common/proto3/common/auth/query_list_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message QueryListRequest { + optional uint32 offset = 1; + optional uint32 count = 2; +} diff --git a/common/proto3/common/auth/query_update_request.proto b/common/proto3/common/auth/query_update_request.proto new file mode 100644 index 000000000..01092275d --- /dev/null +++ b/common/proto3/common/auth/query_update_request.proto @@ -0,0 +1,14 @@ 
+syntax = "proto3"; + +package SDMS; + +import "auth/search_request.proto"; + +option cc_enable_arenas = true; + +message QueryUpdateRequest { + string id = 1; + optional string title = 2; + SearchRequest query = 3; + bool replace_query = 4; +} diff --git a/common/proto3/common/auth/query_view_request.proto b/common/proto3/common/auth/query_view_request.proto new file mode 100644 index 000000000..c4b4a7985 --- /dev/null +++ b/common/proto3/common/auth/query_view_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message QueryViewRequest { + string id = 1; +} diff --git a/common/proto3/common/auth/record_alloc_change_reply.proto b/common/proto3/common/auth/record_alloc_change_reply.proto new file mode 100644 index 000000000..4863be29f --- /dev/null +++ b/common/proto3/common/auth/record_alloc_change_reply.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/task_data.proto"; + +option cc_enable_arenas = true; + +message RecordAllocChangeReply { + uint32 act_cnt = 1; + uint64 act_size = 2; + uint32 tot_cnt = 3; + uint64 data_limit = 4; + uint64 data_size = 5; + uint32 rec_limit = 6; + uint32 rec_count = 7; + TaskData task = 8; +} diff --git a/common/proto3/common/auth/record_alloc_change_request.proto b/common/proto3/common/auth/record_alloc_change_request.proto new file mode 100644 index 000000000..ba9a4c5f2 --- /dev/null +++ b/common/proto3/common/auth/record_alloc_change_request.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RecordAllocChangeRequest { + repeated string id = 1; + string repo_id = 2; + optional string proj_id = 3; + optional bool check = 4; +} diff --git a/common/proto3/common/auth/record_create_batch_request.proto b/common/proto3/common/auth/record_create_batch_request.proto new file mode 100644 index 000000000..8d244016e --- /dev/null +++ b/common/proto3/common/auth/record_create_batch_request.proto @@ -0,0 
+1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RecordCreateBatchRequest { + string records = 1; // JSON array +} diff --git a/common/proto3/common/auth/record_create_request.proto b/common/proto3/common/auth/record_create_request.proto new file mode 100644 index 000000000..259ed570d --- /dev/null +++ b/common/proto3/common/auth/record_create_request.proto @@ -0,0 +1,24 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/dependency_spec_data.proto"; + +option cc_enable_arenas = true; + +message RecordCreateRequest { + string title = 1; + optional string desc = 2; + repeated string tags = 3; + optional string alias = 4; + optional string metadata = 5; + optional string parent_id = 6; + optional bool external = 7; + optional string source = 8; + optional string repo_id = 9; + optional string ext = 10; + optional bool ext_auto = 11; + repeated DependencySpecData deps = 12; + optional string sch_id = 13; + optional bool sch_enforce = 14; +} diff --git a/common/proto3/common/auth/record_data_reply.proto b/common/proto3/common/auth/record_data_reply.proto new file mode 100644 index 000000000..09132fd36 --- /dev/null +++ b/common/proto3/common/auth/record_data_reply.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/record_data.proto"; +import "messages/listing_data.proto"; + +option cc_enable_arenas = true; + +message RecordDataReply { + repeated RecordData data = 1; + repeated ListingData update = 2; +} diff --git a/common/proto3/common/auth/record_delete_request.proto b/common/proto3/common/auth/record_delete_request.proto new file mode 100644 index 000000000..3cdc56839 --- /dev/null +++ b/common/proto3/common/auth/record_delete_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RecordDeleteRequest { + repeated string id = 1; +} diff --git a/common/proto3/common/auth/record_export_reply.proto 
b/common/proto3/common/auth/record_export_reply.proto new file mode 100644 index 000000000..17f91703a --- /dev/null +++ b/common/proto3/common/auth/record_export_reply.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RecordExportReply { + repeated string record = 1; +} diff --git a/common/proto3/common/auth/record_export_request.proto b/common/proto3/common/auth/record_export_request.proto new file mode 100644 index 000000000..53ab49213 --- /dev/null +++ b/common/proto3/common/auth/record_export_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RecordExportRequest { + repeated string id = 1; +} diff --git a/common/proto3/common/auth/record_get_dependency_graph_request.proto b/common/proto3/common/auth/record_get_dependency_graph_request.proto new file mode 100644 index 000000000..4b7ce1ac2 --- /dev/null +++ b/common/proto3/common/auth/record_get_dependency_graph_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RecordGetDependencyGraphRequest { + string id = 1; +} diff --git a/common/proto3/common/auth/record_list_by_alloc_request.proto b/common/proto3/common/auth/record_list_by_alloc_request.proto new file mode 100644 index 000000000..973570e3c --- /dev/null +++ b/common/proto3/common/auth/record_list_by_alloc_request.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RecordListByAllocRequest { + string repo = 1; + string subject = 2; + optional uint32 offset = 3; + optional uint32 count = 4; +} diff --git a/common/proto3/common/auth/record_lock_request.proto b/common/proto3/common/auth/record_lock_request.proto new file mode 100644 index 000000000..b6513fe18 --- /dev/null +++ b/common/proto3/common/auth/record_lock_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message 
RecordLockRequest { + repeated string id = 1; + bool lock = 2; +} diff --git a/common/proto3/common/auth/record_owner_change_reply.proto b/common/proto3/common/auth/record_owner_change_reply.proto new file mode 100644 index 000000000..f6598457f --- /dev/null +++ b/common/proto3/common/auth/record_owner_change_reply.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/alloc_data.proto"; +import "messages/task_data.proto"; + +option cc_enable_arenas = true; + +message RecordOwnerChangeReply { + uint32 act_cnt = 1; + uint64 act_size = 2; + uint32 tot_cnt = 3; + repeated AllocData alloc = 4; + TaskData task = 8; +} diff --git a/common/proto3/common/auth/record_owner_change_request.proto b/common/proto3/common/auth/record_owner_change_request.proto new file mode 100644 index 000000000..13ffef242 --- /dev/null +++ b/common/proto3/common/auth/record_owner_change_request.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RecordOwnerChangeRequest { + repeated string id = 1; + string coll_id = 2; + optional string repo_id = 3; + string proj_id = 4; + optional bool check = 5; +} diff --git a/common/proto3/common/auth/record_update_batch_request.proto b/common/proto3/common/auth/record_update_batch_request.proto new file mode 100644 index 000000000..1653e5cc1 --- /dev/null +++ b/common/proto3/common/auth/record_update_batch_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RecordUpdateBatchRequest { + string records = 1; // JSON array +} diff --git a/common/proto3/common/auth/record_update_request.proto b/common/proto3/common/auth/record_update_request.proto new file mode 100644 index 000000000..7fdd216df --- /dev/null +++ b/common/proto3/common/auth/record_update_request.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/dependency_spec_data.proto"; + +option cc_enable_arenas = true; + +message 
RecordUpdateRequest { + string id = 1; + optional string title = 2; + optional string desc = 3; + repeated string tags = 4; + optional bool tags_clear = 5; + optional string alias = 6; + optional string metadata = 7; + optional bool mdset = 8; + optional string sch_id = 9; + optional bool sch_enforce = 10; + optional string source = 11; + optional string ext = 12; + optional bool ext_auto = 13; + repeated DependencySpecData dep_add = 16; + repeated DependencySpecData dep_rem = 17; +} diff --git a/common/proto3/common/auth/record_view_request.proto b/common/proto3/common/auth/record_view_request.proto new file mode 100644 index 000000000..fa6344fbb --- /dev/null +++ b/common/proto3/common/auth/record_view_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RecordViewRequest { + string id = 1; + bool details = 2; +} diff --git a/common/proto3/common/auth/repo_allocation_create_request.proto b/common/proto3/common/auth/repo_allocation_create_request.proto new file mode 100644 index 000000000..876f87f7f --- /dev/null +++ b/common/proto3/common/auth/repo_allocation_create_request.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoAllocationCreateRequest { + string repo = 1; + string subject = 2; + uint64 data_limit = 3; + uint32 rec_limit = 4; +} diff --git a/common/proto3/common/auth/repo_allocation_create_response.proto b/common/proto3/common/auth/repo_allocation_create_response.proto new file mode 100644 index 000000000..cdd56a18a --- /dev/null +++ b/common/proto3/common/auth/repo_allocation_create_response.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/execution_method.proto"; +import "messages/task_data.proto"; +import "messages/alloc_data.proto"; + +option cc_enable_arenas = true; + +message RepoAllocationCreateResponse { + ExecutionMethod execution_method = 1; + TaskData task = 2; + AllocData result = 3; +} diff 
--git a/common/proto3/common/auth/repo_allocation_delete_request.proto b/common/proto3/common/auth/repo_allocation_delete_request.proto new file mode 100644 index 000000000..454842e4c --- /dev/null +++ b/common/proto3/common/auth/repo_allocation_delete_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoAllocationDeleteRequest { + string repo = 1; + string subject = 2; +} diff --git a/common/proto3/common/auth/repo_allocation_set_default_request.proto b/common/proto3/common/auth/repo_allocation_set_default_request.proto new file mode 100644 index 000000000..aa96c0b9f --- /dev/null +++ b/common/proto3/common/auth/repo_allocation_set_default_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoAllocationSetDefaultRequest { + string repo = 1; + optional string subject = 2; +} diff --git a/common/proto3/common/auth/repo_allocation_set_request.proto b/common/proto3/common/auth/repo_allocation_set_request.proto new file mode 100644 index 000000000..2a2e883d4 --- /dev/null +++ b/common/proto3/common/auth/repo_allocation_set_request.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoAllocationSetRequest { + string repo = 1; + string subject = 2; + uint64 data_limit = 3; + uint32 rec_limit = 4; +} diff --git a/common/proto3/common/auth/repo_allocation_stats_reply.proto b/common/proto3/common/auth/repo_allocation_stats_reply.proto new file mode 100644 index 000000000..068883291 --- /dev/null +++ b/common/proto3/common/auth/repo_allocation_stats_reply.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/alloc_stats_data.proto"; + +option cc_enable_arenas = true; + +message RepoAllocationStatsReply { + AllocStatsData alloc = 1; +} diff --git a/common/proto3/common/auth/repo_allocation_stats_request.proto 
b/common/proto3/common/auth/repo_allocation_stats_request.proto new file mode 100644 index 000000000..b5a9cef86 --- /dev/null +++ b/common/proto3/common/auth/repo_allocation_stats_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoAllocationStatsRequest { + string repo = 1; + optional string subject = 2; +} diff --git a/common/proto3/common/auth/repo_allocations_reply.proto b/common/proto3/common/auth/repo_allocations_reply.proto new file mode 100644 index 000000000..fa8a28931 --- /dev/null +++ b/common/proto3/common/auth/repo_allocations_reply.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/alloc_data.proto"; + +option cc_enable_arenas = true; + +message RepoAllocationsReply { + repeated AllocData alloc = 1; +} diff --git a/common/proto3/common/auth/repo_authz_request.proto b/common/proto3/common/auth/repo_authz_request.proto new file mode 100644 index 000000000..2c077e588 --- /dev/null +++ b/common/proto3/common/auth/repo_authz_request.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoAuthzRequest { + string repo = 1; + string client = 2; + string file = 3; + string action = 4; +} diff --git a/common/proto3/common/auth/repo_calc_size_reply.proto b/common/proto3/common/auth/repo_calc_size_reply.proto new file mode 100644 index 000000000..47abbdb87 --- /dev/null +++ b/common/proto3/common/auth/repo_calc_size_reply.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/alloc_stats_data.proto"; + +option cc_enable_arenas = true; + +message RepoCalcSizeReply { + repeated AllocStatsData stats = 1; +} diff --git a/common/proto3/common/auth/repo_calc_size_request.proto b/common/proto3/common/auth/repo_calc_size_request.proto new file mode 100644 index 000000000..69ae02b40 --- /dev/null +++ b/common/proto3/common/auth/repo_calc_size_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + 
+package SDMS; + +option cc_enable_arenas = true; + +message RepoCalcSizeRequest { + bool recurse = 1; + repeated string item = 2; +} diff --git a/common/proto3/common/auth/repo_create_request.proto b/common/proto3/common/auth/repo_create_request.proto new file mode 100644 index 000000000..7efb3431f --- /dev/null +++ b/common/proto3/common/auth/repo_create_request.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoCreateRequest { + string id = 1; + string title = 2; + optional string desc = 3; + optional string domain = 5; + optional string path = 6; + optional string exp_path = 7; + optional string address = 8; + optional string endpoint = 9; + optional string pub_key = 10; + uint64 capacity = 11; + repeated string admin = 12; + string type = 13; +} diff --git a/common/proto3/common/auth/repo_data_delete_request.proto b/common/proto3/common/auth/repo_data_delete_request.proto new file mode 100644 index 000000000..5a9c5e962 --- /dev/null +++ b/common/proto3/common/auth/repo_data_delete_request.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/record_data_location.proto"; + +option cc_enable_arenas = true; + +message RepoDataDeleteRequest { + repeated RecordDataLocation loc = 1; +} diff --git a/common/proto3/common/auth/repo_data_get_size_request.proto b/common/proto3/common/auth/repo_data_get_size_request.proto new file mode 100644 index 000000000..ff1333b86 --- /dev/null +++ b/common/proto3/common/auth/repo_data_get_size_request.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/record_data_location.proto"; + +option cc_enable_arenas = true; + +message RepoDataGetSizeRequest { + repeated RecordDataLocation loc = 1; +} diff --git a/common/proto3/common/auth/repo_data_reply.proto b/common/proto3/common/auth/repo_data_reply.proto new file mode 100644 index 000000000..f4b2875be --- /dev/null +++ b/common/proto3/common/auth/repo_data_reply.proto @@ -0,0 
+1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/repo_data.proto"; + +option cc_enable_arenas = true; + +message RepoDataReply { + repeated RepoData repo = 1; +} diff --git a/common/proto3/common/auth/repo_data_size_reply.proto b/common/proto3/common/auth/repo_data_size_reply.proto new file mode 100644 index 000000000..87d4ea370 --- /dev/null +++ b/common/proto3/common/auth/repo_data_size_reply.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/record_data_size.proto"; + +option cc_enable_arenas = true; + +message RepoDataSizeReply { + repeated RecordDataSize size = 1; +} diff --git a/common/proto3/common/auth/repo_delete_request.proto b/common/proto3/common/auth/repo_delete_request.proto new file mode 100644 index 000000000..e83e83c83 --- /dev/null +++ b/common/proto3/common/auth/repo_delete_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoDeleteRequest { + string id = 1; +} diff --git a/common/proto3/common/auth/repo_list_allocations_request.proto b/common/proto3/common/auth/repo_list_allocations_request.proto new file mode 100644 index 000000000..304052578 --- /dev/null +++ b/common/proto3/common/auth/repo_list_allocations_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoListAllocationsRequest { + string id = 1; +} diff --git a/common/proto3/common/auth/repo_list_object_allocations_request.proto b/common/proto3/common/auth/repo_list_object_allocations_request.proto new file mode 100644 index 000000000..ec8ced43b --- /dev/null +++ b/common/proto3/common/auth/repo_list_object_allocations_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoListObjectAllocationsRequest { + string id = 1; +} diff --git a/common/proto3/common/auth/repo_list_request.proto b/common/proto3/common/auth/repo_list_request.proto new file mode 100644 
index 000000000..7d56d9d47 --- /dev/null +++ b/common/proto3/common/auth/repo_list_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoListRequest { + optional bool details = 1; + optional bool all = 2; +} diff --git a/common/proto3/common/auth/repo_list_subject_allocations_request.proto b/common/proto3/common/auth/repo_list_subject_allocations_request.proto new file mode 100644 index 000000000..23421fe83 --- /dev/null +++ b/common/proto3/common/auth/repo_list_subject_allocations_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoListSubjectAllocationsRequest { + optional string subject = 1; + optional bool stats = 2; +} diff --git a/common/proto3/common/auth/repo_path_create_request.proto b/common/proto3/common/auth/repo_path_create_request.proto new file mode 100644 index 000000000..494122e4f --- /dev/null +++ b/common/proto3/common/auth/repo_path_create_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoPathCreateRequest { + string path = 1; +} diff --git a/common/proto3/common/auth/repo_path_delete_request.proto b/common/proto3/common/auth/repo_path_delete_request.proto new file mode 100644 index 000000000..5935c7612 --- /dev/null +++ b/common/proto3/common/auth/repo_path_delete_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoPathDeleteRequest { + string path = 1; +} diff --git a/common/proto3/common/auth/repo_update_request.proto b/common/proto3/common/auth/repo_update_request.proto new file mode 100644 index 000000000..00b83dd07 --- /dev/null +++ b/common/proto3/common/auth/repo_update_request.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoUpdateRequest { + string id = 1; + optional string title = 2; + optional string desc = 3; + 
optional string domain = 5; + optional string path = 6; + optional string exp_path = 7; + optional string address = 8; + optional string endpoint = 9; + optional string pub_key = 10; + optional uint64 capacity = 11; + repeated string admin = 12; + string type = 13; +} diff --git a/common/proto3/common/auth/repo_view_allocation_request.proto b/common/proto3/common/auth/repo_view_allocation_request.proto new file mode 100644 index 000000000..111482905 --- /dev/null +++ b/common/proto3/common/auth/repo_view_allocation_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoViewAllocationRequest { + string repo = 1; + optional string subject = 2; +} diff --git a/common/proto3/common/auth/repo_view_request.proto b/common/proto3/common/auth/repo_view_request.proto new file mode 100644 index 000000000..2e8bdd00b --- /dev/null +++ b/common/proto3/common/auth/repo_view_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoViewRequest { + string id = 1; +} diff --git a/common/proto3/common/auth/revoke_credentials_request.proto b/common/proto3/common/auth/revoke_credentials_request.proto new file mode 100644 index 000000000..40dd4f3ba --- /dev/null +++ b/common/proto3/common/auth/revoke_credentials_request.proto @@ -0,0 +1,8 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RevokeCredentialsRequest { +} diff --git a/common/proto3/common/auth/schema_create_request.proto b/common/proto3/common/auth/schema_create_request.proto new file mode 100644 index 000000000..79def7813 --- /dev/null +++ b/common/proto3/common/auth/schema_create_request.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message SchemaCreateRequest { + string id = 1; + string desc = 2; + bool pub = 3; + bool sys = 4; + string def = 5; +} diff --git a/common/proto3/common/auth/schema_data_reply.proto 
b/common/proto3/common/auth/schema_data_reply.proto new file mode 100644 index 000000000..301de1feb --- /dev/null +++ b/common/proto3/common/auth/schema_data_reply.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/schema_data.proto"; + +option cc_enable_arenas = true; + +message SchemaDataReply { + repeated SchemaData schema = 1; + uint32 offset = 2; + uint32 count = 3; + uint32 total = 4; +} diff --git a/common/proto3/common/auth/schema_delete_request.proto b/common/proto3/common/auth/schema_delete_request.proto new file mode 100644 index 000000000..8d93645d6 --- /dev/null +++ b/common/proto3/common/auth/schema_delete_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message SchemaDeleteRequest { + string id = 1; +} diff --git a/common/proto3/common/auth/schema_revise_request.proto b/common/proto3/common/auth/schema_revise_request.proto new file mode 100644 index 000000000..02a1a6931 --- /dev/null +++ b/common/proto3/common/auth/schema_revise_request.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message SchemaReviseRequest { + string id = 1; + optional string desc = 2; + optional bool pub = 3; + optional bool sys = 4; + optional string def = 5; +} diff --git a/common/proto3/common/auth/schema_search_request.proto b/common/proto3/common/auth/schema_search_request.proto new file mode 100644 index 000000000..c88275d15 --- /dev/null +++ b/common/proto3/common/auth/schema_search_request.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/sort_option.proto"; + +option cc_enable_arenas = true; + +message SchemaSearchRequest { + optional string id = 1; + optional string text = 2; + optional string owner = 3; + optional SortOption sort = 4; + optional bool sort_rev = 5; + optional uint32 offset = 6; + optional uint32 count = 7; +} diff --git a/common/proto3/common/auth/schema_update_request.proto 
b/common/proto3/common/auth/schema_update_request.proto new file mode 100644 index 000000000..c02dee5e5 --- /dev/null +++ b/common/proto3/common/auth/schema_update_request.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message SchemaUpdateRequest { + string id = 1; + optional string id_new = 2; + optional string desc = 3; + optional bool pub = 4; + optional bool sys = 5; + optional string def = 6; +} diff --git a/common/proto3/common/auth/schema_view_request.proto b/common/proto3/common/auth/schema_view_request.proto new file mode 100644 index 000000000..4b7cb37f5 --- /dev/null +++ b/common/proto3/common/auth/schema_view_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message SchemaViewRequest { + string id = 1; + optional bool resolve = 2; +} diff --git a/common/proto3/common/auth/search_request.proto b/common/proto3/common/auth/search_request.proto new file mode 100644 index 000000000..e87533c0e --- /dev/null +++ b/common/proto3/common/auth/search_request.proto @@ -0,0 +1,29 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/search_mode.proto"; +import "enums/sort_option.proto"; + +option cc_enable_arenas = true; + +message SearchRequest { + SearchMode mode = 1; + optional bool published = 2; + optional string id = 3; + optional string text = 4; + repeated string tags = 5; + repeated string cat_tags = 6; + optional uint32 from = 7; + optional uint32 to = 8; + optional string owner = 9; + optional string creator = 10; + repeated string coll = 11; + optional string sch_id = 12; + optional string meta = 13; + optional bool meta_err = 14; + optional SortOption sort = 15; + optional bool sort_rev = 16; + optional uint32 offset = 17; + optional uint32 count = 18; +} diff --git a/common/proto3/common/auth/tag_data_reply.proto b/common/proto3/common/auth/tag_data_reply.proto new file mode 100644 index 000000000..d789311c2 --- /dev/null +++ 
b/common/proto3/common/auth/tag_data_reply.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/tag_data.proto"; + +option cc_enable_arenas = true; + +message TagDataReply { + repeated TagData tag = 1; + uint32 offset = 2; + uint32 count = 3; + uint32 total = 4; +} diff --git a/common/proto3/common/auth/tag_list_by_count_request.proto b/common/proto3/common/auth/tag_list_by_count_request.proto new file mode 100644 index 000000000..e190f69a1 --- /dev/null +++ b/common/proto3/common/auth/tag_list_by_count_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message TagListByCountRequest { + optional uint32 offset = 1; + optional uint32 count = 2; +} diff --git a/common/proto3/common/auth/tag_search_request.proto b/common/proto3/common/auth/tag_search_request.proto new file mode 100644 index 000000000..741cba10c --- /dev/null +++ b/common/proto3/common/auth/tag_search_request.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message TagSearchRequest { + string name = 1; + optional uint32 offset = 2; + optional uint32 count = 3; +} diff --git a/common/proto3/common/auth/task_data_reply.proto b/common/proto3/common/auth/task_data_reply.proto new file mode 100644 index 000000000..543dd944f --- /dev/null +++ b/common/proto3/common/auth/task_data_reply.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/task_data.proto"; + +option cc_enable_arenas = true; + +message TaskDataReply { + repeated TaskData task = 1; +} diff --git a/common/proto3/common/auth/task_list_request.proto b/common/proto3/common/auth/task_list_request.proto new file mode 100644 index 000000000..dae0cc665 --- /dev/null +++ b/common/proto3/common/auth/task_list_request.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/task_status.proto"; + +option cc_enable_arenas = true; + +message TaskListRequest { + optional uint32 since = 
1; + optional uint32 from = 2; + optional uint32 to = 3; + repeated TaskStatus status = 4; + optional uint32 offset = 5; + optional uint32 count = 6; +} diff --git a/common/proto3/common/auth/task_view_request.proto b/common/proto3/common/auth/task_view_request.proto new file mode 100644 index 000000000..25553ce47 --- /dev/null +++ b/common/proto3/common/auth/task_view_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message TaskViewRequest { + string task_id = 1; +} diff --git a/common/proto3/common/auth/topic_data_reply.proto b/common/proto3/common/auth/topic_data_reply.proto new file mode 100644 index 000000000..593b18ac6 --- /dev/null +++ b/common/proto3/common/auth/topic_data_reply.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/topic_data.proto"; + +option cc_enable_arenas = true; + +message TopicDataReply { + repeated TopicData topic = 1; + uint32 offset = 2; + uint32 count = 3; + uint32 total = 4; +} diff --git a/common/proto3/common/auth/topic_list_topics_request.proto b/common/proto3/common/auth/topic_list_topics_request.proto new file mode 100644 index 000000000..ef4d5f448 --- /dev/null +++ b/common/proto3/common/auth/topic_list_topics_request.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message TopicListTopicsRequest { + optional string topic_id = 1; + optional uint32 offset = 2; + optional uint32 count = 3; +} diff --git a/common/proto3/common/auth/topic_search_request.proto b/common/proto3/common/auth/topic_search_request.proto new file mode 100644 index 000000000..90c269264 --- /dev/null +++ b/common/proto3/common/auth/topic_search_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message TopicSearchRequest { + string phrase = 1; +} diff --git a/common/proto3/common/auth/topic_view_request.proto b/common/proto3/common/auth/topic_view_request.proto new file 
mode 100644 index 000000000..5c16a624e --- /dev/null +++ b/common/proto3/common/auth/topic_view_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message TopicViewRequest { + string id = 1; +} diff --git a/common/proto3/common/auth/user_access_token_reply.proto b/common/proto3/common/auth/user_access_token_reply.proto new file mode 100644 index 000000000..2d2c9f333 --- /dev/null +++ b/common/proto3/common/auth/user_access_token_reply.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message UserAccessTokenReply { + string access = 1; + uint32 expires_in = 2; + bool needs_consent = 3; +} diff --git a/common/proto3/common/auth/user_create_request.proto b/common/proto3/common/auth/user_create_request.proto new file mode 100644 index 000000000..2595e4a82 --- /dev/null +++ b/common/proto3/common/auth/user_create_request.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message UserCreateRequest { + string uid = 1; + optional string password = 2; + string name = 3; + string email = 4; + repeated string uuid = 5; + optional string options = 6; + string secret = 7; +} diff --git a/common/proto3/common/auth/user_data_reply.proto b/common/proto3/common/auth/user_data_reply.proto new file mode 100644 index 000000000..4350bf024 --- /dev/null +++ b/common/proto3/common/auth/user_data_reply.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/user_data.proto"; + +option cc_enable_arenas = true; + +message UserDataReply { + repeated UserData user = 1; + uint32 offset = 2; + uint32 count = 3; + uint32 total = 4; +} diff --git a/common/proto3/common/auth/user_find_by_name_uid_request.proto b/common/proto3/common/auth/user_find_by_name_uid_request.proto new file mode 100644 index 000000000..bc527af10 --- /dev/null +++ b/common/proto3/common/auth/user_find_by_name_uid_request.proto @@ -0,0 +1,11 @@ +syntax = 
"proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message UserFindByNameUIDRequest { + string name_uid = 1; + optional uint32 offset = 2; + optional uint32 count = 3; +} diff --git a/common/proto3/common/auth/user_find_by_uuids_request.proto b/common/proto3/common/auth/user_find_by_uuids_request.proto new file mode 100644 index 000000000..5dbccdd10 --- /dev/null +++ b/common/proto3/common/auth/user_find_by_uuids_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message UserFindByUUIDsRequest { + repeated string uuid = 1; +} diff --git a/common/proto3/common/auth/user_get_access_token_request.proto b/common/proto3/common/auth/user_get_access_token_request.proto new file mode 100644 index 000000000..c396ca9c7 --- /dev/null +++ b/common/proto3/common/auth/user_get_access_token_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message UserGetAccessTokenRequest { + optional string collection_id = 1; + optional string collection_type = 2; +} diff --git a/common/proto3/common/auth/user_get_recent_ep_reply.proto b/common/proto3/common/auth/user_get_recent_ep_reply.proto new file mode 100644 index 000000000..ed3609e84 --- /dev/null +++ b/common/proto3/common/auth/user_get_recent_ep_reply.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message UserGetRecentEPReply { + repeated string ep = 1; +} diff --git a/common/proto3/common/auth/user_get_recent_ep_request.proto b/common/proto3/common/auth/user_get_recent_ep_request.proto new file mode 100644 index 000000000..357385708 --- /dev/null +++ b/common/proto3/common/auth/user_get_recent_ep_request.proto @@ -0,0 +1,8 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message UserGetRecentEPRequest { +} diff --git a/common/proto3/common/auth/user_list_all_request.proto b/common/proto3/common/auth/user_list_all_request.proto new file 
mode 100644 index 000000000..3bc560cf9 --- /dev/null +++ b/common/proto3/common/auth/user_list_all_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message UserListAllRequest { + optional uint32 offset = 1; + optional uint32 count = 2; +} diff --git a/common/proto3/common/auth/user_list_collab_request.proto b/common/proto3/common/auth/user_list_collab_request.proto new file mode 100644 index 000000000..335ca4f09 --- /dev/null +++ b/common/proto3/common/auth/user_list_collab_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message UserListCollabRequest { + optional uint32 offset = 1; + optional uint32 count = 2; +} diff --git a/common/proto3/common/auth/user_set_access_token_request.proto b/common/proto3/common/auth/user_set_access_token_request.proto new file mode 100644 index 000000000..c3e29cd4a --- /dev/null +++ b/common/proto3/common/auth/user_set_access_token_request.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/access_token_type.proto"; + +option cc_enable_arenas = true; + +message UserSetAccessTokenRequest { + string access = 1; + uint32 expires_in = 2; + string refresh = 3; + AccessTokenType type = 4; + string other = 5; +} diff --git a/common/proto3/common/auth/user_set_recent_ep_request.proto b/common/proto3/common/auth/user_set_recent_ep_request.proto new file mode 100644 index 000000000..839b734fa --- /dev/null +++ b/common/proto3/common/auth/user_set_recent_ep_request.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message UserSetRecentEPRequest { + repeated string ep = 1; +} diff --git a/common/proto3/common/auth/user_update_request.proto b/common/proto3/common/auth/user_update_request.proto new file mode 100644 index 000000000..55b550eb6 --- /dev/null +++ b/common/proto3/common/auth/user_update_request.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package 
SDMS; + +option cc_enable_arenas = true; + +message UserUpdateRequest { + string uid = 1; + optional string email = 2; + optional string password = 3; + optional string options = 4; +} diff --git a/common/proto3/common/auth/user_view_request.proto b/common/proto3/common/auth/user_view_request.proto new file mode 100644 index 000000000..70cea53da --- /dev/null +++ b/common/proto3/common/auth/user_view_request.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message UserViewRequest { + string uid = 1; + optional bool details = 2; +} diff --git a/common/proto3/common/enums/access_token_type.proto b/common/proto3/common/enums/access_token_type.proto new file mode 100644 index 000000000..81af3bbcc --- /dev/null +++ b/common/proto3/common/enums/access_token_type.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum AccessTokenType { + TOKEN_UNSPECIFIED = 0; + GENERIC = 1; + GLOBUS = 2; + GLOBUS_AUTH = 3; + GLOBUS_TRANSFER = 4; + GLOBUS_DEFAULT = 5; +} diff --git a/common/proto3/common/enums/dependency_dir.proto b/common/proto3/common/enums/dependency_dir.proto new file mode 100644 index 000000000..cf3b2f49a --- /dev/null +++ b/common/proto3/common/enums/dependency_dir.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum DependencyDir { + DIR_IN = 0; + DIR_OUT = 1; +} diff --git a/common/proto3/common/enums/dependency_type.proto b/common/proto3/common/enums/dependency_type.proto new file mode 100644 index 000000000..16010e48a --- /dev/null +++ b/common/proto3/common/enums/dependency_type.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum DependencyType { + DEP_IS_DERIVED_FROM = 0; + DEP_IS_COMPONENT_OF = 1; + DEP_IS_NEW_VERSION_OF = 2; +} diff --git a/common/proto3/common/enums/encryption.proto b/common/proto3/common/enums/encryption.proto new file mode 100644 index 
000000000..e9c814a5e --- /dev/null +++ b/common/proto3/common/enums/encryption.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum Encryption { + ENCRYPT_NONE = 0; + ENCRYPT_AVAIL = 1; + ENCRYPT_FORCE = 2; +} diff --git a/common/proto3/common/enums/error_code.proto b/common/proto3/common/enums/error_code.proto new file mode 100644 index 000000000..3abad9577 --- /dev/null +++ b/common/proto3/common/enums/error_code.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum ErrorCode { + UNSPECIFIED = 0; + BAD_REQUEST = 1; + INTERNAL_ERROR = 2; + CLIENT_ERROR = 3; + SERVICE_ERROR = 4; + AUTHN_REQUIRED = 5; + AUTHN_ERROR = 6; + DEST_PATH_ERROR = 7; + DEST_FILE_ERROR = 8; +} diff --git a/common/proto3/common/enums/execution_method.proto b/common/proto3/common/enums/execution_method.proto new file mode 100644 index 000000000..2cd3ef0b7 --- /dev/null +++ b/common/proto3/common/enums/execution_method.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum ExecutionMethod { + EXEC_UNSPECIFIED = 0; + DIRECT = 1; + DEFERRED = 2; +} diff --git a/common/proto3/common/enums/note_state.proto b/common/proto3/common/enums/note_state.proto new file mode 100644 index 000000000..d76bae2c1 --- /dev/null +++ b/common/proto3/common/enums/note_state.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum NoteState { + NOTE_STATE_CLOSED = 0; + NOTE_STATE_OPEN = 1; + NOTE_STATE_ACTIVE = 2; +} diff --git a/common/proto3/common/enums/note_type.proto b/common/proto3/common/enums/note_type.proto new file mode 100644 index 000000000..48f8e64fe --- /dev/null +++ b/common/proto3/common/enums/note_type.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum NoteType { + NOTE_TYPE_QUESTION = 0; + NOTE_TYPE_INFO = 1; + NOTE_TYPE_WARN = 2; + NOTE_TYPE_ERROR = 3; +} 
diff --git a/common/proto3/common/enums/project_role.proto b/common/proto3/common/enums/project_role.proto new file mode 100644 index 000000000..e13d94d35 --- /dev/null +++ b/common/proto3/common/enums/project_role.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum ProjectRole { + PROJECT_ROLE_NO_ROLE = 0; + PROJECT_ROLE_MEMBER = 1; + PROJECT_ROLE_MANAGER = 2; + PROJECT_ROLE_ADMIN = 3; +} diff --git a/common/proto3/common/enums/search_mode.proto b/common/proto3/common/enums/search_mode.proto new file mode 100644 index 000000000..f5675ac58 --- /dev/null +++ b/common/proto3/common/enums/search_mode.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum SearchMode { + SM_DATA = 0; + SM_COLLECTION = 1; +} diff --git a/common/proto3/common/enums/service_status.proto b/common/proto3/common/enums/service_status.proto new file mode 100644 index 000000000..473d784a2 --- /dev/null +++ b/common/proto3/common/enums/service_status.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum ServiceStatus { + SERVICE_STATUS_NORMAL = 0; + SERVICE_STATUS_DEGRADED = 1; + SERVICE_STATUS_FAILED = 2; + SERVICE_STATUS_OFFLINE = 3; +} diff --git a/common/proto3/common/enums/sort_option.proto b/common/proto3/common/enums/sort_option.proto new file mode 100644 index 000000000..1342c507f --- /dev/null +++ b/common/proto3/common/enums/sort_option.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum SortOption { + SORT_ID = 0; + SORT_TITLE = 1; + SORT_OWNER = 2; + SORT_TIME_CREATE = 3; + SORT_TIME_UPDATE = 4; + SORT_RELEVANCE = 5; +} diff --git a/common/proto3/common/enums/task_command.proto b/common/proto3/common/enums/task_command.proto new file mode 100644 index 000000000..15fc469a7 --- /dev/null +++ b/common/proto3/common/enums/task_command.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package SDMS; 
+ +option cc_enable_arenas = true; + +enum TaskCommand { + TC_STOP = 0; + TC_RAW_DATA_TRANSFER = 1; + TC_RAW_DATA_DELETE = 2; + TC_RAW_DATA_UPDATE_SIZE = 3; + TC_ALLOC_CREATE = 4; + TC_ALLOC_DELETE = 5; +} diff --git a/common/proto3/common/enums/task_status.proto b/common/proto3/common/enums/task_status.proto new file mode 100644 index 000000000..e42287e44 --- /dev/null +++ b/common/proto3/common/enums/task_status.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum TaskStatus { + TS_BLOCKED = 0; + TS_READY = 1; + TS_RUNNING = 2; + TS_SUCCEEDED = 3; + TS_FAILED = 4; +} diff --git a/common/proto3/common/enums/task_type.proto b/common/proto3/common/enums/task_type.proto new file mode 100644 index 000000000..947ec650f --- /dev/null +++ b/common/proto3/common/enums/task_type.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +enum TaskType { + TT_DATA_GET = 0; + TT_DATA_PUT = 1; + TT_DATA_DEL = 2; + TT_REC_CHG_ALLOC = 3; + TT_REC_CHG_OWNER = 4; + TT_REC_DEL = 5; + TT_ALLOC_CREATE = 6; + TT_ALLOC_DEL = 7; + TT_USER_DEL = 8; + TT_PROJ_DEL = 9; +} diff --git a/common/proto3/common/envelope.proto b/common/proto3/common/envelope.proto new file mode 100644 index 000000000..20233f220 --- /dev/null +++ b/common/proto3/common/envelope.proto @@ -0,0 +1,393 @@ +// Envelope message wrapping all DataFed protocol messages +// Use has_*() methods in C++ to check which message type is present +// +// Example C++ usage: +// sdms::Envelope env; +// if (env.has_version_request()) { +// // Handle version request +// } else if (env.has_record_create_request()) { +// // Handle record creation +// auto& req = env.record_create_request(); +// } + +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +// Anonymous messages +import "anon/ack_reply.proto"; +import "anon/nack_reply.proto"; +import "anon/version_request.proto"; +import "anon/version_reply.proto"; +import 
"anon/get_auth_status_request.proto"; +import "anon/authenticate_by_password_request.proto"; +import "anon/authenticate_by_token_request.proto"; +import "anon/auth_status_reply.proto"; +import "anon/daily_message_request.proto"; +import "anon/daily_message_reply.proto"; + +// Auth - Credentials +import "auth/generate_credentials_request.proto"; +import "auth/revoke_credentials_request.proto"; +import "auth/generate_credentials_reply.proto"; + +// Auth - Permissions +import "auth/check_perms_request.proto"; +import "auth/check_perms_reply.proto"; +import "auth/get_perms_request.proto"; +import "auth/get_perms_reply.proto"; + +// Auth - User +import "auth/user_view_request.proto"; +import "auth/user_data_reply.proto"; +import "auth/user_set_access_token_request.proto"; +import "auth/user_get_access_token_request.proto"; +import "auth/user_access_token_reply.proto"; +import "auth/user_create_request.proto"; +import "auth/user_find_by_uuids_request.proto"; +import "auth/user_find_by_name_uid_request.proto"; +import "auth/user_update_request.proto"; +import "auth/user_list_all_request.proto"; +import "auth/user_list_collab_request.proto"; +import "auth/user_get_recent_ep_request.proto"; +import "auth/user_get_recent_ep_reply.proto"; +import "auth/user_set_recent_ep_request.proto"; + +// Auth - General +import "auth/listing_reply.proto"; + +// Auth - Record +import "auth/record_list_by_alloc_request.proto"; +import "auth/record_view_request.proto"; +import "auth/record_data_reply.proto"; +import "auth/record_create_request.proto"; +import "auth/record_create_batch_request.proto"; +import "auth/record_update_request.proto"; +import "auth/record_update_batch_request.proto"; +import "auth/record_export_request.proto"; +import "auth/record_export_reply.proto"; +import "auth/record_lock_request.proto"; +import "auth/record_delete_request.proto"; +import "auth/record_get_dependency_graph_request.proto"; +import "auth/record_alloc_change_request.proto"; +import 
"auth/record_alloc_change_reply.proto"; +import "auth/record_owner_change_request.proto"; +import "auth/record_owner_change_reply.proto"; + +// Auth - Data +import "auth/data_get_request.proto"; +import "auth/data_put_request.proto"; +import "auth/data_get_reply.proto"; +import "auth/data_put_reply.proto"; +import "auth/data_delete_request.proto"; +import "auth/data_path_request.proto"; +import "auth/data_path_reply.proto"; + +// Auth - Search +import "auth/search_request.proto"; + +// Auth - Collection +import "auth/coll_view_request.proto"; +import "auth/coll_data_reply.proto"; +import "auth/coll_read_request.proto"; +import "auth/coll_create_request.proto"; +import "auth/coll_update_request.proto"; +import "auth/coll_delete_request.proto"; +import "auth/coll_write_request.proto"; +import "auth/coll_move_request.proto"; +import "auth/coll_get_parents_request.proto"; +import "auth/coll_path_reply.proto"; +import "auth/coll_get_offset_request.proto"; +import "auth/coll_get_offset_reply.proto"; +import "auth/coll_list_published_request.proto"; + +// Auth - Group +import "auth/group_create_request.proto"; +import "auth/group_update_request.proto"; +import "auth/group_data_reply.proto"; +import "auth/group_delete_request.proto"; +import "auth/group_list_request.proto"; +import "auth/group_view_request.proto"; + +// Auth - ACL +import "auth/acl_view_request.proto"; +import "auth/acl_update_request.proto"; +import "auth/acl_shared_list_request.proto"; +import "auth/acl_shared_list_items_request.proto"; +import "auth/acl_data_reply.proto"; + +// Auth - Project +import "auth/project_view_request.proto"; +import "auth/project_data_reply.proto"; +import "auth/project_create_request.proto"; +import "auth/project_update_request.proto"; +import "auth/project_delete_request.proto"; +import "auth/project_list_request.proto"; +import "auth/project_search_request.proto"; +import "auth/project_get_role_request.proto"; +import "auth/project_get_role_reply.proto"; + +// Auth - 
Repository +import "auth/repo_data_delete_request.proto"; +import "auth/repo_data_get_size_request.proto"; +import "auth/repo_data_size_reply.proto"; +import "auth/repo_path_create_request.proto"; +import "auth/repo_path_delete_request.proto"; +import "auth/repo_list_request.proto"; +import "auth/repo_view_request.proto"; +import "auth/repo_create_request.proto"; +import "auth/repo_update_request.proto"; +import "auth/repo_delete_request.proto"; +import "auth/repo_data_reply.proto"; +import "auth/repo_calc_size_request.proto"; +import "auth/repo_calc_size_reply.proto"; +import "auth/repo_list_allocations_request.proto"; +import "auth/repo_list_subject_allocations_request.proto"; +import "auth/repo_list_object_allocations_request.proto"; +import "auth/repo_view_allocation_request.proto"; +import "auth/repo_allocations_reply.proto"; +import "auth/repo_allocation_stats_request.proto"; +import "auth/repo_allocation_stats_reply.proto"; +import "auth/repo_allocation_create_request.proto"; +import "auth/repo_allocation_create_response.proto"; +import "auth/repo_allocation_set_request.proto"; +import "auth/repo_allocation_set_default_request.proto"; +import "auth/repo_allocation_delete_request.proto"; +import "auth/repo_authz_request.proto"; + +// Auth - Query +import "auth/query_create_request.proto"; +import "auth/query_update_request.proto"; +import "auth/query_delete_request.proto"; +import "auth/query_list_request.proto"; +import "auth/query_view_request.proto"; +import "auth/query_exec_request.proto"; +import "auth/query_data_reply.proto"; + +// Auth - Note +import "auth/note_list_by_subject_request.proto"; +import "auth/note_view_request.proto"; +import "auth/note_create_request.proto"; +import "auth/note_update_request.proto"; +import "auth/note_comment_edit_request.proto"; +import "auth/note_data_reply.proto"; + +// Auth - Task +import "auth/task_view_request.proto"; +import "auth/task_list_request.proto"; +import "auth/task_data_reply.proto"; + +// Auth - Tag 
+import "auth/tag_search_request.proto"; +import "auth/tag_list_by_count_request.proto"; +import "auth/tag_data_reply.proto"; + +// Auth - Schema +import "auth/metadata_validate_request.proto"; +import "auth/metadata_validate_reply.proto"; +import "auth/schema_view_request.proto"; +import "auth/schema_search_request.proto"; +import "auth/schema_data_reply.proto"; +import "auth/schema_create_request.proto"; +import "auth/schema_update_request.proto"; +import "auth/schema_revise_request.proto"; +import "auth/schema_delete_request.proto"; + +// Auth - Topic +import "auth/topic_list_topics_request.proto"; +import "auth/topic_view_request.proto"; +import "auth/topic_search_request.proto"; +import "auth/topic_data_reply.proto"; + +// Envelope message containing all possible request/reply types +// Each field is optional - use has_*() to check which is set +message Envelope { + oneof payload { + // Anonymous messages (10-29) + AckReply ack_reply = 10; + NackReply nack_reply = 11; + VersionRequest version_request = 12; + VersionReply version_reply = 13; + GetAuthStatusRequest get_auth_status_request = 14; + AuthenticateByPasswordRequest authenticate_by_password_request = 15; + AuthenticateByTokenRequest authenticate_by_token_request = 16; + AuthStatusReply auth_status_reply = 17; + DailyMessageRequest daily_message_request = 18; + DailyMessageReply daily_message_reply = 19; + + // Credentials (100-109) + GenerateCredentialsRequest generate_credentials_request = 100; + RevokeCredentialsRequest revoke_credentials_request = 101; + GenerateCredentialsReply generate_credentials_reply = 102; + + // Permissions (110-119) + CheckPermsRequest check_perms_request = 110; + CheckPermsReply check_perms_reply = 111; + GetPermsRequest get_perms_request = 112; + GetPermsReply get_perms_reply = 113; + + // User (120-149) + UserViewRequest user_view_request = 120; + UserDataReply user_data_reply = 121; + UserSetAccessTokenRequest user_set_access_token_request = 122; + 
UserGetAccessTokenRequest user_get_access_token_request = 123; + UserAccessTokenReply user_access_token_reply = 124; + UserCreateRequest user_create_request = 125; + UserFindByUUIDsRequest user_find_by_uuids_request = 126; + UserFindByNameUIDRequest user_find_by_name_uid_request = 127; + UserUpdateRequest user_update_request = 128; + UserListAllRequest user_list_all_request = 129; + UserListCollabRequest user_list_collab_request = 130; + UserGetRecentEPRequest user_get_recent_ep_request = 131; + UserGetRecentEPReply user_get_recent_ep_reply = 132; + UserSetRecentEPRequest user_set_recent_ep_request = 133; + + // General (150-159) + ListingReply listing_reply = 150; + + // Record (200-249) + RecordListByAllocRequest record_list_by_alloc_request = 200; + RecordViewRequest record_view_request = 201; + RecordDataReply record_data_reply = 202; + RecordCreateRequest record_create_request = 203; + RecordCreateBatchRequest record_create_batch_request = 204; + RecordUpdateRequest record_update_request = 205; + RecordUpdateBatchRequest record_update_batch_request = 206; + RecordExportRequest record_export_request = 207; + RecordExportReply record_export_reply = 208; + RecordLockRequest record_lock_request = 209; + RecordDeleteRequest record_delete_request = 210; + RecordGetDependencyGraphRequest record_get_dependency_graph_request = 211; + RecordAllocChangeRequest record_alloc_change_request = 212; + RecordAllocChangeReply record_alloc_change_reply = 213; + RecordOwnerChangeRequest record_owner_change_request = 214; + RecordOwnerChangeReply record_owner_change_reply = 215; + + // Data (250-269) + DataGetRequest data_get_request = 250; + DataPutRequest data_put_request = 251; + DataGetReply data_get_reply = 252; + DataPutReply data_put_reply = 253; + DataDeleteRequest data_delete_request = 254; + DataPathRequest data_path_request = 255; + DataPathReply data_path_reply = 256; + + // Search (270-279) + SearchRequest search_request = 270; + + // Collection (300-349) + 
CollViewRequest coll_view_request = 300; + CollDataReply coll_data_reply = 301; + CollReadRequest coll_read_request = 302; + CollCreateRequest coll_create_request = 303; + CollUpdateRequest coll_update_request = 304; + CollDeleteRequest coll_delete_request = 305; + CollWriteRequest coll_write_request = 306; + CollMoveRequest coll_move_request = 307; + CollGetParentsRequest coll_get_parents_request = 308; + CollPathReply coll_path_reply = 309; + CollGetOffsetRequest coll_get_offset_request = 310; + CollGetOffsetReply coll_get_offset_reply = 311; + CollListPublishedRequest coll_list_published_request = 312; + + // Group (350-369) + GroupCreateRequest group_create_request = 350; + GroupUpdateRequest group_update_request = 351; + GroupDataReply group_data_reply = 352; + GroupDeleteRequest group_delete_request = 353; + GroupListRequest group_list_request = 354; + GroupViewRequest group_view_request = 355; + + // ACL (370-389) + ACLViewRequest acl_view_request = 370; + ACLUpdateRequest acl_update_request = 371; + ACLSharedListRequest acl_shared_list_request = 372; + ACLSharedListItemsRequest acl_shared_list_items_request = 373; + ACLDataReply acl_data_reply = 374; + + // Project (400-429) + ProjectViewRequest project_view_request = 400; + ProjectDataReply project_data_reply = 401; + ProjectCreateRequest project_create_request = 402; + ProjectUpdateRequest project_update_request = 403; + ProjectDeleteRequest project_delete_request = 404; + ProjectListRequest project_list_request = 405; + ProjectSearchRequest project_search_request = 406; + ProjectGetRoleRequest project_get_role_request = 407; + ProjectGetRoleReply project_get_role_reply = 408; + + // Repository (450-519) + RepoDataDeleteRequest repo_data_delete_request = 450; + RepoDataGetSizeRequest repo_data_get_size_request = 451; + RepoDataSizeReply repo_data_size_reply = 452; + RepoPathCreateRequest repo_path_create_request = 453; + RepoPathDeleteRequest repo_path_delete_request = 454; + RepoListRequest 
repo_list_request = 455; + RepoViewRequest repo_view_request = 456; + RepoCreateRequest repo_create_request = 457; + RepoUpdateRequest repo_update_request = 458; + RepoDeleteRequest repo_delete_request = 459; + RepoDataReply repo_data_reply = 460; + RepoCalcSizeRequest repo_calc_size_request = 461; + RepoCalcSizeReply repo_calc_size_reply = 462; + RepoListAllocationsRequest repo_list_allocations_request = 463; + RepoListSubjectAllocationsRequest repo_list_subject_allocations_request = 464; + RepoListObjectAllocationsRequest repo_list_object_allocations_request = 465; + RepoViewAllocationRequest repo_view_allocation_request = 466; + RepoAllocationsReply repo_allocations_reply = 467; + RepoAllocationStatsRequest repo_allocation_stats_request = 468; + RepoAllocationStatsReply repo_allocation_stats_reply = 469; + RepoAllocationCreateRequest repo_allocation_create_request = 470; + RepoAllocationCreateResponse repo_allocation_create_response = 471; + RepoAllocationSetRequest repo_allocation_set_request = 472; + RepoAllocationSetDefaultRequest repo_allocation_set_default_request = 473; + RepoAllocationDeleteRequest repo_allocation_delete_request = 474; + RepoAuthzRequest repo_authz_request = 475; + + // Query (520-539) + QueryCreateRequest query_create_request = 520; + QueryUpdateRequest query_update_request = 521; + QueryDeleteRequest query_delete_request = 522; + QueryListRequest query_list_request = 523; + QueryViewRequest query_view_request = 524; + QueryExecRequest query_exec_request = 525; + QueryDataReply query_data_reply = 526; + + // Note (540-559) + NoteListBySubjectRequest note_list_by_subject_request = 540; + NoteViewRequest note_view_request = 541; + NoteCreateRequest note_create_request = 542; + NoteUpdateRequest note_update_request = 543; + NoteCommentEditRequest note_comment_edit_request = 544; + NoteDataReply note_data_reply = 545; + + // Task (560-579) + TaskViewRequest task_view_request = 560; + TaskListRequest task_list_request = 561; + TaskDataReply 
task_data_reply = 562; + + // Tag (580-599) + TagSearchRequest tag_search_request = 580; + TagListByCountRequest tag_list_by_count_request = 581; + TagDataReply tag_data_reply = 582; + + // Schema (600-629) + MetadataValidateRequest metadata_validate_request = 600; + MetadataValidateReply metadata_validate_reply = 601; + SchemaViewRequest schema_view_request = 602; + SchemaSearchRequest schema_search_request = 603; + SchemaDataReply schema_data_reply = 604; + SchemaCreateRequest schema_create_request = 605; + SchemaUpdateRequest schema_update_request = 606; + SchemaReviseRequest schema_revise_request = 607; + SchemaDeleteRequest schema_delete_request = 608; + + // Topic (630-649) + TopicListTopicsRequest topic_list_topics_request = 630; + TopicViewRequest topic_view_request = 631; + TopicSearchRequest topic_search_request = 632; + TopicDataReply topic_data_reply = 633; + } +} diff --git a/common/proto3/common/messages/acl_rule.proto b/common/proto3/common/messages/acl_rule.proto new file mode 100644 index 000000000..0c30977b1 --- /dev/null +++ b/common/proto3/common/messages/acl_rule.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message ACLRule { + string id = 1; + uint32 grant = 2; + uint32 inhgrant = 3; +} diff --git a/common/proto3/common/messages/alloc_data.proto b/common/proto3/common/messages/alloc_data.proto new file mode 100644 index 000000000..f57089501 --- /dev/null +++ b/common/proto3/common/messages/alloc_data.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/alloc_stats_data.proto"; + +option cc_enable_arenas = true; + +message AllocData { + string repo = 1; + uint64 data_limit = 2; + uint64 data_size = 3; + uint32 rec_limit = 4; + uint32 rec_count = 5; + string path = 6; + string id = 7; + bool is_def = 8; + AllocStatsData stats = 9; + string repo_type = 10; +} diff --git a/common/proto3/common/messages/alloc_stats_data.proto 
b/common/proto3/common/messages/alloc_stats_data.proto new file mode 100644 index 000000000..5a62c8d10 --- /dev/null +++ b/common/proto3/common/messages/alloc_stats_data.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message AllocStatsData { + string repo = 1; + uint32 rec_count = 2; + uint32 file_count = 3; + uint64 data_size = 4; + repeated uint32 histogram = 5; +} diff --git a/common/proto3/common/messages/coll_data.proto b/common/proto3/common/messages/coll_data.proto new file mode 100644 index 000000000..f865483ed --- /dev/null +++ b/common/proto3/common/messages/coll_data.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message CollData { + string id = 1; + string title = 2; + string alias = 3; + string desc = 4; + repeated string tags = 5; + string topic = 6; + string owner = 7; + string creator = 8; + uint32 ct = 9; + uint32 ut = 10; + string parent_id = 11; + uint32 notes = 12; +} diff --git a/common/proto3/common/messages/dependency_data.proto b/common/proto3/common/messages/dependency_data.proto new file mode 100644 index 000000000..9a598c7cd --- /dev/null +++ b/common/proto3/common/messages/dependency_data.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/dependency_type.proto"; +import "enums/dependency_dir.proto"; + +option cc_enable_arenas = true; + +message DependencyData { + string id = 1; + string alias = 2; + uint32 notes = 3; + DependencyType type = 4; + DependencyDir dir = 5; +} diff --git a/common/proto3/common/messages/dependency_spec_data.proto b/common/proto3/common/messages/dependency_spec_data.proto new file mode 100644 index 000000000..91a4afae9 --- /dev/null +++ b/common/proto3/common/messages/dependency_spec_data.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/dependency_type.proto"; + +option cc_enable_arenas = true; + +message DependencySpecData { + string id = 1; + DependencyType 
type = 2; +} diff --git a/common/proto3/common/messages/group_data.proto b/common/proto3/common/messages/group_data.proto new file mode 100644 index 000000000..393d80fda --- /dev/null +++ b/common/proto3/common/messages/group_data.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message GroupData { + string uid = 1; + string gid = 2; + optional string title = 3; + optional string desc = 4; + repeated string member = 5; +} diff --git a/common/proto3/common/messages/listing_data.proto b/common/proto3/common/messages/listing_data.proto new file mode 100644 index 000000000..4e1a43aba --- /dev/null +++ b/common/proto3/common/messages/listing_data.proto @@ -0,0 +1,24 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/dependency_data.proto"; + +option cc_enable_arenas = true; + +message ListingData { + string id = 1; + string title = 2; + string alias = 3; + bool locked = 4; + string owner = 5; + string creator = 6; + double size = 7; + bool external = 8; + uint32 notes = 9; + int32 gen = 10; + bool deps_avail = 11; + repeated DependencyData dep = 12; + string owner_name = 13; + string desc = 14; +} diff --git a/common/proto3/common/messages/note_comment.proto b/common/proto3/common/messages/note_comment.proto new file mode 100644 index 000000000..5cb7f40a2 --- /dev/null +++ b/common/proto3/common/messages/note_comment.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/note_type.proto"; +import "enums/note_state.proto"; + +option cc_enable_arenas = true; + +message NoteComment { + string user = 1; + uint32 time = 2; + string comment = 3; + NoteType type = 4; + NoteState state = 5; +} diff --git a/common/proto3/common/messages/note_data.proto b/common/proto3/common/messages/note_data.proto new file mode 100644 index 000000000..33c5166f3 --- /dev/null +++ b/common/proto3/common/messages/note_data.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/note_type.proto"; 
+import "enums/note_state.proto"; +import "messages/note_comment.proto"; + +option cc_enable_arenas = true; + +message NoteData { + string id = 1; + NoteType type = 2; + NoteState state = 3; + string subject_id = 4; + string title = 5; + repeated NoteComment comment = 6; + uint32 ct = 7; + uint32 ut = 8; + string parent_id = 9; + bool has_child = 10; +} diff --git a/common/proto3/common/messages/path_data.proto b/common/proto3/common/messages/path_data.proto new file mode 100644 index 000000000..d340ca25d --- /dev/null +++ b/common/proto3/common/messages/path_data.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/listing_data.proto"; + +option cc_enable_arenas = true; + +message PathData { + repeated ListingData item = 1; +} diff --git a/common/proto3/common/messages/project_data.proto b/common/proto3/common/messages/project_data.proto new file mode 100644 index 000000000..21e54f65d --- /dev/null +++ b/common/proto3/common/messages/project_data.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/alloc_data.proto"; + +option cc_enable_arenas = true; + +message ProjectData { + string id = 1; + string title = 2; + string desc = 3; + string owner = 4; + uint32 ct = 5; + uint32 ut = 6; + repeated string admin = 7; + repeated string member = 8; + repeated AllocData alloc = 9; +} diff --git a/common/proto3/common/messages/record_data.proto b/common/proto3/common/messages/record_data.proto new file mode 100644 index 000000000..dd3d15c1e --- /dev/null +++ b/common/proto3/common/messages/record_data.proto @@ -0,0 +1,34 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/dependency_data.proto"; + +option cc_enable_arenas = true; + +message RecordData { + string id = 1; + string title = 2; + string alias = 3; + string desc = 4; + repeated string tags = 5; + string metadata = 6; + bool external = 7; + string source = 8; + string repo_id = 9; + double size = 10; + string ext = 11; + bool ext_auto = 12; + uint32 ct 
= 13; + uint32 ut = 14; + uint32 dt = 15; + string owner = 16; + string creator = 17; + bool locked = 18; + string parent_id = 19; + repeated DependencyData deps = 20; + uint32 notes = 21; + string md_err_msg = 22; + string sch_id = 23; + uint32 sch_ver = 24; +} diff --git a/common/proto3/common/messages/record_data_location.proto b/common/proto3/common/messages/record_data_location.proto new file mode 100644 index 000000000..d4f19a891 --- /dev/null +++ b/common/proto3/common/messages/record_data_location.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RecordDataLocation { + string id = 1; + string path = 2; +} diff --git a/common/proto3/common/messages/record_data_size.proto b/common/proto3/common/messages/record_data_size.proto new file mode 100644 index 000000000..2a6821703 --- /dev/null +++ b/common/proto3/common/messages/record_data_size.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RecordDataSize { + string id = 1; + double size = 2; +} diff --git a/common/proto3/common/messages/repo_data.proto b/common/proto3/common/messages/repo_data.proto new file mode 100644 index 000000000..765c09d35 --- /dev/null +++ b/common/proto3/common/messages/repo_data.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message RepoData { + string id = 1; + string title = 2; + string desc = 3; + uint64 capacity = 4; + string pub_key = 5; + string address = 6; + string endpoint = 7; + string path = 8; + string domain = 9; + string exp_path = 10; + repeated string admin = 11; + string type = 12; +} diff --git a/common/proto3/common/messages/repo_record_data_locations.proto b/common/proto3/common/messages/repo_record_data_locations.proto new file mode 100644 index 000000000..7a4a7edab --- /dev/null +++ b/common/proto3/common/messages/repo_record_data_locations.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package SDMS; + 
+import "messages/record_data_location.proto"; + +option cc_enable_arenas = true; + +message RepoRecordDataLocations { + string repo_id = 1; + repeated RecordDataLocation loc = 2; +} diff --git a/common/proto3/common/messages/schema_data.proto b/common/proto3/common/messages/schema_data.proto new file mode 100644 index 000000000..d4153eb61 --- /dev/null +++ b/common/proto3/common/messages/schema_data.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message SchemaData { + string id = 1; + uint32 ver = 2; + bool depr = 3; + uint32 cnt = 4; + bool ref = 5; + string own_id = 6; + string own_nm = 7; + string desc = 8; + bool pub = 9; + string def = 10; + repeated SchemaData uses = 11; + repeated SchemaData used_by = 12; +} diff --git a/common/proto3/common/messages/tag_data.proto b/common/proto3/common/messages/tag_data.proto new file mode 100644 index 000000000..6dfdb2e69 --- /dev/null +++ b/common/proto3/common/messages/tag_data.proto @@ -0,0 +1,10 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message TagData { + string name = 1; + uint64 count = 2; +} diff --git a/common/proto3/common/messages/task_data.proto b/common/proto3/common/messages/task_data.proto new file mode 100644 index 000000000..2c569692c --- /dev/null +++ b/common/proto3/common/messages/task_data.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; + +package SDMS; + +import "enums/task_type.proto"; +import "enums/task_status.proto"; + +option cc_enable_arenas = true; + +message TaskData { + string id = 1; + TaskType type = 2; + TaskStatus status = 3; + string client = 4; + uint32 step = 5; + uint32 steps = 6; + string msg = 7; + uint32 ct = 8; + uint32 ut = 9; + string source = 10; + string dest = 11; +} diff --git a/common/proto3/common/messages/topic_data.proto b/common/proto3/common/messages/topic_data.proto new file mode 100644 index 000000000..56e6cafb9 --- /dev/null +++ b/common/proto3/common/messages/topic_data.proto @@ 
-0,0 +1,15 @@ +syntax = "proto3"; + +package SDMS; + +option cc_enable_arenas = true; + +message TopicData { + string id = 1; + string title = 2; + repeated TopicData path = 3; + string desc = 4; + string creator = 5; + bool admin = 6; + uint32 coll_cnt = 7; +} diff --git a/common/proto3/common/messages/user_data.proto b/common/proto3/common/messages/user_data.proto new file mode 100644 index 000000000..9b35f1f5d --- /dev/null +++ b/common/proto3/common/messages/user_data.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; + +package SDMS; + +import "messages/alloc_data.proto"; + +option cc_enable_arenas = true; + +message UserData { + string uid = 1; + string name_last = 2; + string name_first = 3; + string email = 4; + string options = 5; + bool is_admin = 6; + bool is_repo_admin = 7; + repeated string ident = 8; + repeated AllocData alloc = 9; +} diff --git a/common/source/Frame.cpp b/common/source/Frame.cpp index 5f248143a..cf3ff8474 100644 --- a/common/source/Frame.cpp +++ b/common/source/Frame.cpp @@ -24,11 +24,12 @@ namespace g_constants = constants::message::google; * * zmq_msg_init_size( &zmq_msg, 8 ); **/ +// Frame.cpp + void FrameConverter::copy(CopyDirection direction, zmq_msg_t &zmq_msg, Frame &frame) { if (direction == CopyDirection::FROM_FRAME) { if (zmq_msg_size(&zmq_msg) != sizeof(Frame)) { - EXCEPT_PARAM( 1, "Unable to copy frame to zmq_msg sizes are inconsistent Frame: " << sizeof(Frame) << " zmq_msg " << zmq_msg_size(&zmq_msg)); @@ -36,8 +37,7 @@ void FrameConverter::copy(CopyDirection direction, zmq_msg_t &zmq_msg, unsigned char *msg_frame_allocation = (unsigned char *)zmq_msg_data(&zmq_msg); *((uint32_t *)msg_frame_allocation) = htonl(frame.size); - *(msg_frame_allocation + 4) = frame.proto_id; - *(msg_frame_allocation + 5) = frame.msg_id; + *((uint16_t *)(msg_frame_allocation + 4)) = htons(frame.msg_type); *((uint16_t *)(msg_frame_allocation + 6)) = htons(frame.context); } else { // TO_FRAME if (zmq_msg_size(&zmq_msg) != sizeof(Frame)) { @@ -48,8 
+48,7 @@ void FrameConverter::copy(CopyDirection direction, zmq_msg_t &zmq_msg, unsigned char *msg_frame_allocation = (unsigned char *)zmq_msg_data(&zmq_msg); frame.size = ntohl(*((uint32_t *)msg_frame_allocation)); - frame.proto_id = *(msg_frame_allocation + 4); - frame.msg_id = *(msg_frame_allocation + 5); + frame.msg_type = ntohs(*((uint16_t *)(msg_frame_allocation + 4))); frame.context = ntohs(*((uint16_t *)(msg_frame_allocation + 6))); } } @@ -58,9 +57,7 @@ void FrameConverter::copy(CopyDirection direction, IMessage &msg, const Frame &frame) { if (direction == CopyDirection::FROM_FRAME) { msg.set(g_constants::FRAME_SIZE, frame.size); - msg.set(g_constants::PROTO_ID, frame.proto_id); - msg.set(g_constants::MSG_ID, frame.msg_id); - msg.set(g_constants::MSG_TYPE, frame.getMsgType()); + msg.set(g_constants::MSG_TYPE, frame.msg_type); msg.set(g_constants::CONTEXT, frame.context); } else { EXCEPT(1, "Unsupported copy direction for FrameConverter working on " @@ -71,9 +68,7 @@ void FrameConverter::copy(CopyDirection direction, IMessage &msg, Frame FrameFactory::create(::google::protobuf::Message &a_msg, ProtoBufMap &proto_map) { Frame frame; - auto msg_type = proto_map.getMessageType(a_msg); - frame.proto_id = msg_type >> 8; - frame.msg_id = msg_type & 0xFF; + frame.msg_type = proto_map.getMessageType(a_msg); frame.size = a_msg.ByteSizeLong(); return frame; } @@ -88,21 +83,13 @@ Frame FrameFactory::create(const IMessage &msg) { "constant is not defined cannot create Frame from IMessage, missing: " << g_constants::FRAME_SIZE); } - if (msg.exists(g_constants::PROTO_ID)) { - frame.proto_id = std::get(msg.get(g_constants::PROTO_ID)); - } else { - EXCEPT_PARAM( - 1, - "constant is not defined cannot create Frame from IMessage, missing: " - << g_constants::PROTO_ID); - } - if (msg.exists(g_constants::MSG_ID)) { - frame.msg_id = std::get(msg.get(g_constants::MSG_ID)); + if (msg.exists(g_constants::MSG_TYPE)) { + frame.msg_type = std::get(msg.get(g_constants::MSG_TYPE)); } 
else { EXCEPT_PARAM( 1, "constant is not defined cannot create Frame from IMessage, missing: " - << g_constants::MSG_ID); + << g_constants::MSG_TYPE); } if (msg.exists(g_constants::CONTEXT)) { frame.context = std::get(msg.get(g_constants::CONTEXT)); @@ -117,7 +104,6 @@ Frame FrameFactory::create(const IMessage &msg) { Frame FrameFactory::create(zmq_msg_t &zmq_msg) { Frame frame; - // No need for conversion if the message size is 0 just use default frame if (zmq_msg_size(&zmq_msg) > 0) { FrameConverter converter; converter.copy(FrameConverter::CopyDirection::TO_FRAME, zmq_msg, frame); diff --git a/common/source/Frame.hpp b/common/source/Frame.hpp index 58a0aa6b7..aaa263ac4 100644 --- a/common/source/Frame.hpp +++ b/common/source/Frame.hpp @@ -1,3 +1,14 @@ +/** + * @file Frame.hpp + * @brief Wire-level frame header and conversion utilities for the SDMS + * messaging transport layer. + * + * Defines the fixed-size Frame header that prefixes every message on the wire, + * along with FrameConverter (for copying between Frame and ZMQ/IMessage + * representations) and FrameFactory (for constructing Frame headers from + * various message sources). + */ + #ifndef FRAME_HPP #define FRAME_HPP #pragma once @@ -16,45 +27,119 @@ namespace SDMS { // Forward declarations class ProtoBufMap; +/** + * @struct Frame + * @brief Fixed-size 8-byte header that precedes every message on the wire. + * + * Contains the serialized payload size, the message type identifier (envelope + * field number), and an optional application-defined context value. Fields are + * converted to/from network byte order at the ZMQ serialization boundary. + */ struct Frame { + uint32_t size = 0; ///< Size of the serialized payload in bytes. + uint16_t msg_type = 0; ///< Message type ID (envelope field number). + uint16_t context = 0; ///< Optional application-defined context value. 
- uint32_t size = 0; ///< Size of buffer in bytes - uint8_t proto_id = - 0; ///< Protocol ID (defined by Protocol enum in proto file) - uint8_t msg_id = 0; ///< Message ID (defined by alphabetical order of message - ///< names in proto file) - uint16_t context = 0; ///< Optional context value - + /** + * @brief Resets all fields to zero. + */ void clear() { size = 0; - proto_id = 0; - msg_id = 0; + msg_type = 0; context = 0; } - - /// Message type is 16 bits with protocol ID as the upper 8 bits and message - /// ID as the lower 8 bits - inline uint16_t getMsgType() const { - return (((uint16_t)proto_id) << 8) | msg_id; - } }; +/** + * @class FrameConverter + * @brief Copies data between Frame headers and ZMQ or IMessage representations. + * + * Provides bidirectional conversion for ZMQ messages. The IMessage overload + * only supports the FROM_FRAME direction (Frame → IMessage); the reverse + * is unsupported and will throw. + */ class FrameConverter { public: - /** - * Make sure that zmq_msg_init is not called on this message before it - * is passed in. - **/ - enum class CopyDirection { TO_FRAME, FROM_FRAME }; + /** @brief Specifies the direction of a copy operation. */ + enum class CopyDirection { + TO_FRAME, ///< Deserialize: copy from source into Frame. + FROM_FRAME ///< Serialize: copy from Frame into destination. + }; + /** + * @brief Copies between a raw ZMQ message buffer and a Frame. + * + * The ZMQ message must be pre-initialized and sized to exactly 8 bytes + * (sizeof(Frame)) before calling. For the FROM_FRAME direction, use: + * @code + * zmq_msg_init_size(&zmq_msg, 8); + * @endcode + * + * @param copy Direction of the copy operation. + * @param zmq_msg Pre-initialized ZMQ message of exactly 8 bytes. + * @param frame Frame to populate (TO_FRAME) or read from (FROM_FRAME). + * + * @throws TraceException if zmq_msg size != sizeof(Frame). 
+ */ void copy(CopyDirection copy, zmq_msg_t &zmq_msg, Frame &frame); + + /** + * @brief Copies frame fields from a Frame into an IMessage. + * + * @note Only CopyDirection::FROM_FRAME is supported. Passing TO_FRAME + * will throw. + * + * @param copy Must be CopyDirection::FROM_FRAME. + * @param msg IMessage to populate with frame metadata. + * @param frame Source Frame to read from. + * + * @throws TraceException if direction is TO_FRAME. + */ void copy(CopyDirection copy, IMessage &msg, const Frame &frame); }; +/** + * @class FrameFactory + * @brief Constructs Frame headers from various message representations. + * + * Each overload computes the payload size and resolves the message type ID + * appropriate to the source type. The context field is only populated when + * constructing from an IMessage that carries it. + */ class FrameFactory { public: + /** + * @brief Creates a Frame from a protobuf Message, resolving the type ID + * via ProtoBufMap. + * + * The context field is left at its default (0). + * + * @param a_msg Protobuf message (ByteSizeLong() determines frame size). + * @param proto_map Registry used to look up the message type ID. + * @return A Frame with size and msg_type populated. + */ Frame create(::google::protobuf::Message &a_msg, ProtoBufMap &proto_map); + + /** + * @brief Creates a Frame by extracting metadata from an IMessage. + * + * All three fields (FRAME_SIZE, MSG_TYPE, CONTEXT) must be present in the + * IMessage or the call will throw. + * + * @param msg The IMessage containing frame metadata. + * @return A fully populated Frame. + * @throws TraceException if any required field is missing from the IMessage. + */ Frame create(const IMessage &msg); + + /** + * @brief Creates a Frame by deserializing a raw ZMQ message. + * + * Returns a zeroed Frame if the ZMQ message is empty. + * + * @param zmq_msg Raw ZMQ message containing serialized frame bytes. + * @return The deserialized Frame, or a zeroed Frame if empty. 
+ */ Frame create(zmq_msg_t &zmq_msg); }; diff --git a/common/source/ProtoBufFactory.cpp b/common/source/ProtoBufFactory.cpp index 6580f0ccd..99a7a7329 100644 --- a/common/source/ProtoBufFactory.cpp +++ b/common/source/ProtoBufFactory.cpp @@ -2,8 +2,7 @@ #include "ProtoBufFactory.hpp" // Local public includes -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" +#include "common/envelope.pb.h" #include "common/TraceException.hpp" // Standard includes @@ -11,24 +10,19 @@ namespace SDMS { -ProtoBufFactory::ProtoBufFactory() { - Anon::Protocol_descriptor(); - Auth::Protocol_descriptor(); - m_factory = ::google::protobuf::MessageFactory::generated_factory(); -} - -std::unique_ptr<::google::protobuf::Message> -ProtoBufFactory::create(uint16_t desc_type) { - const ::google::protobuf::Descriptor *msg_descriptor = - m_proto_map.getDescriptorType(desc_type); - return create(msg_descriptor); -} - -// https://stackoverflow.com/questions/29960871/protobuf-message-object-creation-by-name -std::unique_ptr<::google::protobuf::Message> -ProtoBufFactory::create(const ::google::protobuf::Descriptor *msg_descriptor) { - const ::google::protobuf::Message *prototype_msg = - m_factory->GetPrototype(msg_descriptor); +ProtoBufFactory::ProtoBufFactory() {} + +std::unique_ptr ProtoBufFactory::create(uint16_t desc_type) { + const google::protobuf::Descriptor* msg_descriptor = + m_proto_map.getDescriptorType(desc_type); + return create(msg_descriptor); + } + +std::unique_ptr<::google::protobuf::Message> + ProtoBufFactory::create(const ::google::protobuf::Descriptor* msg_descriptor) { + const google::protobuf::Message* prototype_msg = + google::protobuf::MessageFactory::generated_factory() + ->GetPrototype(msg_descriptor); if (prototype_msg == nullptr) { EXCEPT(1, "Cannot create prototype message from message descriptor"); @@ -41,6 +35,7 @@ ProtoBufFactory::create(const ::google::protobuf::Descriptor *msg_descriptor) { } return 
std::unique_ptr<::google::protobuf::Message>(mutable_msg); -} + + } } // namespace SDMS diff --git a/common/source/ProtoBufFactory.hpp b/common/source/ProtoBufFactory.hpp index 444d5a26f..f6766b8e0 100644 --- a/common/source/ProtoBufFactory.hpp +++ b/common/source/ProtoBufFactory.hpp @@ -7,8 +7,7 @@ #include "common/TraceException.hpp" // Local protobuf includes -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" +#include "common/envelope.pb.h" // Standard includes #include @@ -17,13 +16,11 @@ namespace SDMS { class ProtoBufFactory { ProtoBufMap m_proto_map; - ::google::protobuf::MessageFactory *m_factory; public: ProtoBufFactory(); std::unique_ptr<::google::protobuf::Message> create(uint16_t desc_type); - std::unique_ptr<::google::protobuf::Message> - create(const ::google::protobuf::Descriptor *msg_descriptor); + std::unique_ptr<::google::protobuf::Message> create(const ::google::protobuf::Descriptor *msg_descriptor); }; } // namespace SDMS diff --git a/common/source/ProtoBufMap.cpp b/common/source/ProtoBufMap.cpp index 6c6fcf58b..731009370 100644 --- a/common/source/ProtoBufMap.cpp +++ b/common/source/ProtoBufMap.cpp @@ -1,8 +1,7 @@ // Local public includes #include "common/ProtoBufMap.hpp" -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" +#include "common/envelope.pb.h" #include "common/TraceException.hpp" // Third party includes @@ -13,135 +12,107 @@ namespace proto = ::google::protobuf; namespace SDMS { -ProtoBufMap::ProtoBufMap() { - // These two code blocks should be templatized to make them DRY - - { - auto a_enum_desc = Anon::Protocol_descriptor(); - if (a_enum_desc->name() != "Protocol") - EXCEPT(EC_PROTO_INIT, "Must register with Protocol EnumDescriptor."); - - const proto::FileDescriptor *file = a_enum_desc->file(); - if (!file) - EXCEPT(EC_PROTO_INIT, - "Failed to acquire protocol buffer file descriptor."); - - const proto::EnumValueDescriptor *val_desc = - a_enum_desc->FindValueByName("ID"); - if (!val_desc) - 
EXCEPT(EC_PROTO_INIT, "Protocol enum missing required ID field."); - - uint16_t id = val_desc->number(); - // std::cout << __FILE__ << ":" << __LINE__ << " PROTOCOL id is " << id << - // std::endl; - m_file_descriptor_map[id] = file; - - int count = file->message_type_count(); - uint16_t msg_type = id << 8; - - for (int i = 0; i < count; i++, msg_type++) { - const proto::Descriptor *desc = file->message_type(i); - m_descriptor_map[msg_type] = desc; - // Register Message types from Anon - m_msg_type_map[desc] = msg_type; - } - m_protocol_ids[MessageProtocol::GOOGLE_ANONONYMOUS] = id; - } - { - auto a_enum_desc = Auth::Protocol_descriptor(); - if (a_enum_desc->name() != "Protocol") - EXCEPT(EC_PROTO_INIT, "Must register with Protocol EnumDescriptor."); - - const proto::FileDescriptor *file = a_enum_desc->file(); - if (!file) - EXCEPT(EC_PROTO_INIT, - "Failed to acquire protocol buffer file descriptor."); - - const proto::EnumValueDescriptor *val_desc = - a_enum_desc->FindValueByName("ID"); - if (!val_desc) - EXCEPT(EC_PROTO_INIT, "Protocol enum missing required ID field."); - - uint16_t id = val_desc->number(); - // std::cout << "PROTOCOL id is " << id << std::endl; - // std::cout << __FILE__ << ":" << __LINE__ << " PROTOCOL id is " << id << - // std::endl; - m_file_descriptor_map[id] = file; - - int count = file->message_type_count(); - uint16_t msg_type = id << 8; - - for (int i = 0; i < count; i++, msg_type++) { - const proto::Descriptor *desc = file->message_type(i); - m_descriptor_map[msg_type] = desc; - m_msg_type_map[desc] = msg_type; +ProtoBufMap::ProtoBufMap() {} + +std::unique_ptr +ProtoBufMap::wrapInEnvelope(const proto::Message& inner) const { + // If already an Envelope, return a copy — no double-wrapping + const auto* env = dynamic_cast(&inner); + if (env) { + return std::make_unique(*env); } - m_protocol_ids[MessageProtocol::GOOGLE_AUTHORIZED] = id; - } -} -uint16_t ProtoBufMap::getMessageType(proto::Message &a_msg) { - const proto::Descriptor *desc = 
a_msg.GetDescriptor(); - if (m_msg_type_map.count(desc) == 0) { - EXCEPT_PARAM(EC_INVALID_PARAM, - "Unknown descriptor encountered: " << desc->name()); - } - return m_msg_type_map.at(desc); + uint16_t field_number = getMessageType(inner); + auto envelope = std::make_unique(); + const auto* field_desc = + envelope->GetDescriptor()->FindFieldByNumber(field_number); + if (!field_desc) { + EXCEPT_PARAM(EC_INVALID_PARAM, + "Cannot wrap message in envelope: no field number " + << field_number << " for type " + << inner.GetDescriptor()->name()); + } + envelope->GetReflection() + ->MutableMessage(envelope.get(), field_desc) + ->CopyFrom(inner); + return envelope; } -std::string ProtoBufMap::toString(uint16_t msg_type) const { - if (m_descriptor_map.count(msg_type)) { - return m_descriptor_map.at(msg_type)->name(); - } - EXCEPT_PARAM(1, "Provided message type is unknown cannot retrieve name."); +std::unique_ptr +ProtoBufMap::unwrapFromEnvelope(SDMS::Envelope& envelope) const { + uint16_t msg_type = getMessageType(envelope); + const auto* field_desc = + envelope.GetDescriptor()->FindFieldByNumber(msg_type); + if (!field_desc) { + EXCEPT_PARAM(EC_INVALID_PARAM, + "Cannot unwrap envelope: unknown field number " + << msg_type); + } + // ReleaseMessage transfers ownership out of the envelope + return std::unique_ptr( + envelope.GetReflection()->ReleaseMessage(&envelope, field_desc)); } -uint16_t ProtoBufMap::getMessageType(uint8_t a_proto_id, - const std::string &a_message_name) { +uint16_t ProtoBufMap::getMessageType(const ::google::protobuf::Message& msg) const { + // If it's an Envelope, use the envelope method + const SDMS::Envelope* env = dynamic_cast(&msg); + if (env) { + + const auto* reflection = env->GetReflection(); + const auto* descriptor = env->GetDescriptor(); + + for (int i = 0; i < descriptor->field_count(); ++i) { + const auto* field = descriptor->field(i); + if (field->type() == proto::FieldDescriptor::TYPE_MESSAGE) { + if (reflection->HasField(*env, field)) { 
+ return static_cast(field->number()); + } + } + } - // std::cout << "PROTOCOL id is " << a_proto_id << std::endl; - // std::cout << __FILE__ << ":" << __LINE__ << " PROTOCOL id is " << - // static_cast(a_proto_id) << std::endl; - if (m_file_descriptor_map.count(a_proto_id) == 0) { - EXCEPT_PARAM(EC_INVALID_PARAM, - "Protocol ID " << a_proto_id << " has not been registered."); } + + // Otherwise, look up by message type name + const auto* msg_desc = msg.GetDescriptor(); + return getMessageType(msg_desc->name()); +} - const proto::Descriptor *desc = m_file_descriptor_map.at(a_proto_id) - ->FindMessageTypeByName(a_message_name); - if (!desc) - EXCEPT_PARAM(EC_PROTO_INIT, "Could not find specified message: " - << a_message_name << " for protocol: " - << (unsigned int)a_proto_id); - - if (m_msg_type_map.count(desc) == 0) { - EXCEPT_PARAM(EC_INVALID_PARAM, "Message name \"" - << a_message_name - << "\" is not registered with protocol " - << a_proto_id); - } +const proto::Descriptor* ProtoBufMap::getDescriptorType(uint16_t field_number) const { + const auto* envelope_desc = SDMS::Envelope::descriptor(); + const auto* field = envelope_desc->FindFieldByNumber(field_number); + if (field && field->type() == proto::FieldDescriptor::TYPE_MESSAGE) { + return field->message_type(); + } + return nullptr; +} - return m_msg_type_map.at(desc); +uint16_t ProtoBufMap::getMessageType(const std::string& message_name) const { + const auto* envelope_desc = SDMS::Envelope::descriptor(); + + for (int i = 0; i < envelope_desc->field_count(); ++i) { + const auto* field = envelope_desc->field(i); + if (field->type() == proto::FieldDescriptor::TYPE_MESSAGE) { + if (field->message_type()->name() == message_name) { + return static_cast(field->number()); + } + } + } + + EXCEPT_PARAM(EC_INVALID_PARAM, + "Message name \"" << message_name << "\" not found in Envelope"); } -const proto::Descriptor * -ProtoBufMap::getDescriptorType(uint16_t message_type) const { - if 
(m_descriptor_map.count(message_type)) { - return m_descriptor_map.at(message_type); - } else { - EXCEPT_PARAM(EC_PROTO_INIT, - "Descriptor type mapping failed, unregistered message type " - << message_type); - } +std::string ProtoBufMap::toString(uint16_t field_number) const { + const auto* desc = getDescriptorType(field_number); + if (desc) { + return desc->name(); + } + return "Unknown(" + std::to_string(field_number) + ")"; } -uint8_t ProtoBufMap::getProtocolID(MessageProtocol msg_protocol) const { - if (m_protocol_ids.count(msg_protocol)) { - return static_cast(m_protocol_ids.at(msg_protocol)); - } else { - EXCEPT( - 1, - "Unsupported MessageProtocol specified, cannot map to a protocol id"); - } +bool ProtoBufMap::exists(uint16_t field_number) const { + return getDescriptorType(field_number) != nullptr; } + + } // namespace SDMS diff --git a/common/source/Util.cpp b/common/source/Util.cpp index 1170e265d..419f8b5f7 100644 --- a/common/source/Util.cpp +++ b/common/source/Util.cpp @@ -1,6 +1,5 @@ // Local public includes #include "common/Util.hpp" -#include "common/SDMS.pb.h" #include "common/TraceException.hpp" // Third party includes diff --git a/common/source/communicators/ZeroMQCommunicator.cpp b/common/source/communicators/ZeroMQCommunicator.cpp index 8ada24922..6096c0de1 100644 --- a/common/source/communicators/ZeroMQCommunicator.cpp +++ b/common/source/communicators/ZeroMQCommunicator.cpp @@ -336,13 +336,9 @@ void receiveFrame(IMessage &msg, void *incoming_zmq_socket, void sendFrame(IMessage &msg, void *outgoing_zmq_socket) { zmq_msg_t zmq_msg; zmq_msg_init_size(&zmq_msg, 8); - // WARNING do not call zmq_msg_init it is called in copy method - // this is a code smell and should be fixed in the future FrameFactory factory; Frame frame = factory.create(msg); FrameConverter converter; - // Will call zmq_msg_init and create space for 8 bytes - // Convert host binary to network (endian) format converter.copy(FrameConverter::CopyDirection::FROM_FRAME, zmq_msg, 
frame); int number_of_bytes = @@ -374,7 +370,6 @@ void receiveBody(IMessage &msg, Buffer &buffer, ProtoBufFactory &factory, << frame_size << " received " << number_of_bytes); } - // Only set payload if there is a payload if (frame_size > 0) { if (zmq_msg_size(&zmq_msg) != frame_size) { @@ -385,20 +380,20 @@ void receiveBody(IMessage &msg, Buffer &buffer, ProtoBufFactory &factory, << ", got: " << msg_size); } + // Deserialize wire bytes into Envelope copyToBuffer(buffer, zmq_msg_data(&zmq_msg), frame_size); - uint16_t desc_type = std::get(msg.get(MSG_TYPE)); - std::unique_ptr payload = factory.create(desc_type); - if (payload == nullptr) { - zmq_msg_close(&zmq_msg); - EXCEPT(1, "No payload was assigned something is wrong"); - } - copyFromBuffer(payload.get(), buffer); - msg.setPayload(std::move(payload)); - } else { + SDMS::Envelope envelope; + copyFromBuffer(&envelope, buffer); + + // Extract inner message from Envelope + ProtoBufMap proto_map; + std::unique_ptr inner = + proto_map.unwrapFromEnvelope(envelope); - // Even if the frame has 0 size it does not mean it is not a legitimate - // message some messages have zero size but are still legitimate such - // as a NACK + msg.setPayload(std::move(inner)); + + } else { + // Zero-size: no envelope on the wire. Frame msg_type identifies it. 
uint16_t msg_type = std::get(msg.get(MSG_TYPE)); ProtoBufMap proto_map; @@ -430,29 +425,31 @@ void sendBody(IMessage &msg, Buffer &buffer, void *outgoing_zmq_socket) { uint32_t frame_size = std::get(msg.get(FRAME_SIZE)); if (frame_size > 0) { zmq_msg_t zmq_msg; - zmq_msg_init_size(&zmq_msg, frame_size); - proto::Message *payload; + proto::Message *inner = nullptr; try { - payload = std::get(msg.getPayload()); + inner = std::get(msg.getPayload()); } catch (std::bad_variant_access const &ex) { EXCEPT(1, ex.what()); } - if (payload) { - auto size = payload->ByteSizeLong(); + if (inner) { + ProtoBufMap proto_map; + auto envelope = proto_map.wrapInEnvelope(*inner); + + auto size = envelope->ByteSizeLong(); if (size != frame_size) { zmq_msg_close(&zmq_msg); - EXCEPT_PARAM(1, "Frame and message sizes differ message size: " + EXCEPT_PARAM(1, "Frame and envelope sizes differ. Envelope size: " << size << " frame size: " << frame_size); } - copyToBuffer(buffer, payload, size); + copyToBuffer(buffer, envelope.get(), size); copyFromBuffer(zmq_msg_data(&zmq_msg), buffer); int number_of_bytes = 0; - if ((number_of_bytes = zmq_msg_send(&zmq_msg, outgoing_zmq_socket, 0)) < - 0) { + if ((number_of_bytes = + zmq_msg_send(&zmq_msg, outgoing_zmq_socket, 0)) < 0) { zmq_msg_close(&zmq_msg); EXCEPT(1, "zmq_msg_send (body) failed."); } @@ -463,7 +460,6 @@ void sendBody(IMessage &msg, Buffer &buffer, void *outgoing_zmq_socket) { zmq_msg_close(&zmq_msg); } else { - sendFinalDelimiter(outgoing_zmq_socket); } } else { diff --git a/common/source/communicators/ZeroMQCommunicator.hpp b/common/source/communicators/ZeroMQCommunicator.hpp index ac131d5b1..8c8130608 100644 --- a/common/source/communicators/ZeroMQCommunicator.hpp +++ b/common/source/communicators/ZeroMQCommunicator.hpp @@ -4,9 +4,9 @@ // Local private includes #include "../Buffer.hpp" +#include "../ProtoBufFactory.hpp" // Local public includes -#include "../ProtoBufFactory.hpp" #include "common/DynaLog.hpp" #include 
"common/ICommunicator.hpp" #include "common/IMessage.hpp" diff --git a/common/source/communicators/ZeroMQCommunicatorSecure.cpp b/common/source/communicators/ZeroMQCommunicatorSecure.cpp index b6e59b871..4a5608e4e 100644 --- a/common/source/communicators/ZeroMQCommunicatorSecure.cpp +++ b/common/source/communicators/ZeroMQCommunicatorSecure.cpp @@ -1,6 +1,5 @@ // Local private includes #include "ZeroMQCommunicatorSecure.hpp" -#include "ProtoBufFactory.hpp" #include "support/zeromq/Context.hpp" #include "support/zeromq/SocketTranslator.hpp" diff --git a/common/source/messages/GoogleProtoMessage.cpp b/common/source/messages/GoogleProtoMessage.cpp index d7daafc70..fd237adee 100644 --- a/common/source/messages/GoogleProtoMessage.cpp +++ b/common/source/messages/GoogleProtoMessage.cpp @@ -21,8 +21,6 @@ namespace SDMS { GoogleProtoMessage::GoogleProtoMessage() { m_dyn_attributes[constants::message::google::FRAME_SIZE] = (uint32_t)0; - m_dyn_attributes[constants::message::google::PROTO_ID] = (uint8_t)0; - m_dyn_attributes[constants::message::google::MSG_ID] = (uint8_t)0; m_dyn_attributes[constants::message::google::MSG_TYPE] = (uint16_t)0; m_dyn_attributes[constants::message::google::CONTEXT] = (uint16_t)0; @@ -42,25 +40,30 @@ bool GoogleProtoMessage::exists(const std::string &attribute_type) const { /** * Setters **/ + void GoogleProtoMessage::setPayload( std::variant, std::string> payload) { if (std::holds_alternative>( payload)) { - // Because the frame depends on the payload, the frame needs to be created - // here + auto &inner = + std::get>(payload); + + // msg_type come from the inner message's + // envelope field number — this is already correct FrameFactory frame_factory; - Frame frame = frame_factory.create( - *std::get>(payload), - m_proto_map); + Frame frame = frame_factory.create(*inner, m_proto_map); + + // FRAME_SIZE must reflect what goes on the wire: the Envelope, + // not the inner message + auto temp_envelope = m_proto_map.wrapInEnvelope(*inner); + 
frame.size = static_cast(temp_envelope->ByteSizeLong()); + m_dyn_attributes[constants::message::google::FRAME_SIZE] = frame.size; - m_dyn_attributes[constants::message::google::PROTO_ID] = frame.proto_id; - m_dyn_attributes[constants::message::google::MSG_ID] = frame.msg_id; - m_dyn_attributes[constants::message::google::MSG_TYPE] = frame.getMsgType(); - // Do not overload the context because this is not associated with the - // message payload but with the response - m_payload = std::move( - std::get>(payload)); + m_dyn_attributes[constants::message::google::MSG_TYPE] = frame.msg_type; + + // Store the INNER message, not the envelope + m_payload = std::move(inner); } else { EXCEPT(1, "Attempt to add unsupported payload to GoogleProtoMessage."); } diff --git a/common/source/servers/Proxy.cpp b/common/source/servers/Proxy.cpp index e7b4cc5a7..000229446 100644 --- a/common/source/servers/Proxy.cpp +++ b/common/source/servers/Proxy.cpp @@ -7,10 +7,6 @@ #include "common/ICommunicator.hpp" #include "common/TraceException.hpp" -// Proto file includes -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" - // Standard includes #include #include diff --git a/common/tests/security/tcp_secure/test_tcp_secure_client.cpp b/common/tests/security/tcp_secure/test_tcp_secure_client.cpp index 2568c0a43..1f2a9f009 100644 --- a/common/tests/security/tcp_secure/test_tcp_secure_client.cpp +++ b/common/tests/security/tcp_secure/test_tcp_secure_client.cpp @@ -8,9 +8,7 @@ #include "common/TraceException.hpp" // Proto file includes -#include "common/SDMS.pb.h" -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" +#include "common/envelope.pb.h" // Third party includes #include @@ -155,11 +153,12 @@ int main(int a_argc, char **a_argv) { uint16_t context = 0; msg_from_client->set(constants::message::google::CONTEXT, context); - auto auth_by_token_req = - std::make_unique(); + auto envelope = + std::make_unique(); + auto auth_by_token_req = 
envelope->mutable_authenticate_by_token_request(); auth_by_token_req->set_token(token); - msg_from_client->setPayload(std::move(auth_by_token_req)); + msg_from_client->setPayload(std::move(envelope)); client->send(*msg_from_client); } std::cout << client->id() << " Message sent..." << std::endl; @@ -192,26 +191,34 @@ int main(int a_argc, char **a_argv) { auto response_google_msg_ptr = std::get<::google::protobuf::Message *>( response_client.message->getPayload()); - Anon::NackReply *response_payload = - dynamic_cast(response_google_msg_ptr); + auto envelope = + dynamic_cast(response_google_msg_ptr); std::cout << client->id() << " Validating message content received from server..." << std::endl; - if (response_payload->err_code() != ErrorCode::ID_SERVICE_ERROR) { + // Changed: BOOST_CHECK -> if check (this isn't a Boost test file) + if (!envelope || !envelope->has_nack_reply()) { + std::cout << client->id() << " FAILED" << std::endl; + EXCEPT_PARAM(1, "TCP Secure test failed - no nack_reply in envelope"); + } + + // Changed: SERVICE_ERROR -> ID_SERVICE_ERROR + if (envelope->nack_reply().err_code() != ErrorCode::SERVICE_ERROR) { std::cout << client->id() << " FAILED" << std::endl; EXCEPT_PARAM(1, "TCP Secure test failed unexpected ErrorCode returned by " "NACK reply from server, client failing."); } - if (response_payload->err_msg().compare(error_msg) != 0) { + + // Changed: response_payload -> envelope->nack_reply() + if (envelope->nack_reply().err_msg().compare(error_msg) != 0) { std::cout << client->id() << " FAILED" << std::endl; EXCEPT_PARAM(1, "TCP Secure test failed unexpected error message " "returned from server provided: " - << response_payload->err_msg() + << envelope->nack_reply().err_msg() << " Expected: " << error_msg); } } - std::cout << client->id() << " SUCCESS" << std::endl; return 0; } diff --git a/common/tests/security/tcp_secure/test_tcp_secure_server.cpp b/common/tests/security/tcp_secure/test_tcp_secure_server.cpp index e5a52e8a7..566200ab0 
100644 --- a/common/tests/security/tcp_secure/test_tcp_secure_server.cpp +++ b/common/tests/security/tcp_secure/test_tcp_secure_server.cpp @@ -9,9 +9,7 @@ #include "common/TraceException.hpp" // Proto file includes -#include "common/SDMS.pb.h" -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" +#include "common/envelope.pb.h" // Third party includes #include @@ -180,16 +178,23 @@ int main(int a_argc, char **a_argv) { } std::cout << std::endl; + // Changed: cast to Envelope, access inner message auto google_msg_ptr = std::get<::google::protobuf::Message *>(response.message->getPayload()); - Anon::AuthenticateByTokenRequest *payload = - dynamic_cast(google_msg_ptr); + auto envelope = + dynamic_cast(google_msg_ptr); - if (payload->token().compare(token) != 0) { + if (!envelope || !envelope->has_authenticate_by_token_request()) { + std::cout << server->id() << " FAILED" << std::endl; + EXCEPT_PARAM(1, "Error detected in server, expected authenticate_by_token_request not found in envelope"); + } + + if (envelope->authenticate_by_token_request().token().compare(token) != 0) { std::cout << server->id() << " FAILED" << std::endl; EXCEPT_PARAM(1, "Error detected in server, expected message content is " "incorrect. 
Actual token value is " - << payload->token() << " Expected token value is " + << envelope->authenticate_by_token_request().token() + << " Expected token value is " << token); } @@ -197,13 +202,14 @@ int main(int a_argc, char **a_argv) { // Server send a reply auto nack_msg = msg_factory.createResponseEnvelope(*response.message); - // Create Google proto message - auto nack_reply = std::make_unique(); - nack_reply->set_err_code(ErrorCode::ID_SERVICE_ERROR); + // Changed: wrap in envelope + auto nack_envelope = std::make_unique(); + auto* nack_reply = nack_envelope->mutable_nack_reply(); + nack_reply->set_err_code(ErrorCode::SERVICE_ERROR); nack_reply->set_err_msg(error_msg); // Place google proto message in IMessage - nack_msg->setPayload(std::move(nack_reply)); + nack_msg->setPayload(std::move(nack_envelope)); server->send(*nack_msg); } diff --git a/common/tests/unit/CMakeLists.txt b/common/tests/unit/CMakeLists.txt index 60f169546..78a65fba4 100644 --- a/common/tests/unit/CMakeLists.txt +++ b/common/tests/unit/CMakeLists.txt @@ -7,7 +7,6 @@ foreach(PROG test_Value test_MessageFactory test_OperatorFactory - test_ProtoBufFactory test_ProtoBufMap test_Proxy test_ProxyBasicZMQ diff --git a/common/tests/unit/test_Buffer.cpp b/common/tests/unit/test_Buffer.cpp index e258b2b30..cd22284ba 100644 --- a/common/tests/unit/test_Buffer.cpp +++ b/common/tests/unit/test_Buffer.cpp @@ -113,7 +113,7 @@ BOOST_AUTO_TEST_CASE(testing_Buffer_googleprotobuf_repo_create_request) { ProtoBufMap proto_map; ProtoBufFactory proto_factory; - SDMS::Auth::RepoCreateRequest repo_create_req; + SDMS::RepoCreateRequest repo_create_req; const std::string id = "bonanza"; const std::string title = "All you can eat."; @@ -158,7 +158,7 @@ BOOST_AUTO_TEST_CASE(testing_Buffer_googleprotobuf_repo_create_request) { copyFromBuffer(new_msg.get(), buffer); auto new_repo_create_req = - dynamic_cast(new_msg.get()); + dynamic_cast(new_msg.get()); BOOST_CHECK(new_repo_create_req->id().compare(id) == 0); 
BOOST_CHECK(new_repo_create_req->title().compare(title) == 0); @@ -177,7 +177,7 @@ BOOST_AUTO_TEST_CASE(testing_Buffer_googleprotobuf) { ProtoBufMap proto_map; ProtoBufFactory proto_factory; - SDMS::Anon::AuthenticateByPasswordRequest auth_by_pass_req; + SDMS::AuthenticateByPasswordRequest auth_by_pass_req; const std::string uid = "tonystark"; const std::string password = "skeleton_key"; @@ -203,7 +203,7 @@ BOOST_AUTO_TEST_CASE(testing_Buffer_googleprotobuf) { copyFromBuffer(new_msg.get(), buffer); auto new_auth_by_pass_req = - dynamic_cast(new_msg.get()); + dynamic_cast(new_msg.get()); BOOST_CHECK(new_auth_by_pass_req->password().compare(password) == 0); BOOST_CHECK(new_auth_by_pass_req->uid().compare(uid) == 0); @@ -214,7 +214,7 @@ BOOST_AUTO_TEST_CASE(testing_Buffer_googleprotobuf_empty_payload) { ProtoBufMap proto_map; ProtoBufFactory proto_factory; - SDMS::Anon::AckReply ack_reply; + SDMS::AckReply ack_reply; Buffer buffer; std::cout << "Calling Copy to buffer" << std::endl; @@ -232,6 +232,6 @@ BOOST_AUTO_TEST_CASE(testing_Buffer_googleprotobuf_empty_payload) { copyFromBuffer(new_msg.get(), buffer); auto new_auth_by_pass_req = - dynamic_cast(new_msg.get()); + dynamic_cast(new_msg.get()); } BOOST_AUTO_TEST_SUITE_END() diff --git a/common/tests/unit/test_CommunicatorFactory.cpp b/common/tests/unit/test_CommunicatorFactory.cpp index a5c6f3a77..3ebf1dd56 100644 --- a/common/tests/unit/test_CommunicatorFactory.cpp +++ b/common/tests/unit/test_CommunicatorFactory.cpp @@ -12,8 +12,7 @@ #include "common/ProtocolTypes.hpp" // Proto file includes -#include "common/SDMS.pb.h" -#include "common/SDMS_Anon.pb.h" +#include "common/envelope.pb.h" // Standard includes #include @@ -139,7 +138,7 @@ BOOST_AUTO_TEST_CASE(testing_CommunicatorFactory) { msg_from_client->set(MessageAttribute::KEY, key); auto auth_by_token_req = - std::make_unique(); + std::make_unique(); auth_by_token_req->set_token(token); msg_from_client->setPayload(std::move(auth_by_token_req)); @@ -178,8 +177,8 
@@ BOOST_AUTO_TEST_CASE(testing_CommunicatorFactory) { auto google_msg_ptr = std::get<::google::protobuf::Message *>(response.message->getPayload()); - Anon::AuthenticateByTokenRequest *payload = - dynamic_cast(google_msg_ptr); + SDMS::AuthenticateByTokenRequest *payload = + dynamic_cast(google_msg_ptr); BOOST_CHECK(payload->token().compare(token) == 0); } @@ -269,7 +268,7 @@ BOOST_AUTO_TEST_CASE(testing_CommunicatorFactorySecure) { msg_from_client->set(MessageAttribute::KEY, key); auto auth_by_token_req = - std::make_unique(); + std::make_unique(); auth_by_token_req->set_token(token); msg_from_client->setPayload(std::move(auth_by_token_req)); @@ -308,8 +307,8 @@ BOOST_AUTO_TEST_CASE(testing_CommunicatorFactorySecure) { auto google_msg_ptr = std::get<::google::protobuf::Message *>(response.message->getPayload()); - Anon::AuthenticateByTokenRequest *payload = - dynamic_cast(google_msg_ptr); + SDMS::AuthenticateByTokenRequest *payload = + dynamic_cast(google_msg_ptr); BOOST_CHECK(payload->token().compare(token) == 0); } @@ -320,7 +319,7 @@ BOOST_AUTO_TEST_CASE(testing_CommunicatorFactorySecure) { msg_from_client->set(MessageAttribute::ID, id); msg_from_client->set(MessageAttribute::KEY, key); - auto ack_reply = std::make_unique(); + auto ack_reply = std::make_unique(); msg_from_client->setPayload(std::move(ack_reply)); @@ -358,7 +357,7 @@ BOOST_AUTO_TEST_CASE(testing_CommunicatorFactorySecure) { auto google_msg_ptr = std::get<::google::protobuf::Message *>(response.message->getPayload()); - dynamic_cast(google_msg_ptr); + dynamic_cast(google_msg_ptr); } } @@ -454,7 +453,7 @@ BOOST_AUTO_TEST_CASE(testing_CommunicatorFactoryReply) { msg_from_client->set(MessageAttribute::ID, id); msg_from_client->set(MessageAttribute::KEY, key); - auto auth_by_token_req = std::make_unique(); + auto auth_by_token_req = std::make_unique(); auth_by_token_req->set_token(token); msg_from_client->setPayload(std::move(auth_by_token_req)); @@ -494,8 +493,8 @@ 
BOOST_AUTO_TEST_CASE(testing_CommunicatorFactoryReply) { auto google_msg_ptr = std::get<::google::protobuf::Message *>(response.message->getPayload()); - Anon::AuthenticateByTokenRequest *payload = - dynamic_cast(google_msg_ptr); + SDMS::AuthenticateByTokenRequest *payload = + dynamic_cast(google_msg_ptr); BOOST_CHECK(payload->token().compare(token) == 0); // Server receive @@ -504,8 +503,8 @@ BOOST_AUTO_TEST_CASE(testing_CommunicatorFactoryReply) { auto nack_msg = msg_factory.createResponseEnvelope(*response.message); // Create Google proto message - auto nack_reply = std::make_unique(); - nack_reply->set_err_code(ErrorCode::ID_SERVICE_ERROR); + auto nack_reply = std::make_unique(); + nack_reply->set_err_code(ErrorCode::SERVICE_ERROR); std::string error_msg = "testing_no_error"; nack_reply->set_err_msg(error_msg); @@ -530,10 +529,10 @@ BOOST_AUTO_TEST_CASE(testing_CommunicatorFactoryReply) { auto response_google_msg_ptr = std::get<::google::protobuf::Message *>( response_client.message->getPayload()); - Anon::NackReply *response_payload = - dynamic_cast(response_google_msg_ptr); + SDMS::NackReply *response_payload = + dynamic_cast(response_google_msg_ptr); - BOOST_CHECK(response_payload->err_code() == ErrorCode::ID_SERVICE_ERROR); + BOOST_CHECK(response_payload->err_code() == ErrorCode::SERVICE_ERROR); BOOST_CHECK(response_payload->err_msg().compare(error_msg) == 0); // Client receive diff --git a/common/tests/unit/test_Frame.cpp b/common/tests/unit/test_Frame.cpp index 2ee80b09e..40493c41f 100644 --- a/common/tests/unit/test_Frame.cpp +++ b/common/tests/unit/test_Frame.cpp @@ -12,7 +12,7 @@ #include "common/ProtoBufMap.hpp" // Proto file includes -#include "common/SDMS_Anon.pb.h" +#include "common/envelope.pb.h" // Standard includes #include @@ -36,25 +36,21 @@ BOOST_AUTO_TEST_CASE(testing_Frame) { Frame frame; BOOST_CHECK(frame.size == 0); - BOOST_CHECK(frame.proto_id == 0); - BOOST_CHECK(frame.msg_id == 0); + BOOST_CHECK(frame.msg_type == 0); 
BOOST_CHECK(frame.context == 0); frame.size = 4; - frame.proto_id = 3; - frame.msg_id = 1; + frame.msg_type = 1; frame.context = 2; BOOST_CHECK(frame.size == 4); - BOOST_CHECK(frame.proto_id == 3); - BOOST_CHECK(frame.msg_id == 1); + BOOST_CHECK(frame.msg_type == 1); BOOST_CHECK(frame.context == 2); frame.clear(); BOOST_CHECK(frame.size == 0); - BOOST_CHECK(frame.proto_id == 0); - BOOST_CHECK(frame.msg_id == 0); + BOOST_CHECK(frame.msg_type == 0); BOOST_CHECK(frame.context == 0); } @@ -62,8 +58,7 @@ BOOST_AUTO_TEST_CASE(testing_FrameConverter) { Frame frame; frame.size = 4; - frame.proto_id = 3; - frame.msg_id = 1; + frame.msg_type = 1; frame.context = 2; FrameConverter converter; @@ -76,8 +71,7 @@ BOOST_AUTO_TEST_CASE(testing_FrameConverter) { converter.copy(FrameConverter::CopyDirection::TO_FRAME, zmq_msg, frame_new); BOOST_CHECK(frame_new.size == 4); - BOOST_CHECK(frame_new.proto_id == 3); - BOOST_CHECK(frame_new.msg_id == 1); + BOOST_CHECK(frame_new.msg_type == 1); BOOST_CHECK(frame_new.context == 2); } @@ -87,7 +81,7 @@ BOOST_AUTO_TEST_CASE(testing_FrameFactory) { ProtoBufMap proto_map; - SDMS::Anon::AuthenticateByPasswordRequest auth_by_pass_req; + SDMS::AuthenticateByPasswordRequest auth_by_pass_req; const std::string uid = "tonystark"; const std::string password = "skeleton_key"; auth_by_pass_req.set_uid(uid); @@ -99,7 +93,7 @@ BOOST_AUTO_TEST_CASE(testing_FrameFactory) { Frame frame = factory.create(auth_by_pass_req, proto_map); BOOST_CHECK(frame.size == expected_size); - BOOST_CHECK(frame.getMsgType() == expected_msg_type); + BOOST_CHECK(frame.msg_type == expected_msg_type); } BOOST_AUTO_TEST_CASE(testing_FrameFactory_EmptyPayload) { @@ -111,9 +105,7 @@ BOOST_AUTO_TEST_CASE(testing_FrameFactory_EmptyPayload) { Frame frame = factory.create(*msg); BOOST_CHECK(frame.size == 0); - BOOST_CHECK(frame.proto_id == 0); - BOOST_CHECK(frame.msg_id == 0); - BOOST_CHECK(frame.getMsgType() == 0); + BOOST_CHECK(frame.msg_type == 0); auto msg_new = 
msg_factory.create(MessageType::GOOGLE_PROTOCOL_BUFFER); FrameConverter converter; @@ -122,9 +114,7 @@ BOOST_AUTO_TEST_CASE(testing_FrameFactory_EmptyPayload) { frame_new); BOOST_CHECK(frame_new.size == 0); - BOOST_CHECK(frame_new.proto_id == 0); - BOOST_CHECK(frame_new.msg_id == 0); - BOOST_CHECK(frame_new.getMsgType() == 0); + BOOST_CHECK(frame_new.msg_type == 0); } BOOST_AUTO_TEST_CASE(testing_FrameFactory2) { @@ -134,30 +124,21 @@ BOOST_AUTO_TEST_CASE(testing_FrameFactory2) { ProtoBufMap proto_map; auto msg = msg_factory.create(MessageType::GOOGLE_PROTOCOL_BUFFER); - auto auth_by_token_req = std::make_unique(); + auto auth_by_token_req = std::make_unique(); auth_by_token_req->set_token("magic_token"); + // Reference frame should reflect envelope (wire) size + auto temp_envelope = proto_map.wrapInEnvelope(*auth_by_token_req); Frame frame_from_protocol_msg = frame_factory.create(*auth_by_token_req, proto_map); + frame_from_protocol_msg.size = + static_cast(temp_envelope->ByteSizeLong()); msg->setPayload(std::move(auth_by_token_req)); Frame frame_IMessage = frame_factory.create(*msg); - std::cout << "frame_generated_from_IMessage" << std::endl; - std::cout << frame_IMessage.size << std::endl; - std::cout << frame_IMessage.proto_id << std::endl; - std::cout << frame_IMessage.msg_id << std::endl; - std::cout << frame_IMessage.context << std::endl; - std::cout << "frame_generated_from_protocol_msg" << std::endl; - std::cout << frame_from_protocol_msg.size << std::endl; - std::cout << frame_from_protocol_msg.proto_id << std::endl; - std::cout << frame_from_protocol_msg.msg_id << std::endl; - std::cout << frame_from_protocol_msg.context << std::endl; - BOOST_CHECK(frame_IMessage.size == frame_from_protocol_msg.size); - BOOST_CHECK(frame_IMessage.proto_id == frame_from_protocol_msg.proto_id); - BOOST_CHECK(frame_IMessage.msg_id == frame_from_protocol_msg.msg_id); BOOST_CHECK(frame_IMessage.context == frame_from_protocol_msg.context); } diff --git 
a/common/tests/unit/test_MessageFactory.cpp b/common/tests/unit/test_MessageFactory.cpp index 3df758e7e..65130756b 100644 --- a/common/tests/unit/test_MessageFactory.cpp +++ b/common/tests/unit/test_MessageFactory.cpp @@ -9,7 +9,7 @@ #include "common/ProtoBufMap.hpp" // Proto file includes -#include "common/SDMS_Anon.pb.h" +#include "common/envelope.pb.h" using namespace SDMS; @@ -39,7 +39,7 @@ BOOST_AUTO_TEST_CASE(testing_MessageFactory) { message->set(MessageAttribute::KEY, key); message->set(MessageAttribute::STATE, MessageState::REQUEST); message->set(constants::message::google::CONTEXT, context); - auto auth_by_token_req = std::make_unique(); + auto auth_by_token_req = std::make_unique(); std::string token = "golden_chest"; auth_by_token_req->set_token(token); diff --git a/common/tests/unit/test_ProtoBufFactory.cpp b/common/tests/unit/test_ProtoBufFactory.cpp deleted file mode 100644 index a3d0c4910..000000000 --- a/common/tests/unit/test_ProtoBufFactory.cpp +++ /dev/null @@ -1,59 +0,0 @@ -#define BOOST_TEST_MAIN - -#define BOOST_TEST_MODULE protobuffactory -#include -#include - -// Local private includes -#include "ProtoBufFactory.hpp" - -// Local public includes -#include "common/ProtoBufMap.hpp" - -// Proto file includes -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" - -// Standard includes -#include - -using namespace SDMS; - -struct GlobalProtobufTeardown { - ~GlobalProtobufTeardown() { - // This is the teardown function that runs once at the end - google::protobuf::ShutdownProtobufLibrary(); - } -}; - -// Declare a global fixture instance -BOOST_GLOBAL_FIXTURE(GlobalProtobufTeardown); - -BOOST_AUTO_TEST_SUITE(ProtoBufFactoryTest) - -BOOST_AUTO_TEST_CASE(testing_ProtoBufFactory) { - - ProtoBufMap proto_map; - ProtoBufFactory proto_factory; - - SDMS::Anon::VersionRequest version_request; - uint16_t msg_type = proto_map.getMessageType(version_request); - auto msg = proto_factory.create(msg_type); - BOOST_CHECK(msg_type == 
proto_map.getMessageType(*msg)); -} - -BOOST_AUTO_TEST_CASE(testing_ProtoBufFactory2) { - - ProtoBufMap proto_map; - ProtoBufFactory proto_factory; - - SDMS::Anon::NackReply nack_reply; - uint16_t msg_type = proto_map.getMessageType(nack_reply); - auto msg = proto_factory.create(msg_type); - BOOST_CHECK(msg_type == proto_map.getMessageType(*msg)); - - auto nack_reply_new = dynamic_cast(*msg); - - nack_reply_new.set_err_msg("This is working"); -} -BOOST_AUTO_TEST_SUITE_END() diff --git a/common/tests/unit/test_ProtoBufMap.cpp b/common/tests/unit/test_ProtoBufMap.cpp index 21c12c2e0..d12f83edf 100644 --- a/common/tests/unit/test_ProtoBufMap.cpp +++ b/common/tests/unit/test_ProtoBufMap.cpp @@ -27,21 +27,11 @@ BOOST_GLOBAL_FIXTURE(GlobalProtobufTeardown); BOOST_AUTO_TEST_SUITE(ProtoBufFactoryTest) -BOOST_AUTO_TEST_CASE(testing_ProtoBufFactory_ProtocolID) { - ProtoBufMap proto_map; - - uint8_t proto_id = - proto_map.getProtocolID(MessageProtocol::GOOGLE_ANONONYMOUS); - BOOST_CHECK(proto_id == 1); - proto_id = proto_map.getProtocolID(MessageProtocol::GOOGLE_AUTHORIZED); - BOOST_CHECK(proto_id == 2); -} - BOOST_AUTO_TEST_CASE(testing_ProtoBufFactory) { ProtoBufMap proto_map; ProtoBufFactory proto_factory; - SDMS::Anon::VersionRequest version_request; + SDMS::VersionRequest version_request; uint16_t msg_type = proto_map.getMessageType(version_request); auto msg = proto_factory.create(msg_type); std::cout << "VersionRequest msg_type of VersionRequest, " << msg_type @@ -50,7 +40,7 @@ BOOST_AUTO_TEST_CASE(testing_ProtoBufFactory) { BOOST_AUTO_TEST_CASE(testing_ProtoBufMap_toString) { ProtoBufMap proto_map; - SDMS::Anon::VersionRequest version_request; + SDMS::VersionRequest version_request; uint16_t msg_type = proto_map.getMessageType(version_request); auto name = proto_map.toString(msg_type); BOOST_CHECK(name.compare("VersionRequest") == 0); diff --git a/common/tests/unit/test_Proxy.cpp b/common/tests/unit/test_Proxy.cpp index 5156756d6..b4ab4c4fe 100644 --- 
a/common/tests/unit/test_Proxy.cpp +++ b/common/tests/unit/test_Proxy.cpp @@ -19,8 +19,7 @@ #include "common/SocketOptions.hpp" // Proto file includes -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" +#include "common/envelope.pb.h" // Standard includes #include @@ -229,7 +228,7 @@ BOOST_AUTO_TEST_CASE(testing_Proxy) { std::move(incoming_operators), log_context_proxy); std::chrono::duration duration = - std::chrono::milliseconds(100); + std::chrono::milliseconds(1000); proxy.setRunDuration(duration); proxy.run(); @@ -251,7 +250,7 @@ BOOST_AUTO_TEST_CASE(testing_Proxy) { msg_from_client->set(MessageAttribute::KEY, key); msg_from_client->set(constants::message::google::CONTEXT, context); auto auth_by_token_req = - std::make_unique(); + std::make_unique(); auth_by_token_req->set_token(token); msg_from_client->setPayload(std::move(auth_by_token_req)); client->send(*msg_from_client); @@ -449,7 +448,7 @@ BOOST_AUTO_TEST_CASE(testing_Proxy2) { log_context_proxy_middle); std::chrono::duration duration = - std::chrono::milliseconds(30); + std::chrono::milliseconds(400); proxy.setRunDuration(duration); proxy.run(); @@ -468,7 +467,7 @@ BOOST_AUTO_TEST_CASE(testing_Proxy2) { msg_from_client->set(MessageAttribute::ID, id); msg_from_client->set(MessageAttribute::KEY, key); auto auth_by_token_req = - std::make_unique(); + std::make_unique(); auth_by_token_req->set_token(token); msg_from_client->setPayload(std::move(auth_by_token_req)); client->send(*msg_from_client); @@ -506,7 +505,7 @@ BOOST_AUTO_TEST_CASE(testing_Proxy2) { auto google_msg = std::get<::google::protobuf::Message *>(response.message->getPayload()); auto new_auth_by_pass_req = - dynamic_cast(google_msg); + dynamic_cast(google_msg); BOOST_CHECK(new_auth_by_pass_req->token().compare(token) == 0); @@ -766,7 +765,7 @@ BOOST_AUTO_TEST_CASE(testing_ProxyChain) { msg_factory.create(MessageType::GOOGLE_PROTOCOL_BUFFER); msg_from_client->set(MessageAttribute::ID, id); 
msg_from_client->set(MessageAttribute::KEY, key); - auto auth_by_token_req = std::make_unique(); + auto auth_by_token_req = std::make_unique(); auth_by_token_req->set_token(token); msg_from_client->setPayload(std::move(auth_by_token_req)); client->send(*msg_from_client); @@ -807,7 +806,7 @@ BOOST_AUTO_TEST_CASE(testing_ProxyChain) { auto google_msg = std::get<::google::protobuf::Message *>(response.message->getPayload()); auto new_auth_by_pass_req = - dynamic_cast(google_msg); + dynamic_cast(google_msg); BOOST_CHECK(new_auth_by_pass_req->token().compare(token) == 0); @@ -816,8 +815,8 @@ BOOST_AUTO_TEST_CASE(testing_ProxyChain) { // the proxy chain auto return_msg = msg_factory.createResponseEnvelope(*response.message); // We will just pass a nack reply because it is easy - auto nack_reply = std::make_unique(); - nack_reply->set_err_code(ErrorCode::ID_SERVICE_ERROR); + auto nack_reply = std::make_unique(); + nack_reply->set_err_code(ErrorCode::SERVICE_ERROR); nack_reply->set_err_msg(error_msg); // Place google proto message in IMessage @@ -834,10 +833,10 @@ BOOST_AUTO_TEST_CASE(testing_ProxyChain) { auto response_google_msg_ptr = std::get<::google::protobuf::Message *>( msg_from_server.message->getPayload()); - Anon::NackReply *response_payload = - dynamic_cast(response_google_msg_ptr); + SDMS::NackReply *response_payload = + dynamic_cast(response_google_msg_ptr); - BOOST_CHECK(response_payload->err_code() == ErrorCode::ID_SERVICE_ERROR); + BOOST_CHECK(response_payload->err_code() == ErrorCode::SERVICE_ERROR); BOOST_CHECK(response_payload->err_msg().compare(error_msg) == 0); proxy_thread->join(); @@ -1015,7 +1014,7 @@ BOOST_AUTO_TEST_CASE(testing_Proxy_with_PERSISTENT_proxy_client) { msg_from_client->set(MessageAttribute::ID, id); msg_from_client->set(MessageAttribute::KEY, key); auto auth_by_token_req = - std::make_unique(); + std::make_unique(); auth_by_token_req->set_token(token); msg_from_client->setPayload(std::move(auth_by_token_req)); 
client->send(*msg_from_client); @@ -1053,7 +1052,7 @@ BOOST_AUTO_TEST_CASE(testing_Proxy_with_PERSISTENT_proxy_client) { auto google_msg = std::get<::google::protobuf::Message *>(response.message->getPayload()); auto new_auth_by_pass_req = - dynamic_cast(google_msg); + dynamic_cast(google_msg); BOOST_CHECK(new_auth_by_pass_req->token().compare(token) == 0); diff --git a/common/tests/unit/test_ProxyBasicZMQ.cpp b/common/tests/unit/test_ProxyBasicZMQ.cpp index 03b2f3328..a5e319ddd 100644 --- a/common/tests/unit/test_ProxyBasicZMQ.cpp +++ b/common/tests/unit/test_ProxyBasicZMQ.cpp @@ -20,8 +20,7 @@ #include "common/SocketOptions.hpp" // Proto file includes -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" +#include "common/envelope.pb.h" // Standard includes #include @@ -219,7 +218,7 @@ BOOST_AUTO_TEST_CASE(testing_ProxyBasicZMQ) { log_context1); std::chrono::duration duration = - std::chrono::milliseconds(400); + std::chrono::milliseconds(1000); proxy.setRunDuration(duration); proxy.run(); @@ -238,19 +237,31 @@ BOOST_AUTO_TEST_CASE(testing_ProxyBasicZMQ) { msg_from_client->set(MessageAttribute::ID, id); msg_from_client->set(MessageAttribute::KEY, key); auto auth_by_token_req = - std::make_unique(); + std::make_unique(); auth_by_token_req->set_token(token); msg_from_client->setPayload(std::move(auth_by_token_req)); - std::this_thread::sleep_for(std::chrono::milliseconds(100)); - client->send(*msg_from_client); + std::this_thread::sleep_for(std::chrono::milliseconds(250)); + //client->send(*msg_from_client); + // Client send with retry + auto end_time = std::chrono::steady_clock::now() + std::chrono::milliseconds(2000); + bool sent = false; + while (!sent && std::chrono::steady_clock::now() < end_time) { + try { + client->send(*msg_from_client); + sent = true; + } catch (...) 
{ + std::this_thread::sleep_for(std::chrono::milliseconds(10)); + } + } + BOOST_REQUIRE(sent); // Client send { // Server receive ICommunicator::Response response = server->receive(MessageType::GOOGLE_PROTOCOL_BUFFER); - std::chrono::duration duration = std::chrono::milliseconds(50); + std::chrono::duration duration = std::chrono::milliseconds(400); auto end_time = std::chrono::steady_clock::now() + duration; while (response.time_out and end_time > std::chrono::steady_clock::now()) { @@ -291,7 +302,7 @@ BOOST_AUTO_TEST_CASE(testing_ProxyBasicZMQ) { auto google_msg = std::get<::google::protobuf::Message *>( response.message->getPayload()); auto new_auth_by_pass_req = - dynamic_cast(google_msg); + dynamic_cast(google_msg); BOOST_CHECK(new_auth_by_pass_req->token().compare(token) == 0); } // Server receive @@ -476,7 +487,7 @@ BOOST_AUTO_TEST_CASE(testing_ProxyBasicZMQ_Reply) { msg_from_client->set(MessageAttribute::ID, id); msg_from_client->set(MessageAttribute::KEY, key); auto auth_by_token_req = - std::make_unique(); + std::make_unique(); auth_by_token_req->set_token(token); msg_from_client->setPayload(std::move(auth_by_token_req)); @@ -506,8 +517,8 @@ BOOST_AUTO_TEST_CASE(testing_ProxyBasicZMQ_Reply) { // the proxy chain auto return_msg = msg_factory.createResponseEnvelope(*response.message); // We will just pass a nack reply because it is easy - auto nack_reply = std::make_unique(); - nack_reply->set_err_code(ErrorCode::ID_SERVICE_ERROR); + auto nack_reply = std::make_unique(); + nack_reply->set_err_code(ErrorCode::SERVICE_ERROR); nack_reply->set_err_msg(error_msg); // Place google proto message in IMessage @@ -528,11 +539,11 @@ BOOST_AUTO_TEST_CASE(testing_ProxyBasicZMQ_Reply) { std::cout << __FILE__ << ":" << __LINE__ << std::endl; auto response_google_msg_ptr = std::get<::google::protobuf::Message *>( msg_from_server.message->getPayload()); - Anon::NackReply *response_payload = - dynamic_cast(response_google_msg_ptr); + SDMS::NackReply *response_payload = + 
dynamic_cast(response_google_msg_ptr); std::cout << __FILE__ << ":" << __LINE__ << std::endl; - BOOST_CHECK(response_payload->err_code() == ErrorCode::ID_SERVICE_ERROR); + BOOST_CHECK(response_payload->err_code() == ErrorCode::SERVICE_ERROR); BOOST_CHECK(response_payload->err_msg().compare(error_msg) == 0); std::cout << __FILE__ << ":" << __LINE__ << std::endl; @@ -742,7 +753,7 @@ BOOST_AUTO_TEST_CASE(testing_ProxyBasicZMQ_TCPServer_Reply) { msg_from_client->set(MessageAttribute::KEY, key); msg_from_client->set(constants::message::google::CONTEXT, context); auto auth_by_token_req = - std::make_unique(); + std::make_unique(); auth_by_token_req->set_token(token); msg_from_client->setPayload(std::move(auth_by_token_req)); @@ -785,8 +796,8 @@ BOOST_AUTO_TEST_CASE(testing_ProxyBasicZMQ_TCPServer_Reply) { // the proxy chain auto return_msg = msg_factory.createResponseEnvelope(*response.message); // We will just pass a nack reply because it is easy - auto nack_reply = std::make_unique(); - nack_reply->set_err_code(ErrorCode::ID_SERVICE_ERROR); + auto nack_reply = std::make_unique(); + nack_reply->set_err_code(ErrorCode::SERVICE_ERROR); nack_reply->set_err_msg(error_msg); // Place google proto message in IMessage @@ -812,11 +823,11 @@ BOOST_AUTO_TEST_CASE(testing_ProxyBasicZMQ_TCPServer_Reply) { std::cout << __FILE__ << ":" << __LINE__ << std::endl; auto response_google_msg_ptr = std::get<::google::protobuf::Message *>( msg_from_server.message->getPayload()); - Anon::NackReply *response_payload = - dynamic_cast(response_google_msg_ptr); + SDMS::NackReply *response_payload = + dynamic_cast(response_google_msg_ptr); std::cout << __FILE__ << ":" << __LINE__ << std::endl; - BOOST_CHECK(response_payload->err_code() == ErrorCode::ID_SERVICE_ERROR); + BOOST_CHECK(response_payload->err_code() == ErrorCode::SERVICE_ERROR); BOOST_CHECK(response_payload->err_msg().compare(error_msg) == 0); std::cout << __FILE__ << ":" << __LINE__ << std::endl; diff --git 
a/core/database/CMakeLists.txt b/core/database/CMakeLists.txt index b4ebeb975..0a7f3026a 100644 --- a/core/database/CMakeLists.txt +++ b/core/database/CMakeLists.txt @@ -23,16 +23,27 @@ if( ENABLE_FOXX_TESTS ) add_test(NAME foxx_base_repo COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_base_repository:") add_test(NAME foxx_repositories COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "integration_repositories:") add_test(NAME foxx_repo_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "integration_repo_router:") + add_test(NAME foxx_admin_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_admin_router:") add_test(NAME foxx_validation_repo COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_validation_repository:") add_test(NAME foxx_path COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_path:") add_test(NAME foxx_version COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_version:") add_test(NAME foxx_support COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_support:") add_test(NAME foxx_user_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_user_router:") + add_test(NAME foxx_coll_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_coll_router:") + add_test(NAME foxx_proj_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_proj_router:") + add_test(NAME foxx_schema_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_schema_router:") + add_test(NAME foxx_acl_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_acl_router:") + add_test(NAME foxx_data_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_data_router:") + add_test(NAME foxx_config_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_config_router:") + add_test(NAME foxx_group_router COMMAND 
"${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_group_router:") + add_test(NAME foxx_note_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_note_router:") add_test(NAME foxx_version_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_version_router:") + add_test(NAME foxx_topic_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_topic_router:") add_test(NAME foxx_tag_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_tag_router:") add_test(NAME foxx_task_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_task_router:") add_test(NAME foxx_authz_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_authz_router:") add_test(NAME foxx_query_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_query_router:") + add_test(NAME foxx_metrics_router COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_metrics_router:") add_test(NAME foxx_unit_user_token COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_user_token:") add_test(NAME foxx_unit_user_model COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_user_model:") add_test(NAME foxx_unit_globus_collection_model COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_globus_collection_model:") @@ -46,6 +57,7 @@ if( ENABLE_FOXX_TESTS ) set_tests_properties(foxx_authz PROPERTIES FIXTURES_REQUIRED Foxx) set_tests_properties(foxx_authz_router PROPERTIES FIXTURES_REQUIRED Foxx) set_tests_properties(foxx_record PROPERTIES FIXTURES_REQUIRED Foxx) + set_tests_properties(foxx_data_router PROPERTIES FIXTURES_REQUIRED Foxx) set_tests_properties(foxx_repo PROPERTIES FIXTURES_REQUIRED Foxx) set_tests_properties(foxx_repo_globus PROPERTIES FIXTURES_REQUIRED Foxx) set_tests_properties(foxx_repo_metadata PROPERTIES FIXTURES_REQUIRED Foxx) @@ -55,6 +67,16 @@ if( ENABLE_FOXX_TESTS ) set_tests_properties(foxx_validation_repo PROPERTIES 
FIXTURES_REQUIRED Foxx) set_tests_properties(foxx_path PROPERTIES FIXTURES_REQUIRED Foxx) set_tests_properties(foxx_user_router PROPERTIES FIXTURES_REQUIRED "Foxx;FoxxDBFixtures") + set_tests_properties(foxx_coll_router PROPERTIES FIXTURES_REQUIRED Foxx) + set_tests_properties(foxx_proj_router PROPERTIES FIXTURES_REQUIRED Foxx) + set_tests_properties(foxx_schema_router PROPERTIES FIXTURES_REQUIRED Foxx) + set_tests_properties(foxx_acl_router PROPERTIES FIXTURES_REQUIRED Foxx) + set_tests_properties(foxx_config_router PROPERTIES FIXTURES_REQUIRED Foxx) + set_tests_properties(foxx_topic_router PROPERTIES FIXTURES_REQUIRED Foxx) + set_tests_properties(foxx_group_router PROPERTIES FIXTURES_REQUIRED Foxx) + set_tests_properties(foxx_admin_router PROPERTIES FIXTURES_REQUIRED Foxx) + set_tests_properties(foxx_metrics_router PROPERTIES FIXTURES_REQUIRED Foxx) + set_tests_properties(foxx_note_router PROPERTIES FIXTURES_REQUIRED Foxx) set_tests_properties(foxx_version_router PROPERTIES FIXTURES_REQUIRED Foxx) set_tests_properties(foxx_tag_router PROPERTIES FIXTURES_REQUIRED Foxx) set_tests_properties(foxx_query_router PROPERTIES FIXTURES_REQUIRED Foxx) diff --git a/core/database/foxx/api/acl_router.js b/core/database/foxx/api/acl_router.js index 336afbc00..23fad8393 100644 --- a/core/database/foxx/api/acl_router.js +++ b/core/database/foxx/api/acl_router.js @@ -7,6 +7,8 @@ const g_db = require("@arangodb").db; const g_lib = require("./support"); const error = require("./lib/error_codes"); const permissions = require("./lib/permissions"); +const logger = require("./lib/logger"); +const basePath = "acl"; module.exports = router; @@ -14,8 +16,17 @@ module.exports = router; router .get("/update", function (req, res) { + let result = null; try { - var result = []; + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Started", + description: `Update ACL(s) 
on a data record or collection. ID: ${req.queryParams.id}`, + }); + result = []; g_db._executeTransaction({ collections: { @@ -185,7 +196,27 @@ router }); res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Success", + description: `Update ACL(s) on a data record or collection. ID: ${req.queryParams.id}`, + extra: result, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Failure", + description: `Update ACL(s) on a data record or collection. ID: ${req.queryParams.id}`, + extra: result, + error: e, + }); + g_lib.handleException(e, res); } }) @@ -203,7 +234,17 @@ router router .get("/view", function (req, res) { + let rules = []; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Started", + description: `View current ACL on an object. ID: ${req.queryParams.id}`, + }); + const client = g_lib.getUserFromClientID(req.queryParams.client); var object = g_lib.getObject(req.queryParams.id, client); @@ -215,7 +256,7 @@ router throw error.ERR_PERM_DENIED; } - var rules = g_db + rules = g_db ._query( "for v, e in 1..1 outbound @object acl return { id: v._id, gid: v.gid, grant: e.grant, inhgrant: e.inhgrant }", { @@ -226,7 +267,26 @@ router postProcACLRules(rules, object); res.send(rules); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Success", + description: `View current ACL on an object. 
ID: ${req.queryParams.id}`, + extra: { NumOfRules: rules.length }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Failure", + description: `View current ACL on an object. ID: ${req.queryParams.id}`, + extra: { NumOfRules: rules.length }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -234,20 +294,46 @@ router .queryParam("id", joi.string().required(), "ID or alias of data record or collection") .summary("View current ACL on an object") .description("View current ACL on an object (data record or collection)"); - router .get("/shared/list", function (req, res) { + let result = []; try { const client = g_lib.getUserFromClientID(req.queryParams.client); - - res.send( - g_lib.getACLOwnersBySubject( - client._id, - req.queryParams.inc_users, - req.queryParams.inc_projects, - ), + result = g_lib.getACLOwnersBySubject( + client._id, + req.queryParams.inc_users, + req.queryParams.inc_projects, ); + + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/shared/list", + status: "Started", + description: `List users/projects that have shared data or collections with client/subject. Users: ${req.queryParams.inc_users}; Projects: ${req.queryParams.inc_projects}`, + }); + res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/shared/list", + status: "Success", + description: `List users/projects that have shared data or collections with client/subject. 
Users: ${req.queryParams.inc_users}; Projects: ${req.queryParams.inc_projects}`, + extra: { NumOfUsersAndProjs: result.length }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/shared/list", + status: "Failure", + description: `List users/projects that have shared data or collections with client/subject. Users: ${req.queryParams.inc_users}; Projects: ${req.queryParams.inc_projects}`, + extra: { NumOfUsersAndProjs: result.length }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -259,7 +345,16 @@ router router .get("/shared/list/items", function (req, res) { + let shares = []; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/shared/list/items", + status: "Started", + description: `Lists data and collections shared with client/subject by owner. 
Owner ID: ${req.queryParams.owner}`, + }); const client = g_lib.getUserFromClientID(req.queryParams.client); var owner_id; @@ -273,17 +368,16 @@ router owner_id = g_lib.getUserFromClientID(req.queryParams.owner)._id; } - var i, - share, - shares = g_db - ._query( - "for v in 1..2 inbound @client member, acl filter v.owner == @owner return {id:v._id,title:v.title,alias:v.alias,owner:v.owner,creator:v.creator,md_err:v.md_err,external:v.external,locked:v.locked}", - { - client: client._id, - owner: owner_id, - }, - ) - .toArray(); + var i, share; + shares = g_db + ._query( + "for v in 1..2 inbound @client member, acl filter v.owner == @owner return {id:v._id,title:v.title,alias:v.alias,owner:v.owner,creator:v.creator,md_err:v.md_err,external:v.external,locked:v.locked}", + { + client: client._id, + owner: owner_id, + }, + ) + .toArray(); for (i in shares) { share = shares[i]; @@ -295,7 +389,27 @@ router } else { res.send(dedupShares(client, shares)); } + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/shared/list/items", + status: "Success", + description: `Lists data and collections shared with client/subject by owner. Owner ID: ${req.queryParams.owner}`, + extra: { NumOfShares: shares.length }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/shared/list/items", + status: "Failure", + description: `Lists data and collections shared with client/subject by owner. 
Owner ID: ${req.queryParams.owner}`, + extra: { NumOfShares: shares.length }, + error: e, + }); + g_lib.handleException(e, res); } }) diff --git a/core/database/foxx/api/admin_router.js b/core/database/foxx/api/admin_router.js index 9468118f1..6cadf2b57 100644 --- a/core/database/foxx/api/admin_router.js +++ b/core/database/foxx/api/admin_router.js @@ -8,16 +8,45 @@ const g_db = require("@arangodb").db; const g_lib = require("./support"); const permissions = require("./lib/permissions"); //const perf = require('@arangodb/foxx'); +const basePath = "admin"; +const logger = require("./lib/logger"); module.exports = router; router .get("/ping", function (req, res) { try { + logger.logRequestStarted({ + client: "N/A", + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/ping", + status: "Started", + description: "Ping DB server", + }); res.send({ status: 1, }); + logger.logRequestSuccess({ + client: "N/A", + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/ping", + status: "Success", + description: "Ping DB server", + extra: "N/A", + }); } catch (e) { + logger.logRequestFailure({ + client: "N/A", + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/ping", + status: "Failure", + description: "Ping DB server", + extra: "N/A", + error: e, + }); g_lib.handleException(e, res); } }) @@ -26,9 +55,18 @@ router router .get("/test", function (req, res) { + let result = null; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/test", + status: "Started", + description: "Do perf test", + }); const client = g_lib.getUserFromClientID(req.queryParams.client); - var result = true; + result = true; var item = g_lib.resolveID(req.queryParams.item, client); var obj = g_db[item[0]].document(item); @@ -43,7 +81,26 @@ router perm: result, time: (t2 - t1) / 
1000, }); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/test", + status: "Success", + extra: { execution_time_seconds: (t2 - t1) / 1000 }, + description: "Do perf test", + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/test", + status: "Failure", + description: "Do perf test", + extra: { execution_time_seconds: 0 }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -54,8 +111,17 @@ router router .get("/check", function (req, res) { + let result = null; try { - var result = {}; + logger.logRequestStarted({ + client: "N/A", + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/check", + status: "Started", + description: "Database integrity check", + }); + result = {}; g_db._executeTransaction({ collections: { @@ -281,7 +347,26 @@ router }); res.send(result); + logger.logRequestSuccess({ + client: "N/A", + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/check", + status: "Success", + description: "Database integrity check", + extra: "N/A", + }); } catch (e) { + logger.logRequestFailure({ + client: "N/A", + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/check", + status: "Failure", + description: "Database integrity check", + extra: "N/A", + error: e, + }); g_lib.handleException(e, res); } }) diff --git a/core/database/foxx/api/authz.js b/core/database/foxx/api/authz.js index 8ecbd8227..4977f9a30 100644 --- a/core/database/foxx/api/authz.js +++ b/core/database/foxx/api/authz.js @@ -56,7 +56,6 @@ module.exports = (function () { let record = new Record(data_key); if (!record.exists()) { // Return not found error for non-existent records - console.log("AUTHZ act: read client: " + client._id + " path 
" + path + " NOT_FOUND"); throw [error.ERR_NOT_FOUND, "Record not found: " + path]; } @@ -64,14 +63,12 @@ module.exports = (function () { // if record exists and if it is a public record if (!client) { if (!g_lib.hasPublicRead(record.id())) { - console.log("AUTHZ act: read" + " unknown client " + " path " + path + " FAILED"); throw [ error.ERR_PERM_DENIED, "Unknown client does not have read permissions on " + path, ]; } } else if (!obj.isRecordActionAuthorized(client, data_key, permission)) { - console.log("AUTHZ act: read" + " client: " + client._id + " path " + path + " FAILED"); throw [ error.ERR_PERM_DENIED, "Client " + client._id + " does not have read permissions on " + path, @@ -79,7 +76,6 @@ module.exports = (function () { } if (!record.isPathConsistent(path)) { - console.log("AUTHZ act: read client: " + client._id + " path " + path + " FAILED"); throw [record.error(), record.errorMessage()]; } }; @@ -98,17 +94,11 @@ module.exports = (function () { const data_key = path_components.at(-1); if (!client) { - console.log( - "AUTHZ act: create" + " client: " + client._id + " path " + path + " FAILED", - ); throw [ error.ERR_PERM_DENIED, "Unknown client does not have create permissions on " + path, ]; } else if (!obj.isRecordActionAuthorized(client, data_key, permission)) { - console.log( - "AUTHZ act: create" + " client: " + client._id + " path " + path + " FAILED", - ); throw [ error.ERR_PERM_DENIED, "Client " + client._id + " does not have create permissions on " + path, @@ -120,14 +110,12 @@ module.exports = (function () { // exists in the database. if (!record.exists()) { // If the record does not exist then the path would not be consistent. - console.log("AUTHZ act: create client: " + client._id + " path " + path + " FAILED"); throw [error.ERR_PERM_DENIED, "Invalid record specified: " + path]; } // This will tell us if the proposed path is consistent with what we expect // GridFTP will fail if the posix file path does not exist. 
if (!record.isPathConsistent(path)) { - console.log("AUTHZ act: create client: " + client._id + " path " + path + " FAILED"); throw [record.error(), record.errorMessage()]; } }; diff --git a/core/database/foxx/api/authz_router.js b/core/database/foxx/api/authz_router.js index 162585499..6a099099d 100644 --- a/core/database/foxx/api/authz_router.js +++ b/core/database/foxx/api/authz_router.js @@ -8,23 +8,25 @@ const g_lib = require("./support"); const error = require("./lib/error_codes"); const permissions = require("./lib/permissions"); const authzModule = require("./authz"); +const logger = require("./lib/logger"); const { Repo, PathType } = require("./repo"); - +const basePath = "authz"; module.exports = router; router .get("/gridftp", function (req, res) { + let client = null; + let description = `Check authorization to ${req.queryParams.act} ${req.queryParams.file} on ${req.queryParams.repo} `; try { - console.log( - "/gridftp start authz client", - req.queryParams.client, - "repo", - req.queryParams.repo, - "file", - req.queryParams.file, - "act", - req.queryParams.act, - ); + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/gridftp", + status: "Started", + description: description, + }); // Client will contain the following information // @@ -39,33 +41,22 @@ router // "max_sav_qry" : 20, // : // "email" : "bobjones@gmail.com" - const client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); + client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); if (!client) { - console.log( - "AUTHZ act: " + - req.queryParams.act + - " client: " + - +req.queryParams.client + - " path " + - req.queryParams.file + - " FAILED", - ); throw [error.ERR_PERM_DENIED, "Unknown client: " + req.queryParams.client]; } - let repo = new Repo(req.queryParams.repo); + let repo = 
Repo.resolveFromPath(req.queryParams.file); + + if (repo.id() !== req.queryParams.repo) { + throw [ + error.ERR_PERM_DENIED, + "File path does not match repository: " + req.queryParams.file, + ]; + } let path_type = repo.pathType(req.queryParams.file); // If the provided path is not within the repo throw an error if (path_type === PathType.UNKNOWN) { - console.log( - "AUTHZ act: " + - req.queryParams.act + - " client: " + - client._id + - " path " + - req.queryParams.file + - " FAILED", - ); throw [ error.ERR_PERM_DENIED, "Unknown path, or path is not consistent with supported repository folder hierarchy: " + @@ -83,16 +74,33 @@ router } else { throw [error.ERR_INVALID_PARAM, "Invalid gridFTP action: ", req.queryParams.act]; } - console.log( - "AUTHZ act: " + - req.queryParams.act + - " client: " + - client._id + - " path " + - req.queryParams.file + - " SUCCESS", - ); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/gridftp", + status: "Success", + description: description, + extra: { + id: client?._id, + is_admin: client?.is_admin, + }, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/gridftp", + status: "Failure", + description: description, + extra: { + id: client?._id, + is_admin: client?.is_admin, + }, + error: e, + }); + g_lib.handleException(e, res); } }) @@ -113,12 +121,23 @@ router router .get("/perm/check", function (req, res) { + let client = null; + let result = null; try { - const client = g_lib.getUserFromClientID(req.queryParams.client); + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/perm/check", + status: "Started", + description: "Checks client permissions for object", + }); + 
var perms = req.queryParams.perms ? req.queryParams.perms : permissions.PERM_ALL; - var obj, - result = true, - id = g_lib.resolveID(req.queryParams.id, client), + var obj; + result = true; + var id = g_lib.resolveID(req.queryParams.id, client), ty = id[0]; if (id[1] != "/") { @@ -172,7 +191,34 @@ router res.send({ granted: result, }); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/perm/check", + status: "Success", + description: + "Checks client permissions for object. OBJ ID:" + + req.queryParams.id + + ", Permissions: " + + req.queryParams.perms, + extra: result, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/perm/check", + status: "Failure", + description: + "Checks client permissions for object. OBJ ID:" + + req.queryParams.id + + ", Permissions: " + + req.queryParams.perms, + extra: result, + error: e, + }); g_lib.handleException(e, res); } }) @@ -184,9 +230,21 @@ router router .get("/perm/get", function (req, res) { + let client = null; + let result = null; try { - const client = g_lib.getUserFromClientID(req.queryParams.client); - var result = req.queryParams.perms ? req.queryParams.perms : permissions.PERM_ALL; + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/perm/get", + status: "Started", + description: + "Gets client permissions for object. Permissions:" + req.queryParams.perms, + }); + + result = req.queryParams.perms ? 
req.queryParams.perms : permissions.PERM_ALL; var obj, id = g_lib.resolveID(req.queryParams.id, client), ty = id[0]; @@ -220,7 +278,28 @@ router res.send({ granted: result, }); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/perm/get", + status: "Success", + description: + "Gets client permissions for object. Permissions:" + req.queryParams.perms, + extra: `Object ID: ${req.queryParams.id}`, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/perm/get", + status: "Failure", + description: + "Gets client permissions for object. Permissions:" + req.queryParams.perms, + extra: `Object ID: ${req.queryParams.id}`, + error: e, + }); g_lib.handleException(e, res); } }) diff --git a/core/database/foxx/api/coll_router.js b/core/database/foxx/api/coll_router.js index e3c66b21d..c5ab01b77 100644 --- a/core/database/foxx/api/coll_router.js +++ b/core/database/foxx/api/coll_router.js @@ -9,7 +9,8 @@ const g_graph = require("@arangodb/general-graph")._graph("sdmsg"); const g_lib = require("./support"); const error = require("./lib/error_codes"); const permissions = require("./lib/permissions"); - +const logger = require("./lib/logger"); +const basePath = "col"; module.exports = router; //===== COLLECTION API FUNCTIONS ===== @@ -17,9 +18,19 @@ module.exports = router; router .post("/create", function (req, res) { var retry = 10; - + let client = null; + let log_extra = null; for (;;) { try { + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create", + status: "Started", + description: "Create a new data collection", + }); var result = []; g_db._executeTransaction({ @@ -28,14 +39,15 @@ router write: ["c", "a", 
"alias", "owner", "item", "t", "top", "tag"], }, action: function () { - const client = g_lib.getUserFromClientID(req.queryParams.client); + client = g_lib.getUserFromClientID(req.queryParams.client); var owner = client, parent_id; + let owner_id = owner._id; if (req.body.parent) { parent_id = g_lib.resolveCollID(req.body.parent, client); - var owner_id = g_db.owner.firstExample({ + owner_id = g_db.owner.firstExample({ _from: parent_id, })._to; if (owner_id != client._id) { @@ -171,8 +183,38 @@ router res.send({ results: result, }); + + const item = result[0]; // the newly created collection + + log_extra = { + owner: item.owner, + creator: item.creator, + title: item.title?.substring(0, 20), + tags: Array.isArray(item.tags) ? item.tags.slice(0, 10) : [], + parent_id: item.parent_id, + id: item.id, + }; + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create", + status: "Success", + description: "Create a new data collection", + extra: log_extra, + }); break; } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create", + status: "Failure", + description: "Create a new data collection", + extra: log_extra, + error: e, + }); if (--retry == 0 || !e.errorNum || e.errorNum != 1200) { g_lib.handleException(e, res); } @@ -199,9 +241,20 @@ router router .post("/update", function (req, res) { var retry = 10; - + let client = null; + let extra_log = null; for (;;) { try { + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update", + status: "Started", + description: `Update an existing collection. 
ID: ${req.body.id}`, + }); + var result = { results: [], updates: [], @@ -388,8 +441,36 @@ router }); res.send(result); + + extra_log = { + updates: result.updates.map(({ ut, title, desc, ...rest }) => ({ + ...rest, + title: title?.length > 15 ? title.slice(0, 15) + "..." : title, + desc: desc?.length > 15 ? desc.slice(0, 15) + "..." : desc, + })), + }; + + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update", + status: "Success", + description: `Update an existing collection. ID: ${req.body.id}`, + extra: extra_log, + }); break; } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update", + status: "Failure", + description: `Update an existing collection. ID: ${req.body.id}`, + extra: extra_log, + error: e, + }); if (--retry == 0 || !e.errorNum || e.errorNum != 1200) { g_lib.handleException(e, res); } @@ -416,13 +497,24 @@ router router .get("/view", function (req, res) { + let client = null; + let coll = null; + let extra_log = null; try { - const client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); + client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Started", + description: `View collection information by ID or alias. ID: ${req.queryParams.id}`, + }); var coll_id = g_lib.resolveCollID(req.queryParams.id, client), - coll = g_db.c.document(coll_id), admin = false; + coll = g_db.c.document(coll_id); if (client) { admin = permissions.hasAdminPermObject(client, coll_id); @@ -446,7 +538,33 @@ router res.send({ results: [coll], }); + + extra_log = { + ...coll, + title: coll?.title?.length > 15 ? coll.title.slice(0, 15) + "..." 
: coll?.title, + desc: coll?.desc?.length > 15 ? coll.desc.slice(0, 15) + "..." : coll?.desc, + }; + + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Success", + description: `View collection information by ID or alias. ID: ${req.queryParams.id}`, + extra: extra_log, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Failure", + description: `View collection information by ID or alias. ID: ${req.queryParams.id}`, + extra: extra_log, + error: e, + }); g_lib.handleException(e, res); } }) @@ -457,8 +575,18 @@ router router .get("/read", function (req, res) { + let client = null; + let result = null; try { - const client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); + client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/read", + status: "Started", + description: `Read contents of a collection by ID or alias. ID: ${req.queryParams.id}`, + }); var coll_id = g_lib.resolveCollID(req.queryParams.id, client), coll = g_db.c.document(coll_id), @@ -477,7 +605,6 @@ router var qry = "for v in 1..1 outbound @coll item sort is_same_collection('c',v) DESC, v.title", - result, params = { coll: coll_id, }, @@ -518,7 +645,26 @@ router } res.send(result); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/read", + status: "Success", + description: `Read contents of a collection by ID or alias. 
ID: ${req.queryParams.id}`, + extra: result, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/read", + status: "Failure", + extra: result, + description: `Read contents of a collection by ID or alias. ID: ${req.queryParams.id}`, + error: e, + }); g_lib.handleException(e, res); } }) @@ -531,15 +677,25 @@ router router .get("/write", function (req, res) { + let client = null; + let loose_res = null; try { + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/write", + status: "Started", + description: "Add/remove items in a collection", + }); + g_db._executeTransaction({ collections: { read: ["u", "c", "uuid", "accn"], write: ["item", "d"], }, action: function () { - const client = g_lib.getUserFromClientID(req.queryParams.client); - if (req.queryParams.add && req.queryParams.remove) { throw [ error.ERR_INVALID_PARAM, @@ -730,8 +886,8 @@ router // 7. Re-link loose items to root if (have_loose) { var root_id = g_lib.getRootID(owner_id), - rctxt = null, - loose_res = []; + rctxt = null; + loose_res = []; cres = g_db._query("for v in 1..1 outbound @coll item return v._id", { coll: root_id, @@ -795,7 +951,34 @@ router } }, }); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/write", + status: "Success", + description: "Add/remove items in a collection", + extra: { + addedCount: req.queryParams.add?.length || 0, + removedCount: req.queryParams.remove?.length || 0, + looseCount: loose_res ? 
loose_res?.length : 0, + }, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/write", + status: "Failure", + description: "Add/remove items in a collection", + extra: { + addedCount: req.queryParams.add?.length || 0, + removedCount: req.queryParams.remove?.length || 0, + looseCount: loose_res ? loose_res?.length : 0, + }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -808,14 +991,27 @@ router router .get("/move", function (req, res) { + let client = null; + const itemCount = Array.isArray(req.queryParams.items) ? req.queryParams.items.length : 0; + try { + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/move", + status: "Started", + description: `Move items from source collection: ${req.queryParams.source} to destination collection: ${req.queryParams.dest}`, + }); + g_db._executeTransaction({ collections: { read: ["u", "c", "uuid", "accn"], write: ["item", "d"], }, action: function () { - const client = g_lib.getUserFromClientID(req.queryParams.client); + client = g_lib.getUserFromClientID(req.queryParams.client); var src_id = g_lib.resolveCollID(req.queryParams.source, client), src = g_db.c.document(src_id), dst_id = g_lib.resolveCollID(req.queryParams.dest, client), @@ -954,7 +1150,26 @@ router res.send({}); }, }); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/move", + status: "Success", + description: `Move items from source collection: ${req.queryParams.source} to destination collection: ${req.queryParams.dest}`, + extra: { movedCount: itemCount }, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: 
"GET", + routePath: basePath + "/move", + status: "Failure", + description: `Move items from source collection: ${req.queryParams.source} to destination collection: ${req.queryParams.dest}`, + extra: { movedCount: itemCount }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -967,14 +1182,25 @@ router router .get("/get_parents", function (req, res) { + let client = null; + let results = null; try { - const client = g_lib.getUserFromClientID(req.queryParams.client); + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/get_parents", + status: "Started", + description: `Get parent collection(s) (path) of item. ID: ${req.queryParams.id}`, + }); + var item_id = g_lib.resolveID(req.queryParams.id, client); if (!item_id.startsWith("d/") && !item_id.startsWith("c/")) throw [error.ERR_INVALID_PARAM, "ID is not a collection or record."]; - var results = g_lib.getParents(item_id); + results = g_lib.getParents(item_id); if (req.queryParams.inclusive) { var item; if (item_id[0] == "c") item = g_db.c.document(item_id); @@ -990,7 +1216,26 @@ router } } res.send(results); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/get_parents", + status: "Success", + description: `Get parent collection(s) (path) of item. ID: ${req.queryParams.id}`, + extra: { NumOfParentColls: results?.length }, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/get_parents", + status: "Failure", + description: `Get parent collection(s) (path) of item. 
ID: ${req.queryParams.id}`, + extra: { NumOfParentColls: results?.length }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -1002,8 +1247,20 @@ router router .get("/get_offset", function (req, res) { + let get_offset = null; + let client = null; + let idx = null; try { - const client = g_lib.getUserFromClientID(req.queryParams.client); + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/get_offset", + status: "Started", + description: `Get offset to item in collection. ID: ${req.queryParams.id}; Item ID: ${req.queryParams.item}; Page Size: ${req.queryParams.page_sz}`, + }); + client = g_lib.getUserFromClientID(req.queryParams.client); var coll_id = g_lib.resolveID(req.queryParams.id, client); var item_id = g_lib.resolveID(req.queryParams.item, client); @@ -1025,7 +1282,7 @@ router offset: 0, }); else { - var idx = ids.indexOf(item_id); + idx = ids.indexOf(item_id); if (idx < 0) throw [ error.ERR_NOT_FOUND, @@ -1038,8 +1295,31 @@ router res.send({ offset: req.queryParams.page_sz * Math.floor(idx / req.queryParams.page_sz), }); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/get_offset", + status: "Success", + description: `Get offset to item in collection. ID: ${req.queryParams.id}; Item ID: ${req.queryParams.item}; Page Size: ${req.queryParams.page_sz}`, + extra: { + offset: req.queryParams.page_sz * Math.floor(idx / req.queryParams.page_sz), + }, + }); } } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/get_offset", + status: "Failure", + description: `Get offset to item in collection. 
ID: ${req.queryParams.id}; Item ID: ${req.queryParams.item}; Page Size: ${req.queryParams.page_sz}`, + extra: { + offset: req.queryParams.page_sz * Math.floor(idx / req.queryParams.page_sz), + }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -1054,8 +1334,19 @@ router router .get("/published/list", function (req, res) { + let client = null; + let result = null; try { - const client = g_lib.getUserFromClientID(req.queryParams.client); + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/published/list", + status: "Started", + description: "Get list of clients published collections.", + }); + var owner_id; if (req.queryParams.subject) { @@ -1066,7 +1357,6 @@ router var qry = "for v in 1..1 inbound @user owner filter is_same_collection('c',v) && v.public sort v.title"; - var result; if (req.queryParams.offset != undefined && req.queryParams.count != undefined) { qry += " limit " + req.queryParams.offset + ", " + req.queryParams.count; @@ -1098,7 +1388,26 @@ router } res.send(result); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/published/list", + status: "Success", + description: "Get list of clients published collections.", + extra: { total: result._countTotal }, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/published/list", + status: "Failure", + description: "Get list of clients published collections.", + extra: { total: result._countTotal }, + error: e, + }); g_lib.handleException(e, res); } }) diff --git a/core/database/foxx/api/config_router.js b/core/database/foxx/api/config_router.js index 2531a1167..37b4d18e3 100644 --- a/core/database/foxx/api/config_router.js +++ 
b/core/database/foxx/api/config_router.js @@ -4,16 +4,25 @@ const createRouter = require("@arangodb/foxx/router"); const router = createRouter(); const g_db = require("@arangodb").db; const g_lib = require("./support"); - +const logger = require("./lib/logger"); +const basePath = "config"; module.exports = router; router .get("/msg/daily", function (req, res) { + let msg = null; try { - var msg = {}, - key = { - _key: "msg_daily", - }; + logger.logRequestStarted({ + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/msg/daily", + status: "Started", + description: "Get message of the day", + }); + msg = {}; + var key = { + _key: "msg_daily", + }; if (g_db.config.exists(key)) { msg = g_db.config.document(key); @@ -24,7 +33,25 @@ router } res.send(msg); + logger.logRequestSuccess({ + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/msg/daily", + status: "Success", + description: "Get message of the day", + extra: (msg.msg || "").substring(0, 10), + }); } catch (e) { + logger.logRequestFailure({ + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/msg/daily", + status: "Failure", + description: "Get message of the day", + extra: ((msg && msg.msg) || "").substring(0, 10), + error: e, + }); + g_lib.handleException(e, res); } }) diff --git a/core/database/foxx/api/data_router.js b/core/database/foxx/api/data_router.js index c72f20b05..457a95831 100644 --- a/core/database/foxx/api/data_router.js +++ b/core/database/foxx/api/data_router.js @@ -10,6 +10,8 @@ const permissions = require("./lib/permissions"); const g_proc = require("./process"); const g_tasks = require("./tasks"); const { UserToken } = require("./lib/user_token"); +const logger = require("./lib/logger"); +const basePath = "dat"; module.exports = router; @@ -101,7 +103,6 @@ function recordCreate(client, record, result) { obj.ext_auto = true; } } - if (record.md) { obj.md = 
JSON.parse(record.md); // parse escaped JSON string TODO: this could be dangerous if (Array.isArray(obj.md)) throw [error.ERR_INVALID_PARAM, "Metadata cannot be an array"]; @@ -256,10 +257,20 @@ function recordCreate(client, record, result) { router .post("/create", function (req, res) { var retry = 10; - + let client = null; + let result = null; for (;;) { try { - var result = { + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create", + status: "Started", + description: "Create a new data record", + }); + result = { results: [], }; @@ -288,8 +299,34 @@ router }); res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create", + status: "Success", + description: "Create a new data record", + extra: { + id: result?.results[0]?.id, + count: result?.results.length, + }, + }); + break; } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create", + status: "Failure", + description: "Create a new data record", + extra: { + id: result?.results[0]?.id, + count: result?.results.length, + }, + error: e, + }); if (--retry == 0 || !e.errorNum || e.errorNum != 1200) { g_lib.handleException(e, res); } @@ -331,10 +368,20 @@ router router .post("/create/batch", function (req, res) { var retry = 10; - + let client = null; + let result = null; for (;;) { try { - var result = { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create/batch", + status: "Started", + description: "Create a batch of new data records", + }); + + result = { results: [], }; @@ -367,8 +414,35 
@@ router }); res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create/batch", + status: "Success", + description: "Create a batch of new data records", + extra: { + latestId: result?.results[result?.results.length - 1]?.id, + count: result?.results.length, + }, + }); + break; } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create/batch", + status: "Failure", + description: "Create a batch of new data records", + extra: { + latestId: result?.results[result?.results.length - 1]?.id, + count: result?.results.length, + }, + + error: e, + }); if (--retry == 0 || !e.errorNum || e.errorNum != 1200) { g_lib.handleException(e, res); } @@ -420,7 +494,6 @@ router function recordUpdate(client, record, result) { // /console.log("recordUpdate:",record); - var data_id = g_lib.resolveDataID(record.id, client); var data = g_db.d.document(data_id); @@ -574,14 +647,14 @@ function recordUpdate(client, record, result) { for (i in data.tags) { tag = data.tags[i]; - if (!(tag in record.tags)) { + if (!record.tags.includes(tag)) { rem_tags.push(tag); } } for (i in record.tags) { tag = record.tags[i]; - if (!(tag in data.tags)) { + if (!data.tags.includes(tag)) { add_tags.push(tag); } } @@ -635,7 +708,7 @@ function recordUpdate(client, record, result) { } } - if (record.deps != undefined && (record.deps_add != undefined || record.deps_rem != undefined)) + if (record.deps != undefined && (record.dep_add != undefined || record.dep_rem != undefined)) throw [error.ERR_INVALID_PARAM, "Cannot use both dependency set and add/remove."]; var dep, @@ -762,13 +835,23 @@ function recordUpdate(client, record, result) { router .post("/update", function (req, res) { + let client = null; + let result = null; try { - var result = { + client = 
g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update", + status: "Started", + description: `Update an existing data record. RecordID: ${req.body.id}`, + }); + + result = { results: [], updates: new Set(), }; - const client = g_lib.getUserFromClientID(req.queryParams.client); - g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn", "loc"], @@ -794,6 +877,7 @@ router var doc, updates = []; + result.updates.forEach(function (id) { if (id == req.body.id) { // Updated record is already in results - just copy it @@ -802,6 +886,7 @@ router doc = g_db._document(id); doc.notes = g_lib.getNoteMask(client, doc); } + delete doc.desc; //delete doc.md; updates.push(doc); @@ -809,7 +894,28 @@ router result.updates = updates; res.send(result); + + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update", + status: "Success", + description: `Update an existing data record. RecordID: ${req.body.id}`, + extra: "N/A", + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update", + status: "Failure", + description: `Update an existing data record. RecordID: ${req.body.id}`, + extra: "N/A", + error: e, + }); + g_lib.handleException(e, res); } }) @@ -858,8 +964,22 @@ router router .post("/update/batch", function (req, res) { + let result = null; + const ids = Array.isArray(req.body) ? req.body.map((r) => r.id) : []; + + let totalCount = ids.length; + const displayedIds = totalCount > 5 ? 
`${ids.slice(0, 5).join(",")}...` : ids.join(","); try { - var result = { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update/batch", + status: "Started", + description: `Update a batch of existing data record. RecordIDs: ${displayedIds}`, + }); + + result = { results: [], updates: new Set(), }; @@ -912,7 +1032,31 @@ router result.updates = updates; res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update/batch", + status: "Success", + description: `Update a batch of existing data record. RecordIDs: ${displayedIds}`, + extra: { + count: totalCount, + }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update/batch", + status: "Failure", + description: `Update a batch of existing data record. RecordIDs: ${displayedIds}`, + extra: { + count: totalCount, + }, + error: e, + }); + g_lib.handleException(e, res); } }) @@ -970,6 +1114,15 @@ router router .post("/update/md_err_msg", function (req, res) { try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update/md_err_msg", + status: "Started", + description: `Update data record schema validation error message. RecordID: ${req.queryParams.id}`, + }); + g_db._executeTransaction({ collections: { write: ["d"], @@ -998,8 +1151,27 @@ router ); }, }); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update/md_err_msg", + status: "Success", + description: `Update data record schema validation error message. 
RecordID: ${req.queryParams.id}`, + extra: "N/A", + }); } catch (e) { g_lib.handleException(e, res); + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update/md_err_msg", + status: "Failure", + description: `Update data record schema validation error message. RecordID: ${req.queryParams.id}`, + extra: "N/A", + error: e, + }); } }) .queryParam("client", joi.string().optional(), "Client ID") @@ -1013,11 +1185,32 @@ router router .post("/update/size", function (req, res) { var retry = 10; + let result = null; + const records = Array.isArray(req.body?.records) ? req.body.records : []; + + const total = records.length; + + const summary = records + .slice(0, 3) + .map((r) => `${r.id}:${r.size}`) + .join(", "); + + const recordSummary = + total > 3 ? `${summary}... (${total} total)` : `${summary} (${total} total)`; // Must do this in a retry loop in case of concurrent (non-put) updates for (;;) { try { - var result = []; + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update/size", + status: "Started", + description: `Update existing data record size. Summary: ${recordSummary}`, + }); + + result = []; g_db._executeTransaction({ collections: { @@ -1066,8 +1259,27 @@ router }); res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update/size", + status: "Success", + description: `Update existing data record size. Summary: ${recordSummary}`, + extra: "N/A", + }); + break; } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update/size", + status: "Failure", + description: `Update existing data record size. 
Summary: ${recordSummary}`, + extra: "N/A", error: e, + }); if (--retry == 0 || !e.errorNum || e.errorNum != 1200) { g_lib.handleException(e, res); } @@ -1097,6 +1309,15 @@ router router .get("/view", function (req, res) { try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Started", + description: `Get data by ID or alias. ID: ${req.queryParams.id}`, + }); + const client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); var data_id = g_lib.resolveDataID(req.queryParams.id, client); @@ -1166,7 +1387,26 @@ router res.send({ results: [data], }); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Success", + description: `Get data by ID or alias. ID: ${req.queryParams.id}`, + extra: "N/A", + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Failure", + description: `Get data by ID or alias. ID: ${req.queryParams.id}`, + extra: "N/A", error: e, + }); + g_lib.handleException(e, res); } }) @@ -1177,7 +1417,20 @@ router router .post("/export", function (req, res) { + const ids = req.body.id || []; + const preview = ids.slice(0, 3).join(", "); + const idSummary = ids.length > 3 ? `${preview}, ...` : preview; + const idCount = ids.length; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/export", + status: "Started", + description: `Export record data. 
ID: ${idSummary}`, + }); + g_db._executeTransaction({ collections: { read: ["uuid", "accn", "d", "c", "item"], @@ -1224,7 +1477,28 @@ router res.send(results); }, }); + + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/export", + status: "Success", + description: `Export record data. ID: ${idSummary}`, + extra: { count: idCount }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/export", + status: "Failure", + description: `Export record data. ID: ${idSummary}`, + extra: { count: idCount }, + error: e, + }); + g_lib.handleException(e, res); } }) @@ -1242,7 +1516,17 @@ router router .get("/dep/graph/get", function (req, res) { + let result = []; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/dep/graph/get", + status: "Started", + description: `Get data dependency graph. ID: ${req.queryParams.id}`, + }); + const client = g_lib.getUserFromClientID(req.queryParams.client); var data_id = g_lib.resolveDataID(req.queryParams.id, client); var i, @@ -1255,7 +1539,6 @@ router visited = [data_id], cur = [[data_id, true]], next = [], - result = [], notes, gen = 0; @@ -1412,7 +1695,27 @@ router } res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/dep/graph/get", + status: "Success", + description: `Get data dependency graph. 
ID: ${req.queryParams.id}`, + extra: { count: result.length }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/dep/graph/get", + status: "Failure", + description: `Get data dependency graph. ID: ${req.queryParams.id}`, + extra: { count: result.length }, + error: e, + }); + g_lib.handleException(e, res); } }) @@ -1423,7 +1726,20 @@ router router .get("/lock", function (req, res) { + const ids = req.queryParams.ids || []; + const preview = ids.slice(0, 3).join(", "); + const idSummary = ids.length > 3 ? `${preview}, ...` : preview; + const idCount = ids.length; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/lock", + status: "Started", + description: `Toggle data record lock. IDs: ${idSummary}, Lock: ${req.queryParams.lock}`, + }); + const client = g_lib.getUserFromClientID(req.queryParams.client); g_db._executeTransaction({ collections: { @@ -1461,7 +1777,26 @@ router res.send(result); }, }); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/lock", + status: "Success", + description: `Toggle data record lock. IDs: ${idSummary}, Lock: ${req.queryParams.lock}`, + extra: { count: idCount }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/lock", + status: "Failure", + description: `Toggle data record lock. 
IDs: ${idSummary}, Lock: ${req.queryParams.lock}`, + extra: { count: idCount }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -1490,7 +1825,17 @@ router */ router .get("/path", function (req, res) { + let path = null; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/path", + status: "Started", + description: `Get raw data local path. IDs: ${req.queryParams.id}; Domain: ${req.queryParams.domain}`, + }); + const client = g_lib.getUserFromClientID(req.queryParams.client); var data_id = g_lib.resolveDataID(req.queryParams.id, client); @@ -1512,12 +1857,31 @@ router "Can only access data from '" + repo.domain + "' domain", ]; - var path = g_lib.computeDataPath(loc, true); + path = g_lib.computeDataPath(loc, true); res.send({ path: path, }); //res.send({ path: repo.exp_path + loc.path.substr( repo.path.length ) }); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/path", + status: "Success", + description: `Get raw data local path. IDs: ${req.queryParams.id}; Domain: ${req.queryParams.domain}`, + extra: { path: path }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/path", + status: "Failure", + description: `Get raw data local path. 
IDs: ${req.queryParams.id}; Domain: ${req.queryParams.domain}`, + extra: { path: path }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -1529,7 +1893,17 @@ router router .get("/list/by_alloc", function (req, res) { + let result = null; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list/by_alloc", + status: "Started", + description: `List data records by allocation. repo: ${req.queryParams.repo}`, + }); + const client = g_lib.getUserFromClientID(req.queryParams.client); var owner_id; @@ -1545,7 +1919,6 @@ router } var qry = "for v,e in 1..1 inbound @repo loc filter e.uid == @uid sort v.title", - result, doc; if (req.queryParams.offset != undefined && req.queryParams.count != undefined) { @@ -1579,6 +1952,7 @@ router repo: req.queryParams.repo, uid: owner_id, }); + result = result.toArray(); } for (var i in result) { @@ -1589,7 +1963,27 @@ router } res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list/by_alloc", + status: "Success", + description: `List data records by allocation. repo: ${req.queryParams.repo}`, + extra: { count: req.queryParams?.count }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list/by_alloc", + status: "Failure", + description: `List data records by allocation. 
repo: ${req.queryParams.repo}`, + extra: { count: req.queryParams?.count }, + error: e, + }); + g_lib.handleException(e, res); } }) @@ -1604,6 +1998,15 @@ router router .post("/get", function (req, res) { try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/get", + status: "Started", + description: `Get (download) data to Globus destination path. ID: ${req.body.id}`, + }); + g_db._executeTransaction({ collections: { read: ["uuid", "accn", "d", "c", "item"], @@ -1658,9 +2061,29 @@ router g_lib.saveRecentGlobusPath(client, req.body.path, g_lib.TT_DATA_GET); res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/get", + status: "Success", + description: `Get (download) data to Globus destination path. ID: ${req.body.id}`, + extra: "N/A", + }); }, }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/get", + status: "Failure", + description: `Get (download) data to Globus destination path. ID: ${req.body.id}`, + extra: "N/A", + error: e, + }); + g_lib.handleException(e, res); } }) @@ -1687,6 +2110,15 @@ router router .post("/put", function (req, res) { try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/put", + status: "Started", + description: `Put (upload) raw data to record. 
ID: ${req.body.id}`, + }); + g_db._executeTransaction({ collections: { read: ["uuid", "accn", "d", "c", "item"], @@ -1747,7 +2179,27 @@ router res.send(result); }, }); + + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/put", + status: "Success", + description: `Put (upload) raw data to record. ID: ${req.body.id}`, + extra: "N/A", + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/put", + status: "Failure", + description: `Put (upload) raw data to record. ID: ${req.body.id}`, + extra: "N/A", + error: e, + }); g_lib.handleException(e, res); } }) @@ -1773,7 +2225,20 @@ router router .post("/alloc_chg", function (req, res) { + const ids = req.body.ids || []; + const id_count = ids.length; + + const ids_preview = id_count > 3 ? ids.slice(0, 3).concat("...") : ids; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/alloc_chg", + status: "Started", + description: `Move raw data to new allocation. Repo ID: ${req.body.repo_id}`, + }); + g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn", "d", "c", "item"], @@ -1800,7 +2265,26 @@ router res.send(result); }, }); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/alloc_chg", + status: "Success", + description: `Move raw data to new allocation. 
Repo ID: ${req.body.repo_id}`, + extra: { IDs: ids_preview, count: id_count }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/alloc_chg", + status: "Failure", + description: `Move raw data to new allocation. Repo ID: ${req.body.repo_id}`, + extra: { IDs: ids_preview, count: id_count }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -1821,7 +2305,20 @@ router router .post("/owner_chg", function (req, res) { + const ids = Array.isArray(req.body.ids) ? req.body.ids : []; + const id_count = ids.length; + + const id_preview = id_count > 3 ? ids.slice(0, 3).concat("...") : ids; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/owner_chg", + status: "Started", + description: `Move data records and raw data to a new owner/allocation. Coll ID: ${req.body.coll_id}`, + }); + g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn", "d", "c", "item", "admin"], @@ -1836,6 +2333,7 @@ router id = g_lib.resolveDataCollID(req.body.ids[i], client); res_ids.push(id); } + var coll_id = g_lib.resolveDataCollID(req.body.coll_id, client); var result = g_tasks.taskInitRecOwnerChg( client, @@ -1848,7 +2346,27 @@ router res.send(result); }, }); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/owner_chg", + status: "Success", + description: `Move data records and raw data to a new owner/allocation. 
Coll ID: ${req.body.coll_id}`, + extra: { IDs: id_preview, count: id_count }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/owner_chg", + status: "Failure", + description: `Move data records and raw data to a new owner/allocation. Coll ID: ${req.body.coll_id}`, + extra: { IDs: id_preview, count: id_count }, + error: e, + }); + g_lib.handleException(e, res); } }) @@ -1872,6 +2390,15 @@ router router .post("/delete", function (req, res) { var retry = 10; + let ids = []; + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/delete", + status: "Started", + description: `Attempting to delete a total of: ${req.body.ids.length}`, + }); for (;;) { try { @@ -1899,10 +2426,11 @@ router }, action: function () { const client = g_lib.getUserFromClientID(req.queryParams.client); - var i, - id, - ids = []; + var i, id; + // Needs to be reinitialized to an empty array to avoid + // accumulating content from retries + ids = []; for (i in req.body.ids) { id = g_lib.resolveDataCollID(req.body.ids[i], client); ids.push(id); @@ -1913,8 +2441,30 @@ router res.send(result); }, }); + const preview = ids.slice(0, 5).join(", "); + const idSummary = ids.length > 5 ? 
`${preview}, ...` : preview; + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/delete", + status: "Success", + description: `Delete data items: ${idSummary}...`, + extra: { count: ids.length }, + }); + break; } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/delete", + status: "Failure", + description: `Attempting to delete a total of: ${req.body.ids.length}`, + extra: { retry_attempt: retry }, + error: e, + }); if (--retry == 0 || !e.errorNum || e.errorNum != 1200) { g_lib.handleException(e, res); } diff --git a/core/database/foxx/api/group_router.js b/core/database/foxx/api/group_router.js index f2b0960a6..9c80ccbb3 100644 --- a/core/database/foxx/api/group_router.js +++ b/core/database/foxx/api/group_router.js @@ -9,14 +9,26 @@ const permissions = require("./lib/permissions"); const g_db = require("@arangodb").db; const g_graph = require("@arangodb/general-graph")._graph("sdmsg"); const g_lib = require("./support"); - +const logger = require("./lib/logger"); +const basePath = "grp"; module.exports = router; //========== GROUP API FUNCTIONS ========== router .get("/create", function (req, res) { + let client = null; + let logExtra = null; try { + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/create", + status: "Started", + description: "Create a new group", + }); var result = []; g_db._executeTransaction({ @@ -25,7 +37,7 @@ router write: ["g", "owner", "member"], }, action: function () { - const client = g_lib.getUserFromClientID(req.queryParams.client); + client = g_lib.getUserFromClientID(req.queryParams.client); var uid; if (req.queryParams.proj) { @@ -87,9 +99,38 @@ router 
result.push(group.new); }, }); + logExtra = { + _id: result[0]._id, + uid: result[0].uid, + gid: result[0].gid, + title: (result[0].title || "").slice(0, 10), + truncated_members: Array.isArray(result[0].members) + ? result[0].members.slice(-5) + : [], + }; res.send(result); + + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/create", + status: "Success", + description: "Create a new group", + extra: logExtra, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/create", + status: "Failure", + description: "Create a new group", + extra: logExtra, + error: e, + }); g_lib.handleException(e, res); } }) @@ -104,7 +145,18 @@ router router .get("/update", function (req, res) { + let client = null; + let logExtra = null; try { + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Started", + description: "Updates an existing group. GID:" + req.queryParams.gid, + }); var result = []; g_db._executeTransaction({ @@ -113,7 +165,7 @@ router write: ["g", "owner", "member"], }, action: function () { - const client = g_lib.getUserFromClientID(req.queryParams.client); + client = g_lib.getUserFromClientID(req.queryParams.client); var group; if (req.queryParams.proj) { @@ -204,9 +256,37 @@ router result.push(group); }, }); + logExtra = { + title: (result[0].title || "").slice(0, 10), + description: (result[0].desc || "").slice(0, 10), + truncated_members: Array.isArray(result[0].members) + ? 
result[0].members.slice(-5) + : [], + }; res.send(result); + + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Success", + description: "Updates an existing group. GID:" + req.queryParams.gid, + extra: logExtra, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Failure", + description: "Updates an existing group. GID:" + req.queryParams.gid, + extra: logExtra, + error: e, + }); + g_lib.handleException(e, res); } }) @@ -230,15 +310,26 @@ router router .get("/delete", function (req, res) { + let client = null; + let group = null; try { + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/delete", + status: "Started", + description: "Deletes an existing group. 
GID:" + req.queryParams.gid, + }); + g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn", "owner", "admin"], write: ["g", "owner", "member", "acl"], }, action: function () { - const client = g_lib.getUserFromClientID(req.queryParams.client); - var group; + client = g_lib.getUserFromClientID(req.queryParams.client); if (req.queryParams.proj) { var uid = req.queryParams.proj; @@ -271,7 +362,26 @@ router g_graph.g.remove(group._id); }, }); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/delete", + status: "Success", + description: `Deletes an existing group: ${group?._id}, GID: ${req.queryParams.gid}`, + extra: "N/A", + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/delete", + status: "Failure", + description: `Deletes an existing group: ${group?._id}, GID: ${req.queryParams.gid}`, + extra: "N/A", + error: e, + }); g_lib.handleException(e, res); } }) @@ -283,8 +393,20 @@ router router .get("/list", function (req, res) { + let client = null; + let groups = null; + let logExtra = null; try { - const client = g_lib.getUserFromClientID(req.queryParams.client); + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list", + status: "Started", + description: "List groups", + }); + var owner_id; if (req.queryParams.proj) { @@ -295,7 +417,7 @@ router owner_id = client._id; } - var groups = g_db + groups = g_db ._query( "for v in 1..1 inbound @client owner filter IS_SAME_COLLECTION('g', v) return { uid: v.uid, gid: v.gid, title: v.title }", { @@ -304,8 +426,29 @@ router ) .toArray(); + logExtra = { + totalGroups: groups.length, + }; res.send(groups); + logger.logRequestSuccess({ + client: 
client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list", + status: "Success", + description: "List groups", + extra: logExtra, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list", + status: "Failure", + description: "List groups", + extra: logExtra, + }); g_lib.handleException(e, res); } }) @@ -316,8 +459,19 @@ router router .get("/view", function (req, res) { + let client = null; + let logExtra = null; try { - const client = g_lib.getUserFromClientID(req.queryParams.client); + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Started", + description: `View group details. GID: ${req.queryParams.gid}`, + }); + var group; if (req.queryParams.proj) { @@ -351,8 +505,33 @@ router group: group._id, }) .toArray(); + logExtra = { + title: (result.title || "").slice(0, 10), + description: (result.desc || "").slice(0, 10), + truncated_members: Array.isArray(result.members) ? result.members.slice(-5) : [], + }; res.send([result]); + + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Success", + description: `View group details. GID: ${req.queryParams.gid}`, + extra: logExtra, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Failure", + description: `View group details. 
GID: ${req.queryParams.gid}`, + extra: logExtra, + error: e, + }); g_lib.handleException(e, res); } }) diff --git a/core/database/foxx/api/lib/logger.js b/core/database/foxx/api/lib/logger.js index c410a8040..8098dbee5 100644 --- a/core/database/foxx/api/lib/logger.js +++ b/core/database/foxx/api/lib/logger.js @@ -11,7 +11,7 @@ function logRequestSuccess({ extra, }) { // helper to pad fields - const pad = (label, value, length = 20) => + const pad = (label, value, length = 10) => `${label}: ${value || "unknown"}`.padEnd(length, " "); console.info( @@ -42,7 +42,7 @@ function logRequestFailure({ error, }) { // helper to pad fields - const pad = (label, value, length = 20) => + const pad = (label, value, length = 10) => `${label}: ${value || "unknown"}`.padEnd(length, " "); //PUT IF STATEMENT if (g_lib.isInteger(error) || Array.isArray(error)) { @@ -88,7 +88,7 @@ function logRequestFailure({ function logRequestStarted({ client, correlationId, httpVerb, routePath, status, description }) { // helper to pad fields - const pad = (label, value, length = 20) => + const pad = (label, value, length = 10) => `${label}: ${value || "unknown"}`.padEnd(length, " "); console.info( diff --git a/core/database/foxx/api/metrics_router.js b/core/database/foxx/api/metrics_router.js index 0345e1188..76dc05e68 100644 --- a/core/database/foxx/api/metrics_router.js +++ b/core/database/foxx/api/metrics_router.js @@ -5,12 +5,22 @@ const router = createRouter(); const g_db = require("@arangodb").db; const g_lib = require("./support"); const joi = require("joi"); +const logger = require("./lib/logger"); module.exports = router; +const basePath = "metrics"; router .post("/msg_count/update", function (req, res) { try { + logger.logRequestStarted({ + client: "N/A", + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/msg_count/update", + status: "Started", + description: "Update message metrics", + }); var i, u, ts = req.body.timestamp, @@ -33,7 +43,26 @@ 
router }; g_db.metrics.save(obj); } + logger.logRequestSuccess({ + client: "N/A", + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/msg_count/update", + status: "Success", + description: "Update message metrics", + extra: obj, + }); } catch (e) { + logger.logRequestFailure({ + client: "N/A", + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/msg_count/update", + status: "Failure", + description: "Update message metrics", + extra: obj, + error: e, + }); g_lib.handleException(e, res); } }) @@ -43,7 +72,18 @@ router router .get("/msg_count", function (req, res) { + let client = null; + let result = []; try { + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/msg_count", + status: "Started", + description: "Grab all message metrics", + }); var par = { now: Date.now() / 1000, since: 60 * (req.queryParams.since ? 
req.queryParams.since : 60), @@ -61,9 +101,9 @@ router } var qry = "for i in metrics filter " + filter + " sort i.timestamp return i", - result = g_db._query(qry, par).toArray(), r; + result = g_db._query(qry, par).toArray(); for (var i in result) { r = result[i]; delete r._rev; @@ -71,7 +111,26 @@ router } res.send(result); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/msg_count", + status: "Success", + description: "Grab all message metrics", + extra: { count_msg_types: result.length }, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/msg_count", + status: "Failure", + description: "Grab all message metrics", + extra: { count_msg_types: result.length }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -82,14 +141,27 @@ router "Return since last specified minutes ago (default 60)", ) .queryParam("uid", joi.string().optional(), "User ID (default none)") - .summary("Update message metrics.") - .description("Update message metrics."); + .summary("Grab all message metrics.") + .description("Grab all message metrics."); router .get("/users/active", function (req, res) { + let client = null; + let cnt = {}; try { - var cnt = {}, - u, + client = req.queryParams.client + ? 
g_lib.getUserFromClientID(req.queryParams.client) + : null; + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/users/active", + status: "Started", + description: "Get recently active users from metrics", + }); + + var u, r, qryres = g_db ._query( @@ -111,7 +183,26 @@ router } res.json(cnt); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/users/active", + status: "Success", + description: "Get recently active users from metrics", + extra: { total_active_users: Object.keys(cnt).length }, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/users/active", + status: "Failure", + description: "Get recently active users from metrics", + extra: { total_active_users: Object.keys(cnt).length }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -126,6 +217,15 @@ router router .post("/purge", function (req, res) { try { + logger.logRequestStarted({ + client: "undefined", + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/purge", + status: "Started", + description: "Purge older metrics", + }); + g_db.metrics.save({ timestamp: Math.floor(Date.now() / 1000), type: "purge", @@ -135,7 +235,26 @@ router g_db._query("for i in metrics filter i.timestamp < @ts remove i in metrics", { ts: req.queryParams.timestamp, }); + logger.logRequestSuccess({ + client: "undefined", + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/purge", + status: "Success", + description: "Purge older metrics", + extra: "undefined", + }); } catch (e) { + logger.logRequestFailure({ + client: "undefined", + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/purge", + 
status: "Failure", + description: "Purge older metrics", + extra: "undefined", + error: e, + }); g_lib.handleException(e, res); } }) diff --git a/core/database/foxx/api/models/repositories/base_repository.js b/core/database/foxx/api/models/repositories/base_repository.js index f11878805..20674abea 100644 --- a/core/database/foxx/api/models/repositories/base_repository.js +++ b/core/database/foxx/api/models/repositories/base_repository.js @@ -90,8 +90,6 @@ class BaseRepository { if (g_db._exists(config.id)) { const existingDoc = g_db.repo.document(config.key); const { _id, _key, _rev, ...temp } = existingDoc; - console.log("existingData found"); - console.log(existingDoc); this.repoData = { id: existingDoc._id, key: existingDoc._key, @@ -101,8 +99,6 @@ class BaseRepository { } else { this.repoData = new_repo_data; } - console.log("Repo data after deepMerge"); - console.log(this.repoData); } catch { return Result.err({ code: error.ERR_INVALID_PARAM, @@ -111,9 +107,6 @@ class BaseRepository { } } else { this.repoData = { ...new_repo_data, id: `repo/${config.key}`, key: config.key }; - - console.log("Repo data, now that we know document doesn't exist"); - console.log(this.repoData); } return Result.ok(this); diff --git a/core/database/foxx/api/note_router.js b/core/database/foxx/api/note_router.js index 445ed28d1..3fad32187 100644 --- a/core/database/foxx/api/note_router.js +++ b/core/database/foxx/api/note_router.js @@ -7,14 +7,18 @@ const g_db = require("@arangodb").db; const g_lib = require("./support"); const error = require("./lib/error_codes"); const permissions = require("./lib/permissions"); - +const basePath = "note"; +const logger = require("./lib/logger"); module.exports = router; //==================== ACL API FUNCTIONS router .post("/create", function (req, res) { - console.log("note/create"); + let client = null; + let result = {}; + let doc; + let _key, _rev; try { g_db._executeTransaction({ collections: { @@ -22,9 +26,17 @@ router write: ["d", "n", 
"note"], }, action: function () { - const client = g_lib.getUserFromClientID(req.queryParams.client); - var id = g_lib.resolveDataCollID(req.queryParams.subject, client), - doc = g_db._document(id); + client = g_lib.getUserFromClientID(req.queryParams.client); + var id = g_lib.resolveDataCollID(req.queryParams.subject, client); + doc = g_db._document(id); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create", + status: "Started", + description: "Create an annotation on an object " + req.queryParams.subject, + }); if (!permissions.hasAdminPermObject(client, id)) { if ( @@ -87,9 +99,32 @@ router results: [note.new], updates: Object.values(updates), }); + ({ _key, _rev, ...result } = doc); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create", + status: "Success", + description: "Create an annotation on an object " + req.queryParams.subject, + extra: result, + }); }, }); } catch (e) { + if (doc) { + ({ _key, _rev, ...result } = doc); + } + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create", + status: "Failure", + description: "Create an annotation on an object " + req.queryParams.subject, + extra: result, + error: e, + }); g_lib.handleException(e, res); } }) @@ -108,7 +143,8 @@ router router .post("/update", function (req, res) { - console.log("note/update"); + let client = null; + let result, doc, _key, _rev; try { g_db._executeTransaction({ collections: { @@ -116,7 +152,15 @@ router write: ["d", "n", "note"], }, action: function () { - const client = g_lib.getUserFromClientID(req.queryParams.client); + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + 
httpVerb: "POST", + routePath: basePath + "/update", + status: "Started", + description: "Update annotation " + req.queryParams.id, + }); if (!req.queryParams.id.startsWith("n/")) throw [ @@ -136,9 +180,10 @@ router }), old_state = note.state, old_type = note.type, - doc = g_db._document(ne._from), updates = {}; + doc = g_db._document(ne._from); + /* Permissions to update: Currently any admin of the subject and the creator of the annotation may make edits to the annotation. This approach is optimistic in assuming that conflicts will not arise and all parties are ethical. Eventually a mechanism will be put in place to deal with conflicts and @@ -232,9 +277,32 @@ router results: [note], updates: Object.values(updates), }); + ({ _key, _rev, ...result } = doc); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update", + status: "Success", + description: "Update annotation " + req.queryParams.id, + extra: result, + }); }, }); } catch (e) { + if (doc) { + ({ _key, _rev, ...result } = doc); + } + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update", + status: "Failure", + description: "Update annotation " + req.queryParams.id, + extra: result, + error: e, + }); g_lib.handleException(e, res); } }) @@ -253,6 +321,8 @@ router router .post("/comment/edit", function (req, res) { + let client = null; + let note = null; try { g_db._executeTransaction({ collections: { @@ -260,7 +330,19 @@ router write: ["n"], }, action: function () { - const client = g_lib.getUserFromClientID(req.queryParams.client); + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/comment/edit", + status: "Started", + description: + "Edit annotation comment 
" + + req.queryParams.id + + " Comment ID:" + + req.queryParams.comment_idx, + }); if (!req.queryParams.id.startsWith("n/")) throw [ @@ -274,7 +356,7 @@ router "Annotaion ID '" + req.queryParams.id + "' does not exist.", ]; - var note = g_db.n.document(req.queryParams.id); + note = g_db.n.document(req.queryParams.id); if (req.queryParams.comment_idx >= note.comments.length) throw [error.ERR_INVALID_PARAM, "Comment index out of range."]; @@ -300,9 +382,40 @@ router res.send({ results: [note.new], }); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/comment/edit", + status: "Success", + description: + "Edit annotation comment " + + req.queryParams.id + + " Comment ID:" + + req.queryParams.comment_idx, + extra: { + title: note.new.title, + creator: note.new.creator, + comments: note.new.comments[req.queryParams.comment_idx], + }, + }); }, }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/comment/edit", + status: "Failure", + description: + "Edit an annotation comment " + + req.queryParams.id + + " Comment ID:" + + req.queryParams.comment_idx, + extra: note, + error: e, + }); g_lib.handleException(e, res); } }) @@ -315,8 +428,17 @@ router router .get("/view", function (req, res) { + let client = null; try { - const client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); + client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Started", + description: "View annotation " + req.queryParams.id, + }); if (!req.queryParams.id.startsWith("n/")) throw [ @@ -369,7 +491,39 @@ router res.send({ results: [note], }); + logger.logRequestSuccess({ + client: client?._id, + 
correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Success", + description: "View annotation " + req.queryParams.id, + extra: { + title: note?.title, + creator: note?.creator, + lastComment: Array.isArray(note.comments) + ? note.comments[note.comments.length - 1] || null + : null, + }, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Failure", + description: "View annotation " + req.queryParams.id, + extra: { + title: note?.title, + creator: note?.creator, + lastComment: Array.isArray(note.comments) + ? note.comments[note.comments.length - 1] || null + : null, + }, + error: e, + }); + g_lib.handleException(e, res); } }) @@ -380,11 +534,20 @@ router router .get("/list/by_subject", function (req, res) { + let client = null; + let results = null; try { - const client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); + client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list/by_subject", + status: "Started", + description: "List annotations by subject " + req.queryParams.subject, + }); - var results, - qry, + var qry, id = g_lib.resolveDataCollID(req.queryParams.subject, client); if (!client) { @@ -411,7 +574,26 @@ router res.send({ results: results.toArray(), }); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list/by_subject", + status: "Success", + description: "List annotations by subject " + req.queryParams.subject, + extra: { found: results?._countTotal }, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + 
routePath: basePath + "/list/by_subject", + status: "Failure", + description: "List annotations by subject " + req.queryParams.subject, + extra: { found: results?._countTotal }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -422,7 +604,23 @@ router router .get("/purge", function (req, res) { + let client = null; + let id = null; + const purgedIds = []; try { + client = req.queryParams?.client; + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/purge", + status: "Started", + description: + "Purge old closed annotations older than " + + req.queryParams.age_sec + + " seconds", + }); + g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn"], @@ -432,23 +630,50 @@ router //console.log("note purge, age:", req.queryParams.age_sec ); var t = Date.now() / 1000 - req.queryParams.age_sec; - var id, - notes = g_db._query( - "for i in n filter i.state == " + - g_lib.NOTE_CLOSED + - " && i.ut < " + - t + - " and i.parent_id == null return i._id", - ); + + //maybe id = below + var notes = g_db._query( + "for i in n filter i.state == " + + g_lib.NOTE_CLOSED + + " && i.ut < " + + t + + " and i.parent_id == null return i._id", + ); while (notes.hasNext()) { id = notes.next(); - console.log("purging", id); + purgedIds.push(id); + // This will also delete all dependent annotations g_lib.annotationDelete(id); } }, }); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/purge", + status: "Success", + description: + "Purge old closed annotations older than " + + req.queryParams.age_sec + + " seconds.", + extra: `Ids of purged notes: ${purgedIds.join(", ")}`, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/purge", + status: "Failure", + description: + "Purge 
old closed annotations older than " + + req.queryParams.age_sec + + " seconds.", + extra: { last_purged_note: id }, + error: e, + }); g_lib.handleException(e, res); } }) diff --git a/core/database/foxx/api/posix_path.js b/core/database/foxx/api/posix_path.js index f171b1d8b..ae0f45cf0 100644 --- a/core/database/foxx/api/posix_path.js +++ b/core/database/foxx/api/posix_path.js @@ -1,5 +1,6 @@ "use strict"; +const error = require("./lib/error_codes"); const path = require("path"); module.exports = (function () { @@ -45,5 +46,12 @@ module.exports = (function () { return components.filter((component) => component !== ""); }; + obj.normalizePOSIXPath = function (a_posix_path) { + if (!a_posix_path || typeof a_posix_path !== "string") { + throw [error.ERR_INVALID_PARAM, "Invalid POSIX path"]; + } + return path.posix.normalize(a_posix_path); + }; + return obj; })(); diff --git a/core/database/foxx/api/proj_router.js b/core/database/foxx/api/proj_router.js index a237b561f..b8ccd3a68 100644 --- a/core/database/foxx/api/proj_router.js +++ b/core/database/foxx/api/proj_router.js @@ -9,6 +9,8 @@ const g_lib = require("./support"); const error = require("./lib/error_codes"); const permissions = require("./lib/permissions"); const g_tasks = require("./tasks"); +const logger = require("./lib/logger"); +const basePath = "prj"; module.exports = router; @@ -16,8 +18,16 @@ module.exports = router; router .get("/create", function (req, res) { + let result = null; try { - var result; + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/create", + status: "Started", + description: `Create new projects. 
Project ID: ${req.queryParams.id}`, + }); g_db._executeTransaction({ collections: { @@ -199,7 +209,44 @@ router }); res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/create", + status: "Success", + description: `Create new projects. Project ID: ${req.queryParams.id}`, + }); + if (req.queryParams.admins?.length) { + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/create", + status: "Success", + description: `Admins added: ${req.queryParams.admins}`, + }); + } + if (req.queryParams.members?.length) { + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/create", + status: "Success", + description: `Members added: ${req.queryParams.members}`, + }); + } } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/create", + status: "Failure", + description: `Create new projects. Project ID: ${req.queryParams.id}`, + error: e, + }); g_lib.handleException(e, res); } }) @@ -218,8 +265,17 @@ router router .get("/update", function (req, res) { + let proj = null; + let result = null; try { - var result; + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Started", + description: `Update project information. 
Project ID: ${req.queryParams.id}`, + }); g_db._executeTransaction({ collections: { @@ -264,7 +320,7 @@ router } } - var proj = g_db._update(proj_id, obj, { + proj = g_db._update(proj_id, obj, { keepNull: false, returnNew: true, }); @@ -367,7 +423,61 @@ router }); res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Success", + description: `Update project information. Project ID: ${req.queryParams.id}`, + extra: { + owner: proj.new?.owner, + title: proj.new?.title + ? proj.new?.title.length > 15 + ? proj.new?.title.slice(0, 15) + "…" + : proj.new?.title + : undefined, + }, + }); + if (req.queryParams.admins?.length) { + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Success", + description: `Admins added: ${req.queryParams.admins}`, + }); + } + if (req.queryParams.members?.length) { + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Success", + description: `Members added: ${req.queryParams.members}`, + }); + } } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Failure", + description: `Update project information. Project ID: ${req.queryParams.id}`, + extra: { + owner: proj?.new?.owner, + title: proj?.new?.title + ? proj?.new?.title.length > 15 + ? 
proj?.new?.title.slice(0, 15) + "…" + : proj?.new?.title + : undefined, + }, + + error: e, + }); g_lib.handleException(e, res); } }) @@ -386,7 +496,17 @@ router router .get("/view", function (req, res) { + let proj = null; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Started", + description: `View project information. ID: ${req.queryParams.id}`, + }); + // TODO Enforce view permission const client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); @@ -394,7 +514,7 @@ router if (!g_db.p.exists(req.queryParams.id)) throw [error.ERR_INVALID_PARAM, "No such project '" + req.queryParams.id + "'"]; - var proj = g_db.p.document({ + proj = g_db.p.document({ _id: req.queryParams.id, }); @@ -451,7 +571,26 @@ router delete proj._rev; res.send([proj]); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Success", + description: `View project information. ID: ${req.queryParams.id}`, + result: proj, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Failure", + description: `View project information. ID: ${req.queryParams.id}`, + extra: proj, + error: e, + }); g_lib.handleException(e, res); } }) @@ -462,114 +601,149 @@ router router .get("/list", function (req, res) { - const client = g_lib.getUserFromClientID(req.queryParams.client); - var qry, - result, - count = - (req.queryParams.as_owner ? 1 : 0) + - (req.queryParams.as_admin ? 1 : 0) + - (req.queryParams.as_member ? 
1 : 0); - - if (count) { - var comma = false; - - if (count > 1) qry = "for i in union(("; - else qry = ""; - - if (req.queryParams.as_owner) { - qry += "for i in 1..1 inbound @user owner filter IS_SAME_COLLECTION('p',i)"; - if (count > 1) qry += " return { _id: i._id, title: i.title, owner: i.owner }"; - comma = true; - } + let tot = null; + try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list", + status: "Started", + description: `List projects`, + }); - if (!count || req.queryParams.as_admin) { - qry += - (comma ? "),(" : "") + - "for i in 1..1 inbound @user admin filter IS_SAME_COLLECTION('p',i)"; - if (count > 1) - qry += " return { _id: i._id, title: i.title, owner: i.owner, creator: @user }"; - comma = true; - } + const client = g_lib.getUserFromClientID(req.queryParams.client); + var qry, + result, + count = + (req.queryParams.as_owner ? 1 : 0) + + (req.queryParams.as_admin ? 1 : 0) + + (req.queryParams.as_member ? 1 : 0); + + if (count) { + var comma = false; + + if (count > 1) qry = "for i in union(("; + else qry = ""; + + if (req.queryParams.as_owner) { + qry += "for i in 1..1 inbound @user owner filter IS_SAME_COLLECTION('p',i)"; + if (count > 1) qry += " return { _id: i._id, title: i.title, owner: i.owner }"; + comma = true; + } + + if (!count || req.queryParams.as_admin) { + qry += + (comma ? "),(" : "") + + "for i in 1..1 inbound @user admin filter IS_SAME_COLLECTION('p',i)"; + if (count > 1) + qry += + " return { _id: i._id, title: i.title, owner: i.owner, creator: @user }"; + comma = true; + } - if (req.queryParams.as_member) { - qry += - (comma ? "),(" : "") + - "for i,e,p in 2..2 inbound @user member, outbound owner filter p.vertices[1].gid == 'members'"; - if (count > 1) qry += " return { _id: i._id, title: i.title, owner: i.owner }"; + if (req.queryParams.as_member) { + qry += + (comma ? 
"),(" : "") + + "for i,e,p in 2..2 inbound @user member, outbound owner filter p.vertices[1].gid == 'members'"; + if (count > 1) qry += " return { _id: i._id, title: i.title, owner: i.owner }"; + } + + if (count > 1) qry += "))"; + } else { + qry = "for i in p"; } - if (count > 1) qry += "))"; - } else { - qry = "for i in p"; - } + qry += " sort i."; + + switch (req.queryParams.sort) { + case g_lib.SORT_ID: + qry += "_id"; + break; + case g_lib.SORT_TITLE: + qry += "title"; + break; + case g_lib.SORT_TIME_CREATE: + qry += "ct"; + break; + case g_lib.SORT_TIME_UPDATE: + qry += "ut"; + break; + default: + qry += "_id"; + break; + } - qry += " sort i."; - - switch (req.queryParams.sort) { - case g_lib.SORT_ID: - qry += "_id"; - break; - case g_lib.SORT_TITLE: - qry += "title"; - break; - case g_lib.SORT_TIME_CREATE: - qry += "ct"; - break; - case g_lib.SORT_TIME_UPDATE: - qry += "ut"; - break; - default: - qry += "_id"; - break; - } + if (req.queryParams.sort_rev) qry += " desc"; + + let user_id; + if (req.queryParams.subject) { + permissions.ensureAdminPermUser(client, req.queryParams.subject); + user_id = req.queryParams.subject; + } else user_id = client._id; + + if (req.queryParams.offset != undefined && req.queryParams.count != undefined) { + qry += " limit " + req.queryParams.offset + ", " + req.queryParams.count; + qry += " return { id: i._id, title: i.title, owner: i.owner, creator: i.creator }"; + //console.log("proj list qry:",qry); + result = g_db._query( + qry, + count + ? { + user: user_id, + } + : {}, + {}, + { + fullCount: true, + }, + ); + tot = result.getExtra().stats.fullCount; + result = result.toArray(); + result.push({ + paging: { + off: req.queryParams.offset, + cnt: req.queryParams.count, + tot: tot, + }, + }); + } else { + qry += " return { id: i._id, title: i.title, owner: i.owner, creator: i.creator }"; + //console.log("proj list qry:",qry); + result = g_db._query( + qry, + count + ? 
{ + user: user_id, + } + : {}, + ); + } - if (req.queryParams.sort_rev) qry += " desc"; - - var user_id; - if (req.queryParams.subject) { - permissions.ensureAdminPermUser(client, req.queryParams.subject); - } else user_id = client._id; - - if (req.queryParams.offset != undefined && req.queryParams.count != undefined) { - qry += " limit " + req.queryParams.offset + ", " + req.queryParams.count; - qry += " return { id: i._id, title: i.title, owner: i.owner, creator: i.creator }"; - //console.log("proj list qry:",qry); - result = g_db._query( - qry, - count - ? { - user: user_id, - } - : {}, - {}, - { - fullCount: true, - }, - ); - var tot = result.getExtra().stats.fullCount; - result = result.toArray(); - result.push({ - paging: { - off: req.queryParams.offset, - cnt: req.queryParams.count, - tot: tot, - }, + //res.send( g_db._query( qry, { user: client._id })); + res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list", + status: "Success", + description: `List projects`, + extra: { NumOfProjs: tot }, + }); + } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list", + status: "Failure", + description: `List projects`, + extra: { NumOfProjs: tot }, + error: e, }); - } else { - qry += " return { id: i._id, title: i.title, owner: i.owner, creator: i.creator }"; - //console.log("proj list qry:",qry); - result = g_db._query( - qry, - count - ? { - user: user_id, - } - : {}, - ); + g_lib.handleException(e, res); } - - //res.send( g_db._query( qry, { user: client._id })); - res.send(result); }) .queryParam("client", joi.string().required(), "Client ID") .queryParam("subject", joi.string().optional(), "Subject (user) ID") @@ -589,24 +763,19 @@ router "List projects. 
If no options are provided, lists all projects associated with client.", ); -router - .get("/search", function (req, res) { - try { - g_lib.getUserFromClientID(req.queryParams.client); - - res.send(g_db._query(req.queryParams.query, {})); - } catch (e) { - g_lib.handleException(e, res); - } - }) - .queryParam("client", joi.string().required(), "Client ID") - .queryParam("query", joi.string().required(), "Query") - .summary("Find all projects that match query") - .description("Find all projects that match query"); - router .post("/delete", function (req, res) { + let result = null; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/delete", + status: "Started", + description: `Delete project(s) and all associated data records and raw data. IDs: ${req.body.ids}`, + }); + g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn", "p", "alloc"], @@ -616,12 +785,31 @@ router action: function () { const client = g_lib.getUserFromClientID(req.queryParams.client); - var result = g_tasks.taskInitProjDelete(client, req.body.ids); + result = g_tasks.taskInitProjDelete(client, req.body.ids); res.send(result); }, }); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/delete", + status: "Success", + description: `Delete project(s) and all associated data records and raw data. IDs: ${req.body.ids}`, + extra: result, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/delete", + status: "Failure", + description: `Delete project(s) and all associated data records and raw data. 
IDs: ${req.body.ids}`, + extra: result, + error: e, + }); g_lib.handleException(e, res); } }) @@ -639,7 +827,17 @@ router router .get("/get_role", function (req, res) { + let role = null; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/get_role", + status: "Started", + description: `Get client/subject project role. ID: ${req.queryParams.id}`, + }); + const client = g_lib.getUserFromClientID(req.queryParams.client); var subj; @@ -653,12 +851,32 @@ router if (!g_db._exists(req.queryParams.id)) throw [error.ERR_NOT_FOUND, "Project, " + req.queryParams.id + ", not found"]; - var role = g_lib.getProjectRole(subj, req.queryParams.id); + role = g_lib.getProjectRole(subj, req.queryParams.id); res.send({ role: role, }); + + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/get_role", + status: "Success", + description: `Get client/subject project role. ID: ${req.queryParams.id}`, + extra: { role: role }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/get_role", + status: "Failure", + description: `Get client/subject project role. 
ID: ${req.queryParams.id}`, + extra: { role: role }, + error: e, + }); g_lib.handleException(e, res); } }) diff --git a/core/database/foxx/api/query_router.js b/core/database/foxx/api/query_router.js index 88e3accb4..ff679bb5b 100644 --- a/core/database/foxx/api/query_router.js +++ b/core/database/foxx/api/query_router.js @@ -17,24 +17,23 @@ module.exports = router; router .post("/create", function (req, res) { - let client = undefined; let result = undefined; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create", + status: "Started", + description: "Create Query", + }); g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn", "admin"], write: ["q", "owner"], }, action: function () { - client = g_lib.getUserFromClientID(req.queryParams.client); - logger.logRequestStarted({ - client: client?._id, - correlationId: req.headers["x-correlation-id"], - httpVerb: "POST", - routePath: basePath + "/create", - status: "Started", - description: "Create Query", - }); + const client = g_lib.getUserFromClientID(req.queryParams.client); // Check max number of saved queries if (client.max_sav_qry >= 0) { @@ -83,7 +82,7 @@ router delete qry.qry_end; delete qry.qry_filter; delete qry.params; - delete qry.lmit; + delete qry.limit; result = qry; }, @@ -91,7 +90,7 @@ router res.send(result); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "POST", routePath: basePath + "/create", @@ -101,7 +100,7 @@ router }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "POST", routePath: basePath + "/create", @@ -134,25 +133,24 @@ router router .post("/update", function (req, res) { - let client = undefined; let result = undefined; try { + logger.logRequestStarted({ + 
client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update", + status: "Started", + description: "Update a saved query", + }); + g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn", "admin"], write: ["q", "owner"], }, action: function () { - client = g_lib.getUserFromClientID(req.queryParams.client); - logger.logRequestStarted({ - client: client?._id, - correlationId: req.headers["x-correlation-id"], - httpVerb: "POST", - routePath: basePath + "/update", - status: "Started", - description: "Update a saved query", - }); - + const client = g_lib.getUserFromClientID(req.queryParams.client); var qry = g_db.q.document(req.body.id); if (client._id != qry.owner && !client.is_admin) { @@ -191,14 +189,14 @@ router delete qry.qry_end; delete qry.qry_filter; delete qry.params; - delete qry.lmit; + delete qry.limit; result = qry; }, }); res.send(result); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "POST", routePath: basePath + "/update", @@ -208,7 +206,7 @@ router }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "POST", routePath: basePath + "/update", @@ -241,12 +239,12 @@ router router .get("/view", function (req, res) { - let client = undefined; let qry = undefined; + let client = null; try { client = g_lib.getUserFromClientID(req.queryParams.client); logger.logRequestStarted({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/view", @@ -267,11 +265,11 @@ router delete qry.qry_end; delete qry.qry_filter; delete qry.params; - delete qry.lmit; + delete qry.limit; res.send(qry); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: 
req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/view", @@ -281,7 +279,7 @@ router }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/view", @@ -306,7 +304,7 @@ router client = g_lib.getUserFromClientID(req.queryParams.client); var owner; logger.logRequestStarted({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/delete", @@ -338,7 +336,7 @@ router g_graph.q.remove(owner._from); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/delete", @@ -347,9 +345,10 @@ router extra: req.queryParams.ids[i], }); } + res.send(); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/delete", @@ -369,12 +368,11 @@ router router .get("/list", function (req, res) { - let client = undefined; let result = undefined; try { - client = g_lib.getUserFromClientID(req.queryParams.client); + const client = g_lib.getUserFromClientID(req.queryParams.client); logger.logRequestStarted({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/list", @@ -416,7 +414,7 @@ router res.send(result); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/list", @@ -429,7 +427,7 @@ router }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/list", @@ -448,6 
+446,10 @@ router .description("List client saved queries"); function execQuery(client, mode, published, orig_query) { + // Make sure we are always dealing with strings. + if (typeof mode === "string" && mode in g_lib) { + mode = g_lib[mode]; + } var col_chk = true, ctxt = client._id; let query = { @@ -474,7 +476,7 @@ function execQuery(client, mode, published, orig_query) { }, ) .toArray(); - if (!query.params.cols) { + if (!query.params.cols.length) { throw [ error.ERR_PERM_DENIED, "No access to user '" + query.params.owner + "' data/collections.", @@ -508,7 +510,7 @@ function execQuery(client, mode, published, orig_query) { }, ) .toArray(); - if (!query.params.cols) { + if (!query.params.cols.length) { throw [ error.ERR_PERM_DENIED, "No access to project '" + query.params.owner + "'.", @@ -645,12 +647,11 @@ function execQuery(client, mode, published, orig_query) { router .get("/exec", function (req, res) { - let client = undefined; let results = undefined; try { - client = g_lib.getUserFromClientID(req.queryParams.client); + let client = g_lib.getUserFromClientID(req.queryParams.client); logger.logRequestStarted({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/exec", @@ -660,6 +661,15 @@ router var qry = g_db.q.document(req.queryParams.id); + // Legacy query documents may have `params` stored as a JSON string + // rather than an object, because the original schema validation + // (joi.any()) accepted both. New documents are stored as objects + // (joi.object()), but old records remain until migrated. + // TODO: Remove after backfilling existing queries in ArangoDB. 
+ if (typeof qry.params === "string") { + qry.params = JSON.parse(qry.params); + } + if (client._id != qry.owner && !client.is_admin) { throw error.ERR_PERM_DENIED; } @@ -673,23 +683,29 @@ router res.send(results); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/exec", status: "Success", description: "Execute specified queries", - extra: results, + extra: { + count: Array.isArray(results) ? results.length : undefined, + query_id: req.queryParams.id, + }, }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/exec", status: "Failure", description: "Execute specified queries", - extra: results, + extra: { + count: Array.isArray(results) ? results.length : undefined, + query_id: req.queryParams.id, + }, error: e, }); g_lib.handleException(e, res); @@ -705,11 +721,9 @@ router router .post("/exec/direct", function (req, res) { let results = undefined; - let client = undefined; try { - client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); logger.logRequestStarted({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "POST", routePath: basePath + "/exec/direct", @@ -717,6 +731,8 @@ router description: "Execute published data search query", }); + let client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); + const query = { ...req.body, params: req.body.params, @@ -725,23 +741,27 @@ router res.send(results); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "POST", routePath: basePath + "/exec/direct", status: "Success", description: "Execute published data search query", - extra: results, + extra: { + count: Array.isArray(results) ? 
results.length : undefined, + }, }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "POST", routePath: basePath + "/exec/direct", status: "Failure", description: "Execute published data search query", - extra: results, + extra: { + count: Array.isArray(results) ? results.length : undefined, + }, error: e, }); g_lib.handleException(e, res); diff --git a/core/database/foxx/api/record.js b/core/database/foxx/api/record.js index 1c3fbbf97..dd66ec597 100644 --- a/core/database/foxx/api/record.js +++ b/core/database/foxx/api/record.js @@ -66,23 +66,23 @@ class Record { /** * Generates the full path to the record as it should appear in the repository. * - * @param {object} loc - The location object which specifies the owner of the record. - * @param {string} basePath - The base path where the record is stored. - * - * @returns {string} - the path to the record or null if error + * @param {string} uid - The owner uid (e.g. "u/bob" or "p/myproject") + * @param {string} basePath - The base path of the repository. + * @returns {string|null} - the path to the record or null if error */ - _pathToRecord(loc, basePath) { + _pathToRecord(uid, basePath) { const path = basePath.endsWith("/") ? 
basePath : basePath + "/"; - if (loc.uid.charAt(0) == "u") { - return path + "user/" + loc.uid.substr(2) + "/" + this.#key; - } else if (loc.uid.charAt(0) == "p") { - return path + "project/" + loc.uid.substr(2) + "/" + this.#key; + if (uid.charAt(0) === "u") { + return path + "user/" + uid.substr(2) + "/" + this.#key; + } else if (uid.charAt(0) === "p") { + return path + "project/" + uid.substr(2) + "/" + this.#key; } else { this.#error = error.ERR_INTERNAL_FAULT; - this.#err_msg = "Provided path does not fit within supported directory "; - this.#err_msg += "structure for repository, no user or project folder has"; - this.#err_msg += " been determined for the record."; - console.log(e); + this.#err_msg = + "Provided uid does not fit within supported directory " + + "structure for repository, no user or project folder has " + + "been determined for the record. uid: " + + uid; return null; } } @@ -174,77 +174,71 @@ class Record { return !!this.#alloc; } - /** - * Validates if the provided record path is consistent with the database. - * - * @param {string} a_path - The path to validate. - * @returns {boolean} True if consistent, otherwise false. - */ isPathConsistent(a_path) { - // This function will populate the this.#loc member and the this.#alloc - // member if (!this.isManaged()) { return false; } - // If there is a new repo we need to check the path there and use that + if (!a_path.startsWith("/")) { + a_path = "/" + a_path; + } + + // If record is in flight, only check new location if (this.#loc.hasOwnProperty("new_repo") && this.#loc.new_repo) { - // Below we get the allocation associated with data item by - // 1. Checking if the data item is in flight, is in the process - // of being moved to a new location or new owner and using that - // oweners id. - // 2. Using the loc.uid parameter if not inflight to get the owner - // id. + const new_uid = this.#loc.new_owner ? 
this.#loc.new_owner : this.#loc.uid; const new_alloc = g_db.alloc.firstExample({ - _from: this.#loc.new_owner ? this.#loc.new_owner : this.#loc.uid, + _from: new_uid, _to: this.#loc.new_repo, }); - // If no allocation is found for the item throw an error - // if the paths do not align also throw an error. if (!new_alloc) { this.#error = error.ERR_PERM_DENIED; this.#err_msg = - "Permission denied, '" + this.#key + "' is not part of an allocation '"; + "Permission denied, '" + this.#key + "' is not part of an allocation'"; return false; } - this.#repo = g_db._document(this.#loc.new_repo); - - if (!this.#repo) { + const new_repo = g_db._document(this.#loc.new_repo); + if (!new_repo) { this.#error = error.ERR_INTERNAL_FAULT; this.#err_msg = - "Unable to find repo that record is meant to be allocated too, '" + + "Unable to find repo '" + this.#loc.new_repo + - "' record '" + - this.#data_id; + "' for record '" + + this.#data_id + + "'"; return false; } - // If path is missing the starting "/" add it back in - if (!a_path.startsWith("/") && this.#repo.path.startsWith("/")) { - a_path = "/" + a_path; + let new_path = this._pathToRecord(new_uid, new_repo.path); + if (new_path === a_path) { + return true; } - let stored_path = this._pathToRecord(this.#loc, this.#repo.path); - - if (!this._comparePaths(stored_path, a_path)) { - return false; - } - } else { - this.#repo = g_db._document(this.#loc._to); + this.#error = error.ERR_PERM_DENIED; + this.#err_msg = + "Record path is not consistent with repo. 
Expected: " + + new_path + + " but got: " + + a_path; + return false; + } - if (!a_path.startsWith("/") && this.#repo.path.startsWith("/")) { - a_path = "/" + a_path; - } - let stored_path = this._pathToRecord(this.#loc, this.#repo.path); + // No in-flight move — check current location + this.#repo = g_db._document(this.#loc._to); + let current_path = this._pathToRecord(this.#loc.uid, this.#repo.path); - // If there is no new repo check that the paths align - if (!this._comparePaths(stored_path, a_path)) { - return false; - } + if (current_path === a_path) { + return true; } - return true; + + this.#error = error.ERR_PERM_DENIED; + this.#err_msg = + "Record path is not consistent with repo. Expected: " + + current_path + + " but got: " + + a_path; + return false; } } diff --git a/core/database/foxx/api/repo.js b/core/database/foxx/api/repo.js index 226179e0b..9d1c5a9e2 100644 --- a/core/database/foxx/api/repo.js +++ b/core/database/foxx/api/repo.js @@ -185,6 +185,40 @@ class Repo { return PathType.UNKNOWN; } + + static resolveFromPath(file_path) { + var canonical = pathModule.normalizePOSIXPath(file_path); + + if (canonical !== file_path) { + throw [error.ERR_PERM_DENIED, "Path contains invalid sequences: " + file_path]; + } + + var repos = g_db.repo.all().toArray(); + var best_match = null; + var best_length = 0; + + for (var i = 0; i < repos.length; i++) { + var repo_path = repos[i].path; + if (repo_path.charAt(repo_path.length - 1) !== "/") { + repo_path += "/"; + } + if (canonical.indexOf(repo_path) === 0 || canonical === repo_path.slice(0, -1)) { + if (repo_path.length > best_length) { + best_match = repos[i]; + best_length = repo_path.length; + } + } + } + + if (!best_match) { + throw [ + error.ERR_PERM_DENIED, + "File path does not match any known repository: " + file_path, + ]; + } + + return new Repo(best_match._id); + } } module.exports = { Repo, PathType }; diff --git a/core/database/foxx/api/repo_router.js b/core/database/foxx/api/repo_router.js index 
6f79c53fb..64a913490 100644 --- a/core/database/foxx/api/repo_router.js +++ b/core/database/foxx/api/repo_router.js @@ -12,6 +12,9 @@ const g_db = require("@arangodb").db; const g_lib = require("./support"); const g_tasks = require("./tasks"); +const logger = require("./lib/logger"); +const basePath = "repo"; + module.exports = router; function validateAndNormalizeRepoPath(obj) { @@ -47,6 +50,14 @@ function validateAndNormalizeRepoPath(obj) { router .get("/list", function (req, res) { var client; + logger.logRequestStarted({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list", + status: "Started", + description: "List repo servers administered by client", + }); if (req.queryParams.client) { client = g_lib.getUserFromClientID(req.queryParams.client); @@ -95,6 +106,15 @@ router } res.send(result); + logger.logRequestSuccess({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list", + status: "Success", + description: "List repo servers administered by client", + extra: { NumOfRepoServers: result.length }, + }); }) .queryParam("client", joi.string().allow("").optional(), "Client ID") .queryParam("details", joi.boolean().optional(), "Show additional record details") @@ -106,8 +126,17 @@ router router .get("/view", function (req, res) { + let repo = null; try { - var repo = g_db.repo.document(req.queryParams.id); + logger.logRequestStarted({ + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Started", + description: `View repo server record: ${req.queryParams.id}`, + }); + + repo = g_db.repo.document(req.queryParams.id); repo.admins = []; var admins = g_db.admin @@ -122,7 +151,32 @@ router delete repo._rev; res.send([repo]); + logger.logRequestSuccess({ + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + 
"/view", + status: "Success", + description: `View repo server record: ${req.queryParams.id}`, + extra: { + type: repo.type, + capacity: repo.capacity, + admins: repo.admins, + }, + }); } catch (e) { + logger.logRequestFailure({ + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Failure", + description: `View repo server record: ${req.queryParams.id}`, + extra: { + type: repo?.type, + capacity: repo?.capacity, + admins: repo?.admins, + }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -132,7 +186,17 @@ router router .post("/create", function (req, res) { + let repo_doc = null; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/create", + status: "Started", + description: `Create a server record: ${req.body.id}`, + }); + g_db._executeTransaction({ collections: { read: ["u"], @@ -166,7 +230,7 @@ router } const repo = Repositories.createRepositoryByType(obj).raiseIfError(); - const repo_doc = repo.save().raiseIfError(); + repo_doc = repo.save().raiseIfError(); for (const adminId of req.body.admins) { if (!g_db._exists(adminId)) @@ -185,7 +249,34 @@ router res.send([repo_doc]); }, }); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/create", + status: "Success", + description: `Create a server record: ${req.body.id}`, + extra: { + type: repo_doc?.type, + capacity: repo_doc?.capacity, + admins: repo_doc?.admins, + }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/create", + status: "Failure", + description: `Create a server record: ${req.body.id}`, + extra: { + type: repo_doc?.type, + capacity: repo_doc?.capacity, + admins: repo_doc?.admins, + }, + 
error: e, + }); g_lib.handleException(e, res); } }) @@ -215,7 +306,16 @@ router router .post("/update", function (req, res) { + let repo = null; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Started", + description: `Update a repo server record: ${req.queryParams.id}`, + }); g_db._executeTransaction({ collections: { read: ["u"], @@ -294,7 +394,33 @@ router res.send([repo.new]); }, }); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Success", + description: `Update a repo server record: ${req.queryParams.id}`, + extra: { + capacity: req.queryParams.capacity, + admins: req.queryParams.admins, + }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Failure", + description: `Update a repo server record: ${req.queryParams.id}`, + extra: { + capacity: req.queryParams.capacity, + admins: req.queryParams.admins, + }, + error: e, + }); + g_lib.handleException(e, res); } }) @@ -323,6 +449,15 @@ router router .get("/delete", function (req, res) { try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/delete", + status: "Started", + description: `Delete a repo server record: ${req.queryParams.id}`, + }); + g_db._executeTransaction({ collections: { read: ["lock"], @@ -361,7 +496,6 @@ router var alloc = g_db._query("for v in 1..1 inbound @repo alloc return {id:v._id}", { repo: req.queryParams.id, }); - console.log(alloc); if (alloc.hasNext()) throw [ error.ERR_IN_USE, @@ -372,7 +506,24 @@ router graph.repo.remove(req.queryParams.id); }, }); + 
logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/delete", + status: "Success", + description: `Delete a repo server record: ${req.queryParams.id}`, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/delete", + status: "Failure", + description: `Delete a repo server record: ${req.queryParams.id}`, + error: e, + }); g_lib.handleException(e, res); } }) @@ -393,6 +544,15 @@ router */ router .get("/calc_size", function (req, res) { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/calc_size", + status: "Started", + description: `Calculate per-repo sizes for specified data records and collections: ${req.queryParams.items.length}`, + }); + g_lib.getUserFromClientID(req.queryParams.client); // TODO Check permissions @@ -411,6 +571,14 @@ router } res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/calc_size", + status: "Success", + description: `Calculate per-repo sizes for specified data records and collections: ${req.queryParams.items.length}`, + }); }) .queryParam("client", joi.string().required(), "Client ID") .queryParam( @@ -462,6 +630,14 @@ function calcSize(a_item, a_recurse, a_depth, a_visited, a_result) { router .get("/alloc/list/by_repo", function (req, res) { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/list/by_repo", + status: "Started", + description: `List all allocations for a repo: ${req.queryParams.repo}`, + }); var client = g_lib.getUserFromClientID(req.queryParams.client); var 
repo = g_db.repo.document(req.queryParams.repo); @@ -477,6 +653,14 @@ router .toArray(); res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/list/by_repo", + status: "Success", + description: `List all allocations for a repo: ${req.queryParams.repo}`, + }); }) .queryParam("client", joi.string().required(), "Client ID") .queryParam("repo", joi.string().required(), "Repo ID") @@ -485,6 +669,14 @@ router router .get("/alloc/list/by_owner", function (req, res) { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/list/by_owner", + status: "Started", + description: `List owner's repo allocations: ${req.queryParams.owner}`, + }); var obj, result = g_db.alloc .byExample({ @@ -511,6 +703,14 @@ router } res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/list/by_owner", + status: "Success", + description: `List owner's repo allocations: ${req.queryParams.owner}`, + }); }) .queryParam("owner", joi.string().required(), "Owner ID (user or project)") .queryParam("stats", joi.boolean().optional(), "Include statistics") @@ -519,6 +719,14 @@ router router .get("/alloc/list/by_object", function (req, res) { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/list/by_object", + status: "Started", + description: `List object repo allocations: ${req.queryParams.object}`, + }); var client = g_lib.getUserFromClientID(req.queryParams.client); var obj_id = g_lib.resolveID(req.queryParams.object, client); var owner_id = g_db.owner.firstExample({ @@ -546,6 +754,14 @@ router } res.send(result); + 
logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/list/by_object", + status: "Success", + description: `List object repo allocations: ${req.queryParams.object}`, + }); }) .queryParam("client", joi.string().required(), "Client ID") .queryParam("object", joi.string().required(), "Object ID (data or collection ID or alias)") @@ -554,7 +770,16 @@ router router .get("/alloc/view", function (req, res) { + let result = null; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/view", + status: "Started", + description: `View allocation details: ${req.queryParams.repo}`, + }); var owner_id, client = g_lib.getUserFromClientID(req.queryParams.client); @@ -571,13 +796,13 @@ router owner_id = client._id; } - var obj, - result = g_db.alloc - .byExample({ - _from: owner_id, - _to: req.queryParams.repo, - }) - .toArray(); + var obj; + result = g_db.alloc + .byExample({ + _from: owner_id, + _to: req.queryParams.repo, + }) + .toArray(); for (var i in result) { obj = result[i]; @@ -592,7 +817,26 @@ router } res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/view", + status: "Success", + description: `View allocation details: ${req.queryParams.repo}`, + extra: result, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/view", + status: "Failure", + description: `View allocation details: ${req.queryParams.repo}`, + extra: result, + error: e, + }); g_lib.handleException(e, res); } }) @@ -659,12 +903,40 @@ function getAllocStats(a_repo, a_subject) { router .get("/alloc/stats", function (req, res) { + let 
result = null; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/stats", + status: "Started", + description: `View allocation statistics: ${req.queryParams.repo}`, + }); var client = g_lib.getUserFromClientID(req.queryParams.client); permissions.ensureAdminPermRepo(client, req.queryParams.repo); - var result = getAllocStats(req.queryParams.repo, req.queryParams.subject); + result = getAllocStats(req.queryParams.repo, req.queryParams.subject); res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/stats", + status: "Success", + description: `View allocation statistics: ${req.queryParams.repo}`, + extra: result, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/stats", + status: "Failure", + description: `View allocation statistics: ${req.queryParams.repo}`, + extra: result, + error: e, + }); g_lib.handleException(e, res); } }) @@ -676,7 +948,17 @@ router router .get("/alloc/create", function (req, res) { + let result = null; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/create", + status: "Started", + description: `Create user/projects repo allocation: ${req.queryParams.repo}. 
Subject: ${req.queryParams.subject}`, + }); + g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn", "repo", "admin"], @@ -700,9 +982,40 @@ router ); res.send(result); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/create", + status: "Success", + description: `Create user/projects repo allocation: ${req.queryParams.repo}. Subject: ${req.queryParams.subject}`, + extra: { + task_id: result.task._id, + repo: result.task.state.repo_id, + data_limit: result.task.state.data_limit, + rec_limit: result.task.state.rec_limit, + status: "queued", + }, + }); }, }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/create", + status: "Failure", + description: `Create user/projects repo allocation: ${req.queryParams.repo}. Subject: ${req.queryParams.subject}`, + extra: { + task_id: result?.task?._id, + repo: result?.task?.state?.repo_id, + data_limit: result?.task?.state?.data_limit, + rec_limit: result?.task?.state?.rec_limit, + status: "queued", + }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -726,7 +1039,17 @@ router router .get("/alloc/delete", function (req, res) { + let result = null; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/delete", + status: "Started", + description: `Delete user/projects repo allocation: ${req.queryParams.repo}. 
Subject: ${req.queryParams.subject}`, + }); + g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn", "repo", "admin"], @@ -741,16 +1064,31 @@ router subject_id = req.queryParams.subject; else subject_id = g_lib.getUserFromClientID(req.queryParams.subject)._id; - var result = g_tasks.taskInitAllocDelete( - client, - req.queryParams.repo, - subject_id, - ); + result = g_tasks.taskInitAllocDelete(client, req.queryParams.repo, subject_id); res.send(result); }, }); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/delete", + status: "Success", + description: `Delete user/projects repo allocation: ${req.queryParams.repo}. Subject: ${req.queryParams.subject}`, + extra: result, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/delete", + status: "Failure", + description: `Delete user/projects repo allocation: ${req.queryParams.repo}. Subject: ${req.queryParams.subject}`, + extra: result, + error: e, + }); g_lib.handleException(e, res); } }) @@ -765,6 +1103,14 @@ router router .get("/alloc/set", function (req, res) { try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/set", + status: "Started", + description: `Set user/projects repo allocation: ${req.queryParams.repo}. Subject: ${req.queryParams.subject}`, + }); g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn", "repo", "admin"], @@ -806,7 +1152,24 @@ router }); }, }); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/set", + status: "Success", + description: `Set user/projects repo allocation: ${req.queryParams.repo}. 
Subject: ${req.queryParams.subject}`, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/set", + status: "Failure", + description: `Set user/projects repo allocation: ${req.queryParams.repo}. Subject: ${req.queryParams.subject}`, + error: e, + }); g_lib.handleException(e, res); } }) @@ -829,6 +1192,14 @@ router router .get("/alloc/set/default", function (req, res) { try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/set/default", + status: "Started", + description: `Set user/projects repo allocation as default: ${req.queryParams.repo}`, + }); g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn", "repo", "admin"], @@ -840,7 +1211,7 @@ router if (req.queryParams.subject) { if (req.queryParams.subject.startsWith("p/")) { - if (!g_db._exists(subject_id)) + if (!g_db._exists(req.queryParams.subject)) throw [ error.ERR_NOT_FOUND, "Project, " + req.queryParams.subject + ", not found", @@ -888,7 +1259,24 @@ router } }, }); + logger.logRequestSuccess({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/set/default", + status: "Success", + description: `Set user/projects repo allocation as default: ${req.queryParams.repo}`, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/alloc/set/default", + status: "Failure", + description: `Set user/projects repo allocation as default: ${req.queryParams.repo}`, + error: e, + }); g_lib.handleException(e, res); } }) diff --git a/core/database/foxx/api/schema_router.js b/core/database/foxx/api/schema_router.js index 51734b678..7dc84fee8 100644 --- 
a/core/database/foxx/api/schema_router.js +++ b/core/database/foxx/api/schema_router.js @@ -8,6 +8,8 @@ const error = require("./lib/error_codes"); const g_db = require("@arangodb").db; const g_lib = require("./support"); const g_graph = require("@arangodb/general-graph")._graph("sdmsg"); +const logger = require("./lib/logger"); +const basePath = "schema"; module.exports = router; @@ -78,7 +80,16 @@ function _resolveDeps(a_sch_id, a_refs) { router .post("/create", function (req, res) { + let sch = null; try { + logger.logRequestStarted({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create", + status: "Started", + description: `Create schema. ID: ${req.body.id}`, + }); g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn"], @@ -115,7 +126,7 @@ router g_lib.procInputParam(req.body, "_sch_id", false, obj); g_lib.procInputParam(req.body, "desc", false, obj); - var sch = g_db.sch.save(obj, { + sch = g_db.sch.save(obj, { returnNew: true, }).new; @@ -129,7 +140,36 @@ router res.send([sch]); }, }); + logger.logRequestSuccess({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create", + status: "Success", + description: `Create schema. ID: ${req.body.id}`, + extra: { + sch_id: sch?.id, + own_id: sch?.own_id, + pub: req.body.pub, + sys: req.body.sys, + }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/create", + status: "Failure", + description: `Create schema. 
ID: ${req.body.id}`, + extra: { + sch_id: sch?.id, + own_id: sch?.own_id, + pub: req.body.pub, + sys: req.body.sys, + }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -151,7 +191,17 @@ router router .post("/update", function (req, res) { + let sch_new = null; try { + logger.logRequestStarted({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update", + status: "Started", + description: `Update schema. Schema ID: ${req.queryParams.id}`, + }); + g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn"], @@ -243,7 +293,7 @@ router obj.def = req.body.def; } - var sch_new = g_db.sch.update(sch_old._id, obj, { + sch_new = g_db.sch.update(sch_old._id, obj, { returnNew: true, mergeObjects: false, keepNull: false, @@ -259,7 +309,38 @@ router res.send([sch_new]); }, }); + logger.logRequestSuccess({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update", + status: "Success", + description: `Update schema. Schema ID: ${req.queryParams.id}`, + extra: { + id: sch_new.id, + own_id: sch_new.own_id, + pub: sch_new.pub, + sys: req.body?.sys ?? false, + ver: sch_new.ver, + }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/update", + status: "Failure", + description: `Update schema. Schema ID: ${req.queryParams.id}`, + extra: { + id: sch_new?.id, + own_id: sch_new?.own_id, + pub: sch_new?.pub, + sys: req.body?.sys ?? 
false, + ver: sch_new?.ver, + }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -282,7 +363,17 @@ router router .post("/revise", function (req, res) { + let sch_new = null; try { + logger.logRequestStarted({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/revise", + status: "Started", + description: `Revise schema. Schema ID: ${req.queryParams.id}`, + }); + g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn"], @@ -364,7 +455,7 @@ router delete sch._key; delete sch._rev; - var sch_new = g_db.sch.save(sch, { + sch_new = g_db.sch.save(sch, { returnNew: true, }).new; @@ -384,7 +475,39 @@ router res.send([sch_new]); }, }); + logger.logRequestSuccess({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/revise", + status: "Success", + description: `Revise schema. Schema ID: ${req.queryParams.id}`, + extra: { + own_id: sch_new.own_id, + own_nm: sch_new.own_nm, + id: sch_new.id, + pub: req.body.pub, + sys: req.body.sys, + }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/revise", + status: "Failure", + description: `Revise schema. Schema ID: ${req.queryParams.id}`, + extra: { + own_id: sch_new?.own_id, + own_nm: sch_new?.own_nm, + id: sch_new?.id, + pub: req.body?.pub, + sys: req.body?.sys, + }, + error: e, + }); + g_lib.handleException(e, res); } }) @@ -406,18 +529,29 @@ router router .post("/delete", function (req, res) { + let sch_old = null; try { + logger.logRequestStarted({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/delete", + status: "Started", + description: `Delete schema. 
Schema ID: ${req.queryParams.id}`, + }); + const client = g_lib.getUserFromClientID(req.queryParams.client); var idx = req.queryParams.id.indexOf(":"); if (idx < 0) { throw [error.ERR_INVALID_PARAM, "Schema ID missing version number suffix."]; } var sch_id = req.queryParams.id.substr(0, idx), - sch_ver = parseInt(req.queryParams.id.substr(idx + 1)), - sch_old = g_db.sch.firstExample({ - id: sch_id, - ver: sch_ver, - }); + sch_ver = parseInt(req.queryParams.id.substr(idx + 1)); + + sch_old = g_db.sch.firstExample({ + id: sch_id, + ver: sch_ver, + }); if (!sch_old) throw [error.ERR_NOT_FOUND, "Schema '" + req.queryParams.id + "' not found."]; @@ -457,7 +591,25 @@ router } g_graph.sch.remove(sch_old._id); + logger.logRequestSuccess({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/delete", + status: "Success", + description: `Delete schema. Schema ID: ${req.queryParams.id}`, + extra: { deleted: sch_old._id }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "POST", + routePath: basePath + "/delete", + status: "Failure", + description: `Delete schema. Schema ID: ${req.queryParams.id}`, + extra: { deleted: sch_old?._id }, + }); g_lib.handleException(e, res); } }) @@ -468,18 +620,27 @@ router router .get("/view", function (req, res) { + let sch = null; try { + logger.logRequestStarted({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Started", + description: `View schema. 
Schema ID: ${req.queryParams.id}`, + }); const client = g_lib.getUserFromClientID(req.queryParams.client); var idx = req.queryParams.id.indexOf(":"); if (idx < 0) { throw [error.ERR_INVALID_PARAM, "Schema ID missing version number suffix."]; } var sch_id = req.queryParams.id.substr(0, idx), - sch_ver = parseInt(req.queryParams.id.substr(idx + 1)), - sch = g_db.sch.firstExample({ - id: sch_id, - ver: sch_ver, - }); + sch_ver = parseInt(req.queryParams.id.substr(idx + 1)); + sch = g_db.sch.firstExample({ + id: sch_id, + ver: sch_ver, + }); if (!sch) throw [error.ERR_NOT_FOUND, "Schema '" + req.queryParams.id + "' not found."]; @@ -515,7 +676,34 @@ router fixSchOwnNm(sch); res.send([sch]); + logger.logRequestSuccess({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Success", + description: `View schema. Schema ID: ${req.queryParams.id}`, + extra: { + own_id: sch.own_id, + own_nm: sch.own_nm, + id: sch.id, + pub: sch.pub, + sys: sch.sys, + }, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Failure", + description: `View schema. 
Schema ID: ${req.queryParams.id}`, + extra: { + pub: sch?.pub, + sys: sch?.sys, + }, + }); g_lib.handleException(e, res); } }) @@ -527,11 +715,20 @@ router router .get("/search", function (req, res) { + let result = null; try { + logger.logRequestStarted({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/search", + status: "Started", + description: `Search schema.`, + }); + const client = g_lib.getUserFromClientID(req.queryParams.client); var qry, par = {}, - result, off = 0, cnt = 50, doc; @@ -628,8 +825,28 @@ router }, }); + const first = result.find((r) => r.own_id); res.send(result); + logger.logRequestSuccess({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/search", + status: "Success", + description: `Search schema.`, + extra: first ? { own_id: first.own_id, own_nm: first.own_nm } : {}, + }); } catch (e) { + logger.logRequestFailure({ + client: req.queryParams?.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/search", + status: "Failure", + description: `Search schema.`, + extra: result, + error: e, + }); g_lib.handleException(e, res); } }) diff --git a/core/database/foxx/api/support.js b/core/database/foxx/api/support.js index 8f7ec612e..12a526397 100644 --- a/core/database/foxx/api/support.js +++ b/core/database/foxx/api/support.js @@ -401,8 +401,6 @@ module.exports = (function () { };*/ obj.handleException = function (e, res) { - console.log("Service exception:", e); - if (obj.isInteger(e) && e >= 0 && e < error.ERR_COUNT) { res.throw(error.ERR_INFO[e][0], error.ERR_INFO[e][1]); } else if (Array.isArray(e)) { @@ -528,8 +526,6 @@ module.exports = (function () { for (var i in potential_uuids) { uuids.push("uuid/" + potential_uuids[i]); } - console.log("resolveUUIDsToID"); - console.log("uuids: ", uuids); var result = obj.db ._query("for i in 
ident filter i._to in @ids return distinct document(i._from)", { ids: uuids, @@ -564,8 +560,6 @@ module.exports = (function () { for (var i in potential_uuids) { uuids.push("uuid/" + potential_uuids[i]); } - console.log("resolveUUIDsToID_noexcept"); - console.log("uuids: ", uuids); var result = obj.db ._query("for i in ident filter i._to in @ids return distinct document(i._from)", { ids: uuids, @@ -578,7 +572,6 @@ module.exports = (function () { var first_uuid = result[0]._id; // Next we need to make sure the provided ids are all the same if there is more than one for (var i = 1; i < result.length; i++) { - console.log("resolveUUID comparing " + first_uuid + " with " + result[i]); if (first_uuid != result[i]._id) { return; } @@ -623,8 +616,11 @@ module.exports = (function () { // Client ID can be an SDMS uname (xxxxx...), a UUID (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx), or an account (domain.uname) // UUID are defined by length and format, accounts have a "." (and known domains), SDMS unames have no "." 
or "-" characters + if (!a_client_id) { + throw [error.ERR_INVALID_PARAM, "Client ID must be provided"]; + } + var params; - console.log("getUserFromClient id: ", a_client_id); if (a_client_id.startsWith("u/")) { if (!obj.db.u.exists(a_client_id)) { @@ -649,7 +645,6 @@ module.exports = (function () { // determine the UUID, if they are not, then we will throw an error for now, var unambiguous_id = obj.resolveUUIDsToID(a_client_id); if (!unambiguous_id) { - console.log("Undefined"); return; } //params = { 'id': unambiguous_id }; @@ -706,7 +701,6 @@ module.exports = (function () { // determine the UUID, if they are not, then we will throw an error for now, var unambiguous_id = obj.resolveUUIDsToID_noexcept(a_client_id); if (!unambiguous_id) { - console.log("Undefined"); return; } //params = { 'id': unambiguous_id }; @@ -735,8 +729,6 @@ module.exports = (function () { }; obj.findUserFromUUIDs = function (a_uuids) { - console.log("findUserFromUUIDs"); - console.log("a_uuids: ", a_uuids); var result = obj.db ._query("for i in ident filter i._to in @ids return distinct document(i._from)", { ids: a_uuids, @@ -1121,7 +1113,6 @@ module.exports = (function () { obj.getCollCategoryTags = function (a_coll_id) { var coll = obj.db.c.document(a_coll_id), ctx = obj.catalogCalcParCtxt(coll, {}); - if (ctx.pub) return Array.from(ctx.tags); }; @@ -1670,7 +1661,6 @@ module.exports = (function () { }; obj.hasPublicRead = function (a_id) { - console.log("Has public read a_id is ", a_id); // Check for local topic on collections if (a_id.startsWith("c/")) { var col = obj.db.c.document(a_id); diff --git a/core/database/foxx/api/tag_router.js b/core/database/foxx/api/tag_router.js index 575d04f3a..ea30323d9 100644 --- a/core/database/foxx/api/tag_router.js +++ b/core/database/foxx/api/tag_router.js @@ -19,6 +19,7 @@ router let client = null; let result = null; let tot = null; + let name = null; try { client = req.queryParams.client ? 
g_lib.getUserFromClientID(req.queryParams.client) @@ -31,7 +32,7 @@ router status: "Started", description: `Search for tags by name (${req.queryParams?.name?.trim()})`, }); - var name = req.queryParams.name.trim(); + name = req.queryParams.name.trim(); if (name.length < 3) throw [error.ERR_INVALID_PARAM, "Input is too short for tag search."]; diff --git a/core/database/foxx/api/task_router.js b/core/database/foxx/api/task_router.js index 292beb922..e9ae12dd1 100644 --- a/core/database/foxx/api/task_router.js +++ b/core/database/foxx/api/task_router.js @@ -347,6 +347,7 @@ router throw [error.ERR_IN_USE, "Cannot delete task that is still scheduled."]; g_lib.graph.task.remove(req.queryParams.task_id); + res.send(); logger.logRequestSuccess({ client: req?.queryParams?.task_id, correlationId: req.headers["x-correlation-id"], @@ -357,7 +358,6 @@ router extra: req.queryParams.task_id, }); } catch (e) { - g_lib.handleException(e, res); logger.logRequestFailure({ client: req?.queryParams?.task_id, correlationId: req.headers["x-correlation-id"], @@ -368,6 +368,7 @@ router extra: "undefined", error: e, }); + g_lib.handleException(e, res); } }) .queryParam("task_id", joi.string().required(), "Task ID") @@ -380,7 +381,7 @@ router try { const client = g_lib.getUserFromClientID(req.queryParams.client); logger.logRequestStarted({ - client: req?.queryParams?.task_id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/list", @@ -422,7 +423,7 @@ router res.send(result); logger.logRequestSuccess({ - client: req?.queryParams?.task_id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/list", @@ -435,7 +436,7 @@ router }); } catch (e) { logger.logRequestFailure({ - client: req?.queryParams?.task_id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/list", @@ -471,7 +472,7 
@@ router try { result = []; logger.logRequestStarted({ - client: req?.queryParams?.task_id, + client: "system", correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/reload", @@ -495,7 +496,7 @@ router res.send(result); logger.logRequestSuccess({ - client: req?.queryParams?.task_id, + client: "system", correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/reload", @@ -505,7 +506,7 @@ router }); } catch (e) { logger.logRequestFailure({ - client: req?.queryParams?.task_id, + client: "system", correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/reload", @@ -549,6 +550,7 @@ router ); }, }); + res.send(); logger.logRequestSuccess({ client: "undefined", correlationId: req.headers["x-correlation-id"], diff --git a/core/database/foxx/api/tasks.js b/core/database/foxx/api/tasks.js index 8e7ac603d..023c18484 100644 --- a/core/database/foxx/api/tasks.js +++ b/core/database/foxx/api/tasks.js @@ -24,8 +24,6 @@ var tasks_func = (function () { a_data_limit, a_rec_limit, ) { - console.log("taskInitAllocCreate"); - // Check if repo and subject exist if (!g_db._exists(a_repo_id)) throw [error.ERR_NOT_FOUND, "Repo, '" + a_repo_id + "', does not exist"]; @@ -106,8 +104,6 @@ var tasks_func = (function () { }; obj.taskRunAllocCreate = function (a_task) { - console.log("taskRunAllocCreate"); - var reply, state = a_task.state; @@ -156,8 +152,6 @@ var tasks_func = (function () { // ----------------------- ALLOC DELETE ---------------------------- obj.taskInitAllocDelete = function (a_client, a_repo_id, a_subject_id) { - console.log("taskInitAllocDelete"); - if (!g_db._exists(a_repo_id)) throw [error.ERR_NOT_FOUND, "Repo, '" + a_repo_id + "', does not exist"]; @@ -247,8 +241,6 @@ var tasks_func = (function () { }; obj.taskRunAllocDelete = function (a_task) { - console.log("taskRunAllocDelete"); - var reply, state = a_task.state; @@ -305,8 +297,6 @@ var tasks_func = (function () { 
is_collection_token_required = false, collection_info = {}, ) { - console.log("taskInitDataGet"); - var result = g_proc.preprocessItems(a_client, null, a_res_ids, g_lib.TT_DATA_GET); if (result.glob_data.length + result.ext_data.length > 0 && !a_check) { @@ -387,8 +377,6 @@ var tasks_func = (function () { }; obj.taskRunDataGet = function (a_task) { - console.log("taskRunDataGet"); - var reply, state = a_task.state; @@ -489,8 +477,6 @@ var tasks_func = (function () { is_collection_token_required = false, collection_info = {}, ) { - console.log("taskInitDataPut"); - var result = g_proc.preprocessItems(a_client, null, a_res_ids, g_lib.TT_DATA_PUT); if (result.glob_data.length > 0 && !a_check) { @@ -531,7 +517,6 @@ var tasks_func = (function () { }; obj.taskRunDataPut = function (a_task) { - console.log("taskRunDataPut"); var reply, state = a_task.state, params, @@ -541,7 +526,6 @@ var tasks_func = (function () { // No rollback functionality if (a_task.step < 0) return; - console.log("taskRunDataPut begin Step: ", a_task.step); if (a_task.step == 0) { //console.log("taskRunDataPut - do setup"); obj._transact( @@ -660,8 +644,6 @@ var tasks_func = (function () { ids: [xfr.files[0].id], }; - console.log("Printing params in task update size"); - console.log(params); reply = { cmd: g_lib.TC_RAW_DATA_UPDATE_SIZE, params: params, @@ -683,8 +665,6 @@ var tasks_func = (function () { ); } - console.log("taskRunDataPut final reply"); - console.log(reply); return reply; }; @@ -696,8 +676,6 @@ var tasks_func = (function () { involved allocations. Unmanaged records do not use allocations and are ignored. 
*/ obj.taskInitRecAllocChg = function (a_client, a_proj_id, a_res_ids, a_dst_repo_id, a_check) { - console.log("taskInitRecAllocChg"); - // Verify that client is owner, or has admin permission to project owner var owner_id; @@ -791,8 +769,6 @@ var tasks_func = (function () { }; obj.taskRunRecAllocChg = function (a_task) { - console.log("taskRunRecAllocChg"); - var reply, state = a_task.state, params, @@ -916,7 +892,11 @@ var tasks_func = (function () { //console.log("taskRunRecAllocChg - do xfr"); // Transfer data step - var tokens = g_lib.getAccessToken(a_task.client); + const token_doc = new UserToken({ + user_id: a_task.client, + }).get_token(); + var tokens = UserToken.formatUserTokenForTransferTask(token_doc); + const extra_token_format = UserToken.formatUserToken(false, token_doc, false); params = { uid: a_task.client, type: a_task.type, @@ -924,6 +904,8 @@ var tasks_func = (function () { acc_tok: tokens.acc_tok, ref_tok: tokens.ref_tok, acc_tok_exp_in: tokens.acc_tok_exp_in, + token_type: extra_token_format.token_type, + scopes: extra_token_format.scopes, }; params = Object.assign(params, xfr); reply = { @@ -1155,8 +1137,6 @@ var tasks_func = (function () { }; obj.taskRunRecOwnerChg = function (a_task) { - console.log("taskRunRecOwnerChg"); - var reply, state = a_task.state, params, @@ -1302,8 +1282,11 @@ var tasks_func = (function () { case 1: //console.log("taskRunRecOwnerChg - do xfr"); // Transfer data step - - var tokens = g_lib.getAccessToken(a_task.client); + const token_doc = new UserToken({ + user_id: a_task.client, + }).get_token(); + var tokens = UserToken.formatUserTokenForTransferTask(token_doc); + const extra_token_format = UserToken.formatUserToken(false, token_doc, false); params = { uid: a_task.client, type: a_task.type, @@ -1311,6 +1294,8 @@ var tasks_func = (function () { acc_tok: tokens.acc_tok, ref_tok: tokens.ref_tok, acc_tok_exp_in: tokens.acc_tok_exp_in, + token_type: extra_token_format.token_type, + scopes: extra_token_format.scopes, 
}; params = Object.assign(params, xfr); reply = { @@ -1376,8 +1361,6 @@ var tasks_func = (function () { }; obj.taskInitRecCollDelete = function (a_client, a_ids) { - console.log("taskInitRecCollDelete start", Date.now()); - var result = g_proc.preprocessItems(a_client, null, a_ids, g_lib.TT_REC_DEL); if (result.has_pub) { @@ -1451,8 +1434,6 @@ var tasks_func = (function () { }; obj.taskRunRecCollDelete = function (a_task) { - console.log("taskRunRecCollDelete"); - var i, reply, state = a_task.state, @@ -1626,8 +1607,6 @@ var tasks_func = (function () { }; obj.taskRunProjDelete = function (a_task) { - console.log("taskRunProjDelete"); - var reply, state = a_task.state; @@ -1893,8 +1872,6 @@ var tasks_func = (function () { xfr_docs - chunked per source repo and max data transfer size */ - console.log("_buildTransferDoc", a_mode, a_remote, a_orig_fname); - var fnames, i, idx, @@ -2440,7 +2417,6 @@ var tasks_func = (function () { * contain raw data. */ obj._projectDelete = function (a_proj_id) { - console.log("_projectDelete", a_proj_id); // Delete allocations g_db.alloc.removeByExample({ _from: a_proj_id, diff --git a/core/database/foxx/api/topic_router.js b/core/database/foxx/api/topic_router.js index 38bc07866..ef284d292 100644 --- a/core/database/foxx/api/topic_router.js +++ b/core/database/foxx/api/topic_router.js @@ -7,17 +7,30 @@ const error = require("./lib/error_codes"); const g_db = require("@arangodb").db; const g_lib = require("./support"); +const logger = require("./lib/logger"); +const basePath = "topic"; module.exports = router; //==================== TOPIC API FUNCTIONS router .get("/list/topics", function (req, res) { + let client = null; + let result = null; try { + client = g_lib.getUserFromClientID(req.queryParams.client); + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list/topics", + status: "Started", + description: "List topics", + }); + var qry, 
par = {}, - result, off = 0, cnt = 50; @@ -58,7 +71,30 @@ router }); res.send(result); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list/topics", + status: "Success", + description: "List topics", + extra: { + topicCount: Array.isArray(result) ? result.length : undefined, + }, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/list/topics", + status: "Failure", + description: "List topics", + extra: { + topicCount: Array.isArray(result) ? result.length : undefined, + }, + error: e, + }); g_lib.handleException(e, res); } }) @@ -71,32 +107,87 @@ router router .get("/view", function (req, res) { + let client = null; + let topic_extra = undefined; try { + client = req.queryParams.client + ? g_lib.getUserFromClientID(req.queryParams.client) + : undefined; + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Started", + description: `View topic. ID: ${req.queryParams.id}`, + }); + if (!g_db.t.exists(req.queryParams.id)) throw [error.ERR_NOT_FOUND, "Topic, " + req.queryParams.id + ", not found"]; var topic = g_db.t.document(req.queryParams.id); res.send([topic]); + + topic_extra = { + title: topic.title, + creator: topic.creator, + coll_cnt: topic.coll_cnt, + }; + + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Success", + description: `View topic. ID: ${req.queryParams.id}`, + extra: topic_extra, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/view", + status: "Failure", + description: `View topic. 
ID: ${req.queryParams.id}`, + extra: topic_extra, + error: e, + }); g_lib.handleException(e, res); } }) .queryParam("client", joi.string().optional(), "Client ID") - .queryParam("id", joi.string().optional(), "ID of topic to view") + .queryParam("id", joi.string().required(), "ID of topic to view") .summary("View topic") .description("View a topic."); router .get("/search", function (req, res) { + let client = null; + let result = []; + const phrase = req.queryParams.phrase; + const shortPhrase = phrase.length > 10 ? phrase.slice(0, 10) + "..." : phrase; try { + client = req.queryParams.client + ? g_lib.getUserFromClientID(req.queryParams.client) + : undefined; + logger.logRequestStarted({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/search", + status: "Started", + description: `Search topics. Search Phrase: ${shortPhrase}`, + }); + var tokens = req.queryParams.phrase.match(/(?:[^\s"]+|"[^"]*")+/g), qry = "for i in topicview search analyzer((", params = {}, i, p, qry_res, - result = [], item, it, topic, @@ -154,7 +245,26 @@ router } res.send(result); + logger.logRequestSuccess({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/search", + status: "Success", + description: `Search topics. Search Phrase: ${shortPhrase}`, + extra: { numOfResults: result.length }, + }); } catch (e) { + logger.logRequestFailure({ + client: client?._id, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/search", + status: "Failure", + description: `Search topics. 
Search Phrase: ${shortPhrase}`, + extra: { numOfResults: result.length }, + error: e, + }); g_lib.handleException(e, res); } }) diff --git a/core/database/foxx/api/user_router.js b/core/database/foxx/api/user_router.js index 23dabdd8e..4c4b1e031 100644 --- a/core/database/foxx/api/user_router.js +++ b/core/database/foxx/api/user_router.js @@ -30,7 +30,7 @@ router } logger.logRequestStarted({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/authn/password", @@ -43,23 +43,23 @@ router authorized: true, }); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/authn/password", status: "Success", + extra: `Resolved Client: ${client}`, description: "Authenticating user via password", - extra: "undefined", }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/authn/password", status: "Failure", description: "Authenticating user via password", - extra: "undefined", + extra: `Resolved Client: ${client}`, error: e, }); g_lib.handleException(e, res); @@ -100,7 +100,7 @@ router routePath: basePath + "/authn/token", status: "Success", description: "Authenticating user via access token", - extra: "undefined", + extra: "N/A", }); } catch (e) { logger.logRequestFailure({ @@ -110,7 +110,7 @@ router routePath: basePath + "/authn/token", status: "Failure", description: "Authenticating user via access token", - extra: "undefined", + extra: "N/A", error: e, }); @@ -291,7 +291,7 @@ router routePath: basePath + "/create", status: "Failure", description: "Create new user entry", - extra: user.new.uid, + extra: user?.new?.uid, error: e, }); g_lib.handleException(e, res); @@ -316,8 +316,17 @@ router .get("/update", function (req, res) { let 
client = null; let result = null; - let extra_log_info = null; + let sub = req.queryParams.subject ? req.queryParams.subject : req.queryParams.client; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/update", + status: "Started", + description: `Update user information. Subject: ${sub}`, + }); + g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn"], @@ -392,36 +401,27 @@ router delete user.new.refresh; result = [user.new]; - - const { is_admin, max_coll, max_proj, max_sav_qry } = user.new; - - extra_log_info = { - is_admin, - max_coll, - max_proj, - max_sav_qry, - }; }, }); res.send(result); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/update", status: "Success", - description: "Update user information", - extra: extra_log_info, + description: `Update user information. Subject: ${sub}`, + extra: result, }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/update", status: "Failure", - description: "Update user information", - extra: extra_log_info, + description: `Update user information. 
Subject: ${sub}`, + extra: result, error: e, }); g_lib.handleException(e, res); @@ -506,12 +506,12 @@ router try { name = req.queryParams.name_uid.trim(); logger.logRequestStarted({ - client: name, + client: "N/A", correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/find/by_name_uid", status: "Started", - description: "Find users matching partial name and/or uid", + description: `Find users matching partial name and/or uid: ${name}`, }); if (name.length < 2) @@ -551,7 +551,7 @@ router httpVerb: "GET", routePath: basePath + "/find/by_name_uid", status: "Success", - description: "Find users matching partial name and/or uid", + description: `Find users matching partial name and/or uid: ${name}`, extra: result, }); } catch (e) { @@ -561,7 +561,7 @@ router httpVerb: "GET", routePath: basePath + "/find/by_name_uid", status: "Failure", - description: "Find users matching partial name and/or uid", + description: `Find users matching partial name and/or uid: ${name}`, extra: result, error: e, }); @@ -576,7 +576,7 @@ router router .get("/keys/set", function (req, res) { - let client = null; + let sub = req.queryParams.subject ? req.queryParams.subject : req.queryParams.client; try { g_db._executeTransaction({ collections: { @@ -584,15 +584,15 @@ router write: ["u"], }, action: function () { - client = g_lib.getUserFromClientID(req.queryParams.client); + const client = g_lib.getUserFromClientID(req.queryParams.client); logger.logRequestStarted({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/keys/set", status: "Started", - description: "Set user public and private keys", + description: `Set user public and private keys. 
Subject: ${sub}`, }); var user_id; @@ -617,23 +617,23 @@ router }); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/keys/set", status: "Success", - description: "Set user public and private keys", - extra: "undefined", + description: `Set user public and private keys. Subject: ${sub}`, + extra: "N/A", }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/keys/set", status: "Failure", - description: "Set user public and private keys", - extra: "undefined", + description: `Set user public and private keys. Subject: ${sub}`, + extra: "N/A", error: e, }); g_lib.handleException(e, res); @@ -648,23 +648,23 @@ router router .get("/keys/clear", function (req, res) { - let client = null; + let sub = req.queryParams.subject ? req.queryParams.subject : req.queryParams.client; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/keys/clear", + status: "Started", + description: `Clear user public and private keys. 
Subject: ${sub}`, + }); g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn"], write: ["u"], }, action: function () { - client = g_lib.getUserFromClientID(req.queryParams.client); - logger.logRequestStarted({ - client: client?._id, - correlationId: req.headers["x-correlation-id"], - httpVerb: "GET", - routePath: basePath + "/keys/clear", - status: "Started", - description: "Clear user public and private keys", - }); + const client = g_lib.getUserFromClientID(req.queryParams.client); var user_id; @@ -687,23 +687,23 @@ router }, }); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/keys/clear", status: "Success", - description: "Clear user public and private keys", - extra: "undefined", + description: `Clear user public and private keys. Subject: ${sub}`, + extra: "N/A", }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/keys/clear", status: "Failure", - description: "Clear user public and private keys", - extra: "undefined", + description: `Clear user public and private keys. Subject: ${sub}`, + extra: "N/A", error: e, }); g_lib.handleException(e, res); @@ -716,6 +716,7 @@ router router .get("/keys/get", function (req, res) { + let sub = req.queryParams.subject ? req.queryParams.subject : req.queryParams.client; let user = null; try { if (req.queryParams.subject) { @@ -729,12 +730,12 @@ router _id: req.queryParams.subject, }); logger.logRequestStarted({ - client: user?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/keys/get", status: "Started", - description: "Get user public and private keys", + description: `Get user public and private keys. 
${sub}`, }); } else { user = g_lib.getUserFromClientID(req.queryParams.client); @@ -747,13 +748,12 @@ router }, ]); logger.logRequestSuccess({ - client: user?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/keys/get", status: "Success", description: "Get user public and private keys", - extra: "undefined", }); } else { res.send([ @@ -764,24 +764,24 @@ router }, ]); logger.logRequestSuccess({ - client: user?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/keys/get", status: "Success", - description: "Get user public and private keys", - extra: "undefined", + description: `Get user public and private keys. ${sub}`, + extra: "N/A", }); } } catch (e) { logger.logRequestFailure({ - client: user?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/keys/get", status: "Failure", - description: "Get user public and private keys", - extra: "undefined", + description: `Get user public and private keys. ${sub}`, + extra: "N/A", error: e, }); g_lib.handleException(e, res); @@ -835,24 +835,24 @@ router router .get("/token/set", function (req, res) { - let client = null; + let sub = req.queryParams.subject ? req.queryParams.subject : req.queryParams.client; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/token/set", + status: "Started", + description: `Setting user token. 
Subject: ${sub}`, + }); + g_db._executeTransaction({ collections: { read: ["u", "uuid", "accn", "globus_coll"], write: ["u", "globus_coll", "globus_token"], }, action: function () { - client = g_lib.getUserFromClientID(req.queryParams.client); - - logger.logRequestStarted({ - client: client?._id, - correlationId: req.headers["x-correlation-id"], - httpVerb: "GET", - routePath: basePath + "/token/set", - status: "Started", - description: "Setting user token", - }); + const client = g_lib.getUserFromClientID(req.queryParams.client); var user_id; let user_doc; @@ -912,11 +912,18 @@ router other_token_data, ); // TODO: the call site and function and docs will need to be updated if changes are made to assumed data // GLOBUS_TRANSFER parse currently assumes uuid and scopes exist - let globus_collection = g_db.globus_coll.exists({ - _key: collection_search_key, - }); - if (!globus_collection) { - globus_collection = g_db.globus_coll.save({ + let globus_collection; + + if ( + g_db.globus_coll.exists({ + _key: collection_search_key, + }) + ) { + globus_collection = g_db.globus_coll.document({ + _key: collection_search_key, + }); + } else { + const meta = g_db.globus_coll.save({ _key: collection_search_key, name: "Newly Inserted Collection", description: "The collection description", @@ -927,16 +934,21 @@ router type: "mapped", // mapped/guest TODO: to be pulled from token data on follow-up ticket ha_enabled: false, // boolean - TODO: to be pulled from token data on follow-up ticket }); + globus_collection = g_db.globus_coll.document(meta); } const token_key = globus_collection._key + "_" + token_type + "_" + user_doc._key; + + const dependent_scopes_val = + scopes || globus_collection.required_scopes; + const token_doc = { _key: token_key, - _from: user_id, // the uid field + _from: user_id, _to: globus_collection._id, type: token_type, - dependent_scopes: scopes, + dependent_scopes: dependent_scopes_val, request_time: Math.floor(Date.now() / 1000), last_used: 
Math.floor(Date.now() / 1000), status: @@ -947,7 +959,7 @@ router }; const token_doc_upsert = g_db.globus_token.insert(token_doc, { - overwriteMode: "replace", // TODO: perhaps use 'update' and specify values for true upsert. + overwriteMode: "replace", }); break; } @@ -964,25 +976,25 @@ router ); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/token/set", status: "Success", - description: "Setting user token", + description: `Setting user token. Subject: ${sub}`, extra: `${tokenTypeName} (${token_type})`, }); }, }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/token/set", status: "Failure", - description: "Setting user tokens", - extra: "undefined", + description: `Setting user token. Subject: ${sub}`, + extra: "N/A", error: e, }); g_lib.handleException(e, res); @@ -1016,6 +1028,7 @@ router router .get("/token/get", function (req, res) { + let sub = req.queryParams.subject ? req.queryParams.subject : req.queryParams.client; let user = null; try { const collection_token = UserToken.validateRequestParams(req.queryParams); @@ -1037,12 +1050,12 @@ router } logger.logRequestStarted({ - client: user?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/token/get", status: "Started", - description: "Getting user token", + description: `Getting user token. Subject: ${sub}`, }); const user_token = new UserToken({ @@ -1063,22 +1076,23 @@ router res.send(result); logger.logRequestSuccess({ - client: user?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/token/get", status: "Success", - description: "Getting user token", + description: `Getting user token. 
Subject: ${sub}`, + extra: "N/A", }); } catch (e) { logger.logRequestFailure({ - client: user?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/token/get", status: "Failure", - description: "Getting user tokens", - extra: "undefined", + description: `Getting user token. Subject: ${sub}`, + extra: "N/A", error: e, }); g_lib.handleException(e, res); @@ -1101,6 +1115,7 @@ router router .get("/token/get/access", function (req, res) { + let sub = req.queryParams.subject ? req.queryParams.subject : req.queryParams.client; let user = null; try { if (req.queryParams.subject) { @@ -1116,35 +1131,35 @@ router user = g_lib.getUserFromClientID(req.queryParams.client); } logger.logRequestStarted({ - client: user?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/token/get/access", status: "Started", - description: "Getting User Access Token", + description: `Getting User Access Token. Subject: ${sub}`, }); if (!user.access) throw [error.ERR_NOT_FOUND, "No access token found"]; res.send(user.access); logger.logRequestSuccess({ - client: user?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/token/get/access", status: "Success", - description: "Getting User Access Token", - extra: "undefined", + description: `Getting User Access Token. Subject: ${sub}`, + extra: "N/A", }); } catch (e) { logger.logRequestFailure({ - client: user?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/token/get/access", status: "Failure", - description: "Getting User Access Token", - extra: "undefined", + description: `Getting User Access Token. 
Subject: ${sub}`, + extra: "N/A", error: e, }); @@ -1208,16 +1223,17 @@ router router .get("/view", function (req, res) { + let sub = req.queryParams.subject ? req.queryParams.subject : req.queryParams.client; let client = null; try { client = g_lib.getUserFromClientID_noexcept(req.queryParams.client); logger.logRequestStarted({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/view", status: "Started", - description: "View User Information", + description: `View User Information. Subject: ${sub}`, }); var user, @@ -1300,26 +1316,26 @@ router res.send([user]); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/view", status: "Success", - description: "View User Information", + description: `View User Information. Subject: ${sub}`, extra: `uid=${user.uid}, is_admin=${!!client?.is_admin}`, }); //req.queryParams.details ? } catch (e) { - g_lib.handleException(e, res); logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/view", status: "Failure", - description: "View User Information", - extra: `uid=${user.uid}, is_admin=${!!client?.is_admin}`, + description: `View User Information. 
Subject: ${sub}`, + extra: `uid=${user?.uid}, is_admin=${!!client?.is_admin}`, error: e, }); + g_lib.handleException(e, res); } }) .queryParam("client", joi.string().required(), "Client ID") @@ -1334,7 +1350,7 @@ router var qry = "for i in u sort i.name_last, i.name_first"; var result; logger.logRequestStarted({ - client: client?._id, + client: "N/A", correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/list/all", @@ -1373,7 +1389,7 @@ router res.send(result); logger.logRequestSuccess({ - client: client?._id, + client: "N/A", correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/list/all", @@ -1392,7 +1408,7 @@ router var result, client = g_lib.getUserFromClientID(req.queryParams.client); logger.logRequestStarted({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/list/collab", @@ -1441,7 +1457,7 @@ router res.send(result); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/list/collab", @@ -1464,9 +1480,19 @@ Note: must delete ALL data records and projects owned by the user being deleted */ router .get("/delete", function (req, res) { - let client = null; let user_id = null; + let sub = null; try { + sub = req.queryParams.subject ? 
req.queryParams.subject : req.queryParams.client; + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/delete", + status: "Started", + description: `Remove existing user entry: ${user_id}, Subject: ${sub}`, + }); + g_db._executeTransaction({ collections: { read: ["u", "admin"], @@ -1491,15 +1517,7 @@ router ], }, action: function () { - client = g_lib.getUserFromClientID(req.queryParams.client); - logger.logRequestStarted({ - client: client?._id, - correlationId: req.headers["x-correlation-id"], - httpVerb: "GET", - routePath: basePath + "/delete", - status: "Started", - description: "Remove existing user entry", - }); + const client = g_lib.getUserFromClientID(req.queryParams.client); if (req.queryParams.subject) { user_id = req.queryParams.subject; @@ -1562,23 +1580,21 @@ router }, }); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/delete", status: "Success", - description: "Remove existing user entry", - extra: user_id, + description: `Remove existing user entry: ${user_id}, Subject: ${sub}`, }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/delete", status: "Failure", - description: "Remove existing user entry", - extra: user_id, + description: `Remove existing user entry: ${user_id}, Subject: ${sub}`, error: e, }); g_lib.handleException(e, res); @@ -1591,16 +1607,18 @@ router router .get("/ident/list", function (req, res) { + let sub = req.queryParams.subject ? 
req.queryParams.subject : req.queryParams.client; let client = null; + let extra_log = []; try { client = g_lib.getUserFromClientID(req.queryParams.client); logger.logRequestStarted({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ident/list", status: "Started", - description: "List user linked IDs", + description: `List user linked IDs. Subject: ${sub}`, }); if (req.queryParams.subject) { if (!g_db.u.exists(req.queryParams.subject)) @@ -1611,42 +1629,45 @@ router const subject = g_db.u.document(req.queryParams.subject); permissions.ensureAdminPermUser(client, subject._id); - res.send( - g_db._query("for v in 1..1 outbound @client ident return v._key", { - client: subject._id, - }), - ); + var result = g_db._query("for v in 1..1 outbound @client ident return v._key", { + client: subject._id, + }); + extra_log = result.toArray(); + res.send(result); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ident/list", status: "Success", - description: "List user linked IDs", + description: `List user linked IDs. Subject: ${sub}`, + extra: { NumOfIds: extra_log.length }, }); } else { - res.send( - g_db._query("for v in 1..1 outbound @client ident return v._key", { - client: client._id, - }), - ); + var result = g_db._query("for v in 1..1 outbound @client ident return v._key", { + client: client._id, + }); + res.send(result); + extra_log = result.toArray(); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ident/list", status: "Success", - description: "List user linked IDs", + description: `List user linked IDs. 
Subject: ${sub}`, + extra: { NumOfIds: extra_log.length }, }); } } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ident/list", status: "Failure", - description: "List user linked IDs", + description: `List user linked IDs. Subject: ${sub}`, + extra: { NumOfIds: extra_log.length }, error: e, }); g_lib.handleException(e, res); @@ -1659,7 +1680,17 @@ router router .get("/ident/add", function (req, res) { let client = null; + let sub = req.queryParams.subject ? req.queryParams.subject : req.queryParams.client; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/ident/add", + status: "Started", + description: `Add new linked identity. Subject: ${sub}`, + }); + g_db._executeTransaction({ collections: { read: ["u", "admin"], @@ -1682,18 +1713,18 @@ router g_db._exists({ _id: "uuid/" + req.queryParams.ident, }) - ) + ) { logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ident/add", status: "Success", - description: "Add new linked identity", + description: `Add new linked identity. Subject: ${sub}`, extra: req.queryParams.ident, }); - - return; + return; + } id = g_db.uuid.save( { _key: req.queryParams.ident, @@ -1721,12 +1752,12 @@ router ); } logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ident/add", status: "Success", - description: "Add new linked identity", + description: `Add new linked identity. 
Subject: ${sub}`, extra: req.queryParams.ident, }); return; @@ -1771,22 +1802,22 @@ router }, }); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ident/add", status: "Success", - description: "Add new linked identity", + description: `Add new linked identity. Subject: ${sub}`, extra: req.queryParams.ident, }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ident/add", status: "Failure", - description: "Add new linked identity", + description: `Add new linked identity. Subject: ${sub}`, extra: req.queryParams.ident, error: e, }); @@ -1807,6 +1838,14 @@ router .get("/ident/remove", function (req, res) { let client = null; try { + logger.logRequestStarted({ + client: req.queryParams.client, + correlationId: req.headers["x-correlation-id"], + httpVerb: "GET", + routePath: basePath + "/ident/remove", + status: "Started", + description: `Remove linked identity ${req.queryParams.ident} from user account.`, + }); g_db._executeTransaction({ collections: { read: ["u", "admin"], @@ -1815,14 +1854,6 @@ router action: function () { client = g_lib.getUserFromClientID(req.queryParams.client); const owner = g_lib.getUserFromClientID(req.queryParams.ident); - logger.logRequestStarted({ - client: client?._id, - correlationId: req.headers["x-correlation-id"], - httpVerb: "GET", - routePath: basePath + "/ident/remove", - status: "Started", - description: "Remove linked identity from user account", - }); permissions.ensureAdminPermUser(client, owner._id); if (g_lib.isUUID(req.queryParams.ident)) { @@ -1837,23 +1868,21 @@ router }, }); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ident/remove", 
status: "Success", - description: "Remove linked identity from user account", - extra: req.queryParams.ident, + description: `Remove linked identity ${req.queryParams.ident} from user account.`, }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ident/remove", status: "Failure", - description: "Remove linked identity from user account", - extra: req.queryParams.ident, + description: `Remove linked identity ${req.queryParams.ident} from user account.`, error: e, }); g_lib.handleException(e, res); @@ -1867,36 +1896,38 @@ router router .get("/ep/get", function (req, res) { let client = null; + let first = null; try { - client = g_lib.getUserFromClientID(req.queryParams.client); logger.logRequestStarted({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ep/get", status: "Started", description: "Get recent end-points", }); + client = g_lib.getUserFromClientID(req.queryParams.client); + first = client.eps && client.eps.length ? client.eps[0] : undefined; res.send(client.eps ? client.eps : []); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ep/get", status: "Success", description: "Get recent end-points", - extra: client.eps, + extra: { most_recent: first, count: client.eps ? client.eps.length : 0 }, }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ep/get", status: "Failure", description: "Get recent end-points", - extra: client.eps, + extra: { most_recent: first, count: client && client.eps ? 
client.eps.length : 0 }, error: e, }); g_lib.handleException(e, res); @@ -1910,15 +1941,15 @@ router .get("/ep/set", function (req, res) { let client = null; try { - client = g_lib.getUserFromClientID(req.queryParams.client); logger.logRequestStarted({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ep/set", status: "Started", description: "Set recent end-points", }); + client = g_lib.getUserFromClientID(req.queryParams.client); g_db._update( client._id, { @@ -1929,23 +1960,23 @@ router }, ); logger.logRequestSuccess({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ep/set", - status: "Started", + status: "Success", description: "Set recent end-points", extra: client.eps, }); } catch (e) { logger.logRequestFailure({ - client: client?._id, + client: req.queryParams.client, correlationId: req.headers["x-correlation-id"], httpVerb: "GET", routePath: basePath + "/ep/set", status: "Failure", description: "Set recent end-points", - extra: client.eps, + extra: client && client.eps ? 
client.eps : undefined, error: e, }); g_lib.handleException(e, res); diff --git a/core/database/foxx/tests/acl_router.test.js b/core/database/foxx/tests/acl_router.test.js new file mode 100644 index 000000000..77fddb15a --- /dev/null +++ b/core/database/foxx/tests/acl_router.test.js @@ -0,0 +1,155 @@ +"use strict"; + +const { expect } = require("chai"); +const request = require("@arangodb/request"); +const { db } = require("@arangodb"); +const { baseUrl } = module.context; + +const acl_base_url = `${baseUrl}/acl`; + +describe("unit_acl_router: test /update route", () => { + after(function () { + const collections = ["member", "u", "c", "d", "acl", "owner", "g"]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) col.truncate(); + }); + }); + + beforeEach(() => { + // Ensure necessary collections exist + const collections = ["member", "u", "c", "d", "acl", "owner", "g"]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) col.truncate(); + else db._create(name); + }); + + // Create a fake user + db.u.save({ + _key: "fakeUser", + _id: "u/fakeUser", + name: "Fake User", + is_admin: true, + }); + + // Create a fake collection + db.c.save({ + _key: "coll1", + _id: "c/coll1", + name: "Fake Collection", + }); + + // Link owner + db.owner.save({ + _from: "c/coll1", + _to: "u/fakeUser", + }); + db.acl.save({ + _from: "c/coll1", + _to: "u/fakeUser", + id: "u/fakeUser", + grant: 1, + inhgrant: 0, + }); + + db.u.save({ + _key: "otherUser", + _id: "u/otherUser", + name: "Other User", + is_admin: false, + }); + + db.c.update("coll1", { owner: "u/fakeUser" }); + + db.acl.save({ + _from: "c/coll1", + _to: "u/otherUser", + grant: 1, + inhgrant: 0, + }); + + db.member.save({ + _from: "c/coll1", + _to: "u/otherUser", + }); + db.c.update("coll1", { owner: "u/fakeUser" }); + }); + + it("should update ACL for a collection", () => { + const rules = [ + { + id: "u/fakeUser", + grant: 1, // minimal permission for testing + inhgrant: 0, 
+ }, + ]; + + // Build query string + const query = `client=u/fakeUser&id=c/coll1&rules=${encodeURIComponent( + JSON.stringify(rules), + )}`; + + // Act + const response = request.get(`${acl_base_url}/update?${query}`); + + // Assert + expect(response.status).to.equal(200); + + const body = JSON.parse(response.body); + expect(body).to.be.an("array"); + expect(body[0]).to.have.property("id", "u/fakeUser"); + expect(body[0]).to.have.property("grant", 1); + }); + + it("should view ACLs for a collection", () => { + const query = `client=u/fakeUser&id=c/coll1`; + + const response = request.get(`${baseUrl}/acl/view?${query}`); + + // Expect HTTP 200 + expect(response.status).to.equal(200); + + // Parse body + const body = JSON.parse(response.body); + expect(body).to.be.an("array"); + expect(body.length).to.equal(2); + + const ids = body.map((x) => x.id); + expect(ids).to.include("u/fakeUser"); + expect(ids).to.include("u/otherUser"); + }); + + it("should list users who have shared objects with the subject", () => { + const query = "client=u/otherUser&inc_users=true&inc_projects=false"; + + const response = request.get(`${baseUrl}/acl/shared/list?${query}`); + + db.member.save({ _from: "c/coll1", _to: "u/otherUser" }); + + expect(response.status).to.equal(200); + + const body = JSON.parse(response.body); + + // Expect an array of users/projects that shared with otherUser + expect(body).to.be.an("array"); + expect(body.length).to.equal(1); + + // Expect fakeUser to appear as the one who shared + expect(body[0]).to.have.property("id", "u/fakeUser"); + }); + + it("should list items shared by owner with the client", () => { + const query = "client=u/otherUser&owner=u/fakeUser"; + + const response = request.get(`${baseUrl}/acl/shared/list/items?${query}`); + expect(response.status).to.equal(200); + + const body = JSON.parse(response.body); + expect(body).to.be.an("array"); + + // Should include the collection already shared + const ids = body.map((item) => item.id); + 
expect(ids).to.include("c/coll1"); // already exists + }); +}); diff --git a/core/database/foxx/tests/admin_router.test.js b/core/database/foxx/tests/admin_router.test.js new file mode 100644 index 000000000..f305de9c0 --- /dev/null +++ b/core/database/foxx/tests/admin_router.test.js @@ -0,0 +1,147 @@ +"use strict"; + +// Integration test of API +const { expect } = require("chai"); +const request = require("@arangodb/request"); +const { baseUrl } = module.context; +const { db } = require("@arangodb"); + +// router base path (same pattern as your tag example) +const admin_base_url = `${baseUrl}/admin`; + +describe("unit_admin_router: the Foxx microservice admin_router /ping endpoint", () => { + // Clean up any collections if needed (this router doesn't use any) + after(function () { + const collections = ["u", "test_collection", "d"]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) col.truncate(); + }); + }); + + beforeEach(() => { + // no collections used, but keeping consistency with your example + const collections = ["u", "test_collection", "d"]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) { + col.truncate(); + } else { + db._create(name); + } + }); + }); + + it("should successfully run the ping route", () => { + // arrange + const request_string = `${admin_base_url}/ping`; + + // act + const response = request.get(request_string); + + // assert + expect(response.status).to.equal(200); + + // Response structure from router: + // { status: 1 } + const body = JSON.parse(response.body); + expect(body).to.be.an("object"); + expect(body.status).to.equal(1); + }); + + it("should successfully run the test route", () => { + //Create user document for the client + db.u.save({ + _key: "testUser", + name: "Test User", + email: "testuser@example.com", + is_admin: true, + }); + + const doc = db.d.save({ value: "testValue" }); // 'd' collection is allowed + const item = `d/${doc._key}`; + + // Build query 
params + const client = "testUser"; + + const request_string = `${admin_base_url}/test?client=${encodeURIComponent(client)}&item=${encodeURIComponent(item)}`; + + // Act + const response = request.get(request_string); + + // Assert response code + expect(response.status).to.equal(200); + + const body = JSON.parse(response.body); + + expect(body).to.be.an("object"); + expect(body).to.have.property("perm"); + expect(body).to.have.property("time"); + + // perm should be boolean + expect(body.perm).to.be.a("boolean"); + + // time should be numeric (seconds) + expect(body.time).to.be.a("number"); + }); + + it("should successfully run the check route", () => { + const request_string = `${admin_base_url}/check`; + + const response = request.get(request_string); + + expect(response.status).to.equal(200); + const body = JSON.parse(response.body); + + // Basic structure checks + expect(body).to.be.an("object"); + expect(body).to.have.property("edge_bad_count"); + expect(body).to.have.property("vertex_bad_count"); + + // Each edge/vertex category should exist + const expectedKeys = [ + "owner", + "member", + "item", + "acl", + "ident", + "admin", + "alias", + "alloc", + "loc", + "top", + "dep", + "data_no_owner", + "data_multi_owner", + "data_no_loc", + "data_multi_loc", + "data_no_parent", + "coll_no_owner", + "coll_multi_owner", + "coll_no_parent", + "coll_multi_parent", + "group_no_owner", + "group_multi_owner", + "alias_no_owner", + "alias_multi_owner", + "alias_no_alias", + "alias_multi_alias", + "proj_no_owner", + "proj_multi_owner", + "query_no_owner", + "query_multi_owner", + "topic_no_parent", + "topic_multi_parent", + "repo_no_admin", + ]; + + expectedKeys.forEach((key) => { + expect(body).to.have.property(key); + expect(body[key]).to.be.an("array"); + }); + + // Counts should be numeric + expect(body.edge_bad_count).to.be.a("number"); + expect(body.vertex_bad_count).to.be.a("number"); + }); +}); diff --git a/core/database/foxx/tests/authz_router.test.js 
b/core/database/foxx/tests/authz_router.test.js index 134abfaef..20984d405 100644 --- a/core/database/foxx/tests/authz_router.test.js +++ b/core/database/foxx/tests/authz_router.test.js @@ -218,6 +218,28 @@ describe("unit_authz_router: the Foxx microservice authz_router", () => { g_db.repo.truncate(); }); + after(() => { + [ + "u", + "ident", + "uuid", + "acl", + "item", + "c", + "g", + "p", + "owner", + "member", + "d", + "alloc", + "loc", + "repo", + ].forEach((name) => { + const col = g_db._collection(name); + if (col) col.truncate(); + }); + }); + it("unit_authz_router: gridftp create action with user record and valid file path.", () => { defaultWorkingSetup(); const request_string = @@ -335,4 +357,58 @@ describe("unit_authz_router: the Foxx microservice authz_router", () => { // assert expect(response.status).to.equal(204); }); + // + // ===== PERM CHECK TESTS ===== + // + it("unit_authz_router: perm/check should return granted=true for admin user on owned record", () => { + defaultWorkingSetup(); + + const request_string = + `${authz_base_url}/perm/check?client=` + + james_uuid + + `&id=` + + encodeURIComponent(record_id) + + `&perms=` + + permissions.PERM_ALL; + + const response = request.get(request_string); + + expect(response.status).to.equal(200); + const body = JSON.parse(response.body); + expect(body).to.have.property("granted", true); + }); + // + // ===== PERM GET TESTS ===== + // + it("unit_authz_router: perm/get should return permission bits for admin user on record", () => { + defaultWorkingSetup(); + + const request_string = + `${authz_base_url}/perm/get?client=` + + james_uuid + + `&id=` + + encodeURIComponent(record_id); + + const response = request.get(request_string); + + expect(response.status).to.equal(200); + const body = JSON.parse(response.body); + expect(body).to.have.property("granted"); + expect(body.granted).to.be.a("number"); + }); + + it("unit_authz_router: perm/get should fail with invalid id", () => { + defaultWorkingSetup(); + + 
const request_string = + `${authz_base_url}/perm/get?client=` + + james_uuid + + `&id=` + + encodeURIComponent("x/invalid") + + `&perms=` + + permissions.PERM_ALL; + + const response = request.get(request_string); + expect(response.status).to.equal(400); + }); }); diff --git a/core/database/foxx/tests/coll_router.test.js b/core/database/foxx/tests/coll_router.test.js new file mode 100644 index 000000000..0b6b94c0b --- /dev/null +++ b/core/database/foxx/tests/coll_router.test.js @@ -0,0 +1,593 @@ +"use strict"; + +const { expect } = require("chai"); +const request = require("@arangodb/request"); +const { baseUrl } = module.context; +const { db } = require("@arangodb"); + +// import support utilities if needed +const g_lib = require("../api/support"); + +const coll_base_url = `${baseUrl}/col/create`; + +describe("unit_coll_router: /col/create endpoint", () => { + // + // NOTE: + // The /create route requires many collections and relations: + // - c, owner, alloc, a, alias, item, t, top, tag, uuid, accn + // You must prepare enough minimal fixture data so the transaction succeeds. + // + + beforeEach(() => { + const collections = [ + "c", + "owner", + "alloc", + "a", + "alias", + "item", + "t", + "top", + "tag", + "uuid", + "accn", + "u", + "d", + ]; + + collections.forEach((name) => { + let col = db._collection(name); + if (!col) { + db._create(name); + } else { + col.truncate(); + } + }); + + // + // MINIMAL FIXTURE SETUP REQUIRED + // + + // 1. Create a fake client user + db.u.save({ + _key: "client1", + name: "Test User", + max_coll: 10, + }); + + // 2. Alloc record so "owner" has an allocation + db.alloc.save({ + _from: "u/client1", + _to: "alloc/owner1", + }); + + // 3. Root collection for this user + const root = db.c.save({ + _key: "root1", + owner: "u/client1", + creator: "u/client1", + ct: 0, + ut: 0, + title: "root", + }); + + // 4. 
Owner edge pointing to root + db.owner.save({ + _from: "c/root1", + _to: "u/client1", + }); + + // (Optional) If your g_lib.getRootID depends on something else, adjust accordingly + }); + + after(() => { + const collections = [ + "c", + "owner", + "alloc", + "a", + "alias", + "item", + "t", + "top", + "tag", + "uuid", + "accn", + "u", + "d", + ]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) col.truncate(); + }); + }); + + it("should successfully create a new collection", () => { + const body = { + title: "Test Collection", + desc: "Unit Test Desc", + parent: "c/root1", + tags: ["alpha", "beta"], + }; + + // Send POST with query param ?client=client1 + const response = request.post(coll_base_url + "?client=client1", { + json: true, + body, + }); + + expect(response.status).to.equal(200); + expect(response.json).to.have.property("results"); + expect(response.json.results).to.be.an("array").with.length(1); + + const created = response.json.results[0]; + + expect(created).to.have.property("title", "Test Collection"); + expect(created).to.have.property("parent_id", "c/root1"); + }); + + it("should NOT crash if collection creation fails before result is built", () => { + const body = { + title: "Broken Collection", + parent: "c/doesNotExist", + }; + + const response = request.post(`${baseUrl}/col/create?client=client1`, { + json: true, + body, + }); + + // Should return controlled error, not logging crash + expect(response.status).to.not.equal(500); + expect(response.json).to.have.property("error"); + }); + + it("should update an existing collection", () => { + db.c.save({ + _key: "coll1", + owner: "u/client1", + creator: "u/client1", + title: "Old Title", + desc: "Old Desc", + tags: ["old"], + }); + + db.owner.save({ + _from: "c/coll1", + _to: "u/client1", + }); + // + // ---- CALL UPDATE ---- + // + const body = { + id: "c/coll1", + title: "New Title", + desc: "New Desc", + tags: ["x", "y"], + }; + + const response = 
request.post(`${baseUrl}/col/update?client=client1`, { + json: true, + body, + }); + + expect(response.status).to.equal(200); + expect(response.json.results).to.be.an("array").with.length(1); + + const updated = response.json.results[0]; + + // + // ---- ASSERTIONS ---- + // + expect(updated.title).to.equal("New Title"); + expect(updated.desc).to.equal("New Desc"); + expect(updated.tags).to.deep.equal(["x", "y"]); + }); + + it("should NOT crash when updating a non-existent collection", () => { + const body = { + id: "c/missing", + title: "Nope", + }; + + const response = request.post(`${baseUrl}/col/update?client=client1`, { + json: true, + body, + }); + + expect(response.status).to.not.equal(500); + expect(response.json).to.have.property("error"); + }); + + it("should view an existing collection", () => { + // + // Minimal fixture data required for view route + // + + // The collection we want to view + db.c.save({ + _key: "collview1", + owner: "u/client1", + creator: "u/client1", + title: "View Title", + desc: "View Desc", + tags: ["v1", "v2"], + notes: "This is a test note", + }); + + // Owner edge + db.owner.save({ + _from: "c/collview1", + _to: "u/client1", + }); + + // + // ---- CALL VIEW ---- + // + const response = request.get(`${baseUrl}/col/view?client=client1&id=c/collview1`, { + json: true, + }); + + // + // ---- ASSERTIONS ---- + // + expect(response.status).to.equal(200); + expect(response.json.results).to.be.an("array").with.length(1); + + const viewed = response.json.results[0]; + + expect(viewed.id).to.equal("c/collview1"); + expect(viewed.title).to.equal("View Title"); + expect(viewed.desc).to.equal("View Desc"); + + // tags come through normally + expect(viewed.tags).to.deep.equal(["v1", "v2"]); + + // notes are passed through mask (not null) + expect(viewed.notes).to.exist; + }); + + it("should read the contents of a collection", () => { + // Create parent + db.c.save({ + _key: "readParent", + owner: "u/client1", + creator: "u/client1", + title: 
"Parent", + }); + + // Allow client1 to list it + db.owner.save({ + _from: "c/readParent", + _to: "u/client1", + }); + + // Create one child in c + db.c.save({ + _key: "readChild", + owner: "u/client1", + creator: "u/client1", + title: "Child", + }); + + // Link parent -> child with item edge + db.item.save({ + _from: "c/readParent", + _to: "c/readChild", + }); + + // ---- Call /read ---- + const response = request.get(`${baseUrl}/col/read?client=client1&id=c/readParent`, { + json: true, + }); + + // ---- Assertions ---- + expect(response.status).to.equal(200); + expect(response.json).to.be.an("array"); + + // Should contain the child + const child = response.json.find((r) => r.id === "c/readChild"); + expect(child).to.exist; + expect(child.title).to.equal("Child"); + }); + + it("should NOT crash when user lacks permission to read collection", () => { + // Create collection owned by someone else + db.c.save({ + _key: "privateColl", + owner: "u/other", + creator: "u/other", + title: "Private", + }); + + const response = request.get(`${baseUrl}/col/read?client=client1&id=c/privateColl`, { + json: true, + }); + + expect(response.status).to.not.equal(500); + expect(response.json).to.have.property("error"); + }); + + it("should add an item to a collection", () => { + // + // --- FIXTURE --- + // + + // Parent collection + db.c.save({ + _key: "wpParent", + owner: "u/client1", + creator: "u/client1", + title: "Parent", + }); + + // Owner edge + db.owner.save({ + _from: "c/wpParent", + _to: "u/client1", + }); + + // Item to add + db.c.save({ + _key: "wpChild", + owner: "u/client1", + creator: "u/client1", + title: "Child", + }); + + // Owner edge for child (required because write route checks owners) + db.owner.save({ + _from: "c/wpChild", + _to: "u/client1", + }); + + // + // --- CALL /write (ADD) --- + // + const response = request.get( + `${baseUrl}/col/write?client=client1&id=c/wpParent&add[]=c/wpChild`, + { json: true }, + ); + + // + // --- ASSERTIONS --- + // + 
expect(response.status).to.equal(200); + + // Should return empty array because no "loose" items + expect(response.json).to.be.an("array").that.is.empty; + }); + + it("should move an item between collections", () => { + // --- FIXTURE --- + // Source collection + db.c.save({ _key: "srcColl", owner: "u/client1", creator: "u/client1", title: "Source" }); + db.owner.save({ _from: "c/srcColl", _to: "u/client1" }); + + // Destination collection + db.c.save({ + _key: "dstColl", + owner: "u/client1", + creator: "u/client1", + title: "Destination", + }); + db.owner.save({ _from: "c/dstColl", _to: "u/client1" }); + + // Item to move + db.c.save({ _key: "item1", owner: "u/client1", creator: "u/client1", title: "Item" }); + db.owner.save({ _from: "c/item1", _to: "u/client1" }); + + // Link item to source collection (required by /move) + db.item.save({ _from: "c/srcColl", _to: "c/item1" }); + + // --- CALL /move --- + const response = request.get( + `${baseUrl}/col/move?client=client1&source=c/srcColl&dest=c/dstColl&items[]=c/item1`, + { json: true }, + ); + + // --- ASSERTIONS --- + expect(response.status).to.equal(200); + expect(response.json).to.deep.equal({}); // /move returns empty object + }); + + it("should return parent collections for an item", () => { + // --- FIXTURE --- + // Parent collection + db.c.save({ + _key: "parentColl", + owner: "u/client1", + creator: "u/client1", + title: "Parent", + }); + db.owner.save({ _from: "c/parentColl", _to: "u/client1" }); + + // Child item + db.d.save({ _key: "childItem", owner: "u/client1", creator: "u/client1", title: "Child" }); + db.owner.save({ _from: "d/childItem", _to: "u/client1" }); + + // Link child to parent + db.item.save({ _from: "c/parentColl", _to: "d/childItem" }); + + // --- CALL /get_parents --- + const response = request.get(`${baseUrl}/col/get_parents?client=client1&id=d/childItem`, { + json: true, + }); + + // --- ASSERTIONS --- + expect(response.status).to.equal(200); + 
expect(response.json).to.be.an("array"); + expect(response.json[0][0]).to.have.property("id", "c/parentColl"); + expect(response.json[0][0]).to.have.property("title", "Parent"); + }); + + it("should include the child item if inclusive=true", () => { + db.c.save({ + _key: "parentColl", + owner: "u/client1", + creator: "u/client1", + title: "Parent", + }); + db.owner.save({ _from: "c/parentColl", _to: "u/client1" }); + + db.d.save({ _key: "childItem", owner: "u/client1", creator: "u/client1", title: "Child" }); + db.owner.save({ _from: "d/childItem", _to: "u/client1" }); + + db.item.save({ _from: "c/parentColl", _to: "d/childItem" }); + const response = request.get( + `${baseUrl}/col/get_parents?client=client1&id=d/childItem&inclusive=true`, + { json: true }, + ); + + expect(response.status).to.equal(200); + expect(response.json).to.be.an("array"); + // The first element of the first path should be the child itself + expect(response.json[0][0]).to.have.property("id", "d/childItem"); + expect(response.json[0][0]).to.have.property("title", "Child"); + }); + + it("should return the correct offset of an item in a collection", () => { + // --- FIXTURE --- + const clientId = "client1"; + + // Parent collection + db.c.save({ + _key: "coll1", + owner: "u/client1", + creator: "u/client1", + title: "My Collection", + }); + db.owner.save({ _from: "c/coll1", _to: "u/client1" }); + + // Items in the collection + for (let i = 1; i <= 10; i++) { + const itemId = `d/item${i}`; + db.d.save({ + _key: `item${i}`, + owner: "u/client1", + creator: "u/client1", + title: `Item ${i}`, + }); + db.owner.save({ _from: itemId, _to: "u/client1" }); + db.item.save({ _from: "c/coll1", _to: itemId }); + } + + // --- CALL /get_offset --- + const pageSize = 3; + const targetItem = "d/item5"; + + const response = request.get( + `${baseUrl}/col/get_offset?client=${clientId}&id=c/coll1&item=${targetItem}&page_sz=${pageSize}`, + { json: true }, + ); + + expect(response.status).to.equal(200); + + // Items 
1-3 -> offset 0 + // Items 4-6 -> offset 3 + // Item 5 is in second page, offset should be 3 + expect(response.json).to.have.property("offset", 3); + }); + + it("should NOT crash if item is not found when getting offset", () => { + // --- FIXTURE --- + db.c.save({ + _key: "collOffsetFail", + owner: "u/client1", + creator: "u/client1", + title: "Offset Test", + }); + db.owner.save({ _from: "c/collOffsetFail", _to: "u/client1" }); + + // No items added at all + + const response = request.get( + `${baseUrl}/col/get_offset?client=client1&id=c/collOffsetFail&item=d/doesNotExist&page_sz=5`, + { json: true }, + ); + + // Should return a controlled error, NOT a 500 crash + expect(response.status).to.not.equal(500); + expect(response.json).to.have.property("error"); + }); + it("should NOT crash if page_sz is invalid in get_offset", () => { + db.c.save({ + _key: "collBadPage", + owner: "u/client1", + creator: "u/client1", + title: "Bad Page", + }); + db.owner.save({ _from: "c/collBadPage", _to: "u/client1" }); + + const response = request.get( + `${baseUrl}/col/get_offset?client=client1&id=c/collBadPage&item=d/item1&page_sz=0`, + { json: true }, + ); + + expect(response.status).to.not.equal(500); + }); + + it("should return a list of published collections for a client", () => { + const clientId = "client1"; + const userId = `u/${clientId}`; + + // --- Ensure collections exist --- + if (!db._collection("c")) db._createDocumentCollection("c"); + if (!db._collection("u")) db._createDocumentCollection("u"); + if (!db._collection("owner")) db._createEdgeCollection("owner"); + + // --- Ensure test user exists --- + let userDoc = db.u.firstExample({ _key: clientId }); + if (!userDoc) { + userDoc = db.u.save({ _key: clientId, name: "Client One" }); + } + + // --- Clean up previous test data --- + db.c.truncate(); + db.owner.truncate(); + + // --- Create some published collections --- + const publishedColls = [ + { + _key: "pub1", + owner: userDoc._id, + creator: userDoc._id, + 
title: "Alpha", + public: true, + }, + { _key: "pub2", owner: userDoc._id, creator: userDoc._id, title: "Beta", public: true }, + { + _key: "pub3", + owner: userDoc._id, + creator: userDoc._id, + title: "Gamma", + public: true, + }, + ]; + + publishedColls.forEach((c) => { + const collDoc = db.c.save(c); // Save collection + db.owner.save({ _from: collDoc._id, _to: userDoc._id }); // Edge must use real _id + }); + + // --- CALL /published/list without pagination --- + let response = request.get(`${baseUrl}/col/published/list?client=${clientId}`, { + json: true, + }); + expect(response.status).to.equal(200); + const titles = response.json.map((x) => x.title); + expect(titles).to.include.members(["Alpha", "Beta", "Gamma"]); + + // --- CALL /published/list with pagination --- + const offset = 1; + const count = 2; + response = request.get( + `${baseUrl}/col/published/list?client=${clientId}&offset=${offset}&count=${count}`, + { json: true }, + ); + expect(response.status).to.equal(200); + const paged = response.json; + const pagingInfo = paged.pop().paging; + expect(pagingInfo).to.deep.equal({ off: offset, cnt: count, tot: 3 }); + }); +}); diff --git a/core/database/foxx/tests/config_router.test.js b/core/database/foxx/tests/config_router.test.js new file mode 100644 index 000000000..593156f6a --- /dev/null +++ b/core/database/foxx/tests/config_router.test.js @@ -0,0 +1,66 @@ +"use strict"; +// NOTE: completion of tests requires successful run of user_fixture.js script + +// Need to pull enum from support +const g_lib = require("../api/support"); + +// Integration test of API +const { expect } = require("chai"); +const request = require("@arangodb/request"); +const { baseUrl } = module.context; +const { db } = require("@arangodb"); + +const config_base_url = `${baseUrl}/config`; + +describe("unit_config_router: test /msg/daily route", () => { + after(function () { + const col = db._collection("config"); + if (col) col.truncate(); + }); + + beforeEach(() => { + let col 
= db._collection("config"); + if (col) { + col.truncate(); + } else { + db._create("config"); + } + }); + + it("should return an empty object when no daily message exists", () => { + // arrange + const url = `${config_base_url}/msg/daily`; + + // act + const response = request.get(url); + + // assert + expect(response.status).to.equal(200); + + const body = JSON.parse(response.body); + expect(body).to.deep.equal({}); // empty object expected + }); + + it("should return the daily message when it exists", () => { + // arrange: insert a config entry + db.config.save({ + _key: "msg_daily", + msg: "Hello world!", + }); + + const url = `${config_base_url}/msg/daily`; + + // act + const response = request.get(url); + + // assert + expect(response.status).to.equal(200); + + const body = JSON.parse(response.body); + + // The route strips _id, _key, _rev + expect(body).to.deep.equal({ + msg: "Hello world!", + }); + }); +}); diff --git a/core/database/foxx/tests/data_router.test.js b/core/database/foxx/tests/data_router.test.js new file mode 100644 index 000000000..72879cc16 --- /dev/null +++ b/core/database/foxx/tests/data_router.test.js @@ -0,0 +1,835 @@ +"use strict"; + +const { expect } = require("chai"); +const request = require("@arangodb/request"); +const { db } = require("@arangodb"); +const { baseUrl } = module.context; + +const data_base_url = `${baseUrl}/dat`; + +after(function () { + // clean up all collections used in the test + const collections = ["u", "d", "c", "repo", "alloc", "loc", "owner", "alias", "item", "dep"]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) col.truncate(); + }); +}); + +describe("unit_data_router: the Foxx microservice data_router create/ endpoint", () => { + beforeEach(() => { + const collections = [ + "u", + "d", + "c", + "repo", + "alloc", + "loc", + "owner", + "alias", + "item", + "dep", + ]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) + col.truncate(); // truncate 
if exists + else db._create(name); // create if missing + }); + + // Create the fake user + db.u.save({ + _key: "fakeUser", + _id: "u/fakeUser", + name: "fake user", + name_first: "fake", + name_last: "user", + is_admin: true, + max_coll: 50, + max_proj: 10, + max_sav_qry: 20, + email: "fakeuser@gmail.com", + }); + + // Create a fake repo + db.repo.save({ + _key: "fakeRepo", + path: "/tmp/fakeRepo", + }); + + // Create allocation for fakeUser -> fakeRepo + db.alloc.save({ + _from: "u/fakeUser", // user _id + _to: "repo/fakeRepo", // repo _id + rec_count: 0, // current record count + rec_limit: 10, // max number of records allowed + data_size: 0, // current data size + data_limit: 100000000, // max data size + state: "active", + }); + + db.owner.save({ + _from: "c/root_fakeUser", + _to: "u/fakeUser", + }); + // Create a root collection for fakeUser + if (!db._collection("c")) db._create("c"); + db.c.save({ + _key: "u_fakeUser_root", + }); + }); + it("should create exactly one data record", () => { + const res = request.post(`${data_base_url}/create?client=fakeUser`, { + body: { + title: "My First Data", + }, + json: true, + }); + + expect(res.statusCode).to.equal(200); + + const body = typeof res.body === "string" ? JSON.parse(res.body) : res.body; + + expect(body).to.have.property("results"); + expect(body.results).to.be.an("array").with.lengthOf(1); + }); + + it("should create multiple data records in a batch", () => { + const records = [ + { title: "First Batch Record" }, + { title: "Second Batch Record" }, + { title: "Third Batch Record" }, + ]; + + const res = request.post(`${data_base_url}/create/batch?client=fakeUser`, { + body: records, + json: true, + }); + + expect(res.statusCode).to.equal(200); + + // parse body in case it's returned as a string + const body = typeof res.body === "string" ? 
JSON.parse(res.body) : res.body; + + expect(body).to.have.property("results"); + expect(body.results).to.be.an("array").with.lengthOf(records.length); + + // verify each record was created with the correct title and an ID + records.forEach((r, i) => { + expect(body.results[i]).to.have.property("title", r.title); + expect(body.results[i]).to.have.property("id").that.is.a("string"); + }); + }); + + it("should update an existing data record", () => { + //CREATING NEW RECORD + let res = request.post(`${data_base_url}/create?client=fakeUser`, { + body: { + title: "Title Original", + }, + json: true, + }); + const body = typeof res.body === "string" ? JSON.parse(res.body) : res.body; + + const recordId = body.results[0].id; + + //UPDATING EXISTING RECORD + res = request.post(`${data_base_url}/update?client=fakeUser`, { + body: { + id: recordId, + title: "New Title", + }, + json: true, + }); + expect(res.statusCode).to.equal(200); + const updateBody = typeof res.body === "string" ? JSON.parse(res.body) : res.body; + + // ASSERTION: verify the title actually changed + expect(updateBody.results[0].title).to.equal("New Title"); + }); + + it("should update multiple data records in a batch", () => { + // Step 1: create two records + const createRes = request.post(`${data_base_url}/create/batch?client=fakeUser`, { + body: [{ title: "Batch Original 1" }, { title: "Batch Original 2" }], + json: true, + }); + + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? JSON.parse(createRes.body) : createRes.body; + + const ids = createBody.results.map((r) => r.id); + + // Step 2: update both records + const updateRes = request.post(`${data_base_url}/update/batch?client=fakeUser`, { + body: [ + { id: ids[0], title: "Batch Updated 1" }, + { id: ids[1], title: "Batch Updated 2" }, + ], + json: true, + }); + + expect(updateRes.statusCode).to.equal(200); + + const updateBody = + typeof updateRes.body === "string" ? 
JSON.parse(updateRes.body) : updateRes.body; + + // Step 3: assertions + expect(updateBody).to.have.property("updates"); + expect(updateBody.updates).to.be.an("array").with.lengthOf(2); + + const titles = updateBody.updates.map((r) => r.title); + expect(titles).to.include("Batch Updated 1"); + expect(titles).to.include("Batch Updated 2"); + }); + + it("should update the metadata schema validation error message on a data record", () => { + // Step 1: create a data record + const createRes = request.post(`${data_base_url}/create?client=fakeUser`, { + body: { + title: "Record With Schema Error", + }, + json: true, + }); + + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? JSON.parse(createRes.body) : createRes.body; + + const recordId = createBody.results[0].id; + + // Step 2: update md_err_msg via plain-text body + const errorMessage = "Schema validation failed: missing required field"; + + const updateRes = request.post( + `${data_base_url}/update/md_err_msg?id=${recordId}&client=fakeUser`, + { + body: errorMessage, + headers: { + "content-type": "text/plain", + }, + }, + ); + + expect(updateRes.statusCode).to.equal(204); + + // Step 3: verify DB was updated + const doc = db._document(recordId); + + expect(doc).to.have.property("md_err", true); + expect(doc).to.have.property("md_err_msg", errorMessage); + }); + + it("should update the size of an existing data record and update allocation usage", () => { + // Step 1: create a data record + const createRes = request.post(`${data_base_url}/create?client=fakeUser`, { + body: { + title: "Data Record With Size", + }, + json: true, + }); + + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? 
JSON.parse(createRes.body) : createRes.body; + + const recordId = createBody.results[0].id; + + // Step 2: create required loc edge (owner & alloc already exist from beforeEach) + db.loc.save({ + _from: recordId, + _to: "repo/fakeRepo", + }); + + // Step 3: verify initial values + let dataDoc = db._document(recordId); + expect(dataDoc.size || 0).to.equal(0); + + let allocDoc = db.alloc.firstExample({ + _from: "u/fakeUser", + _to: "repo/fakeRepo", + }); + expect(allocDoc.data_size).to.equal(0); + + // Step 4: call update/size + const newSize = 4096; + + const updateRes = request.post(`${data_base_url}/update/size?client=fakeUser`, { + body: { + records: [ + { + id: recordId, + size: newSize, + }, + ], + }, + json: true, + }); + + expect(updateRes.statusCode).to.equal(200); + + // Step 5: verify data record updated + dataDoc = db._document(recordId); + expect(dataDoc.size).to.equal(newSize); + expect(dataDoc).to.have.property("ut"); + expect(dataDoc).to.have.property("dt"); + + // Step 6: verify allocation updated + allocDoc = db.alloc.firstExample({ + _from: "u/fakeUser", + _to: "repo/fakeRepo", + }); + expect(allocDoc.data_size).to.equal(newSize); + }); + + it("should retrieve a data record by ID via /view", () => { + // Step 1: create a data record + const createRes = request.post(`${data_base_url}/create?client=fakeUser`, { + body: { title: "Record To View" }, + json: true, + }); + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? 
JSON.parse(createRes.body) : createRes.body; + const recordId = createBody.results[0].id; + + // Step 2: optionally create loc edge (if your /view code expects it) + db.loc.save({ + _from: recordId, + _to: "repo/fakeRepo", + }); + + // Step 3: call the /view endpoint + const viewRes = request.get(`${data_base_url}/view`, { + qs: { + client: "fakeUser", + id: recordId, + }, + headers: { + "x-correlation-id": "test-corr-001", + }, + json: true, + }); + + expect(viewRes.statusCode).to.equal(200); + + const viewBody = typeof viewRes.body === "string" ? JSON.parse(viewRes.body) : viewRes.body; + expect(viewBody).to.have.property("results").that.is.an("array").with.lengthOf(1); + + const data = viewBody.results[0]; + expect(data).to.have.property("id", recordId); + expect(data).to.have.property("title", "Record To View"); + expect(data).to.not.have.property("_id"); + expect(data).to.not.have.property("_key"); + expect(data).to.not.have.property("_rev"); + }); + it("should export one or more data records as JSON strings via /export", () => { + // Step 1: create two data records + const createRes = request.post(`${data_base_url}/create/batch?client=fakeUser`, { + body: [{ title: "Export Record 1" }, { title: "Export Record 2" }], + json: true, + }); + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? 
JSON.parse(createRes.body) : createRes.body; + const recordIds = createBody.results.map((r) => r.id); + + // Step 2: optionally create loc edges if required by export + recordIds.forEach((id) => { + db.loc.save({ + _from: id, + _to: "repo/fakeRepo", + }); + }); + + // Step 3: call the /export endpoint + const exportRes = request.post(`${data_base_url}/export?client=fakeUser`, { + body: { + id: recordIds, + }, + headers: { + "x-correlation-id": "test-corr-002", + }, + json: true, + }); + + expect(exportRes.statusCode).to.equal(200); + + // Step 4: parse and validate exported data + const exportBody = + typeof exportRes.body === "string" ? JSON.parse(exportRes.body) : exportRes.body; + + expect(exportBody).to.be.an("array").with.lengthOf(recordIds.length); + + exportBody.forEach((jsonStr, idx) => { + const data = JSON.parse(jsonStr); + expect(data).to.have.property("id", recordIds[idx]); + expect(data).to.have.property("title").that.includes("Export Record"); + expect(data).to.not.have.property("_id"); + expect(data).to.not.have.property("_key"); + expect(data).to.not.have.property("_rev"); + + // Optional: verify deps array exists + if (data.deps) expect(data.deps).to.be.an("array"); + }); + }); + it("should return a dependency graph for a data record", () => { + // Step 1: create three data records + const createRes = request.post(`${data_base_url}/create/batch?client=fakeUser`, { + body: [ + { title: "Root Record" }, + { title: "Child Record" }, + { title: "Grandchild Record" }, + ], + json: true, + }); + + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? 
JSON.parse(createRes.body) : createRes.body; + + const [rootId, childId, grandchildId] = createBody.results.map((r) => r.id); + + // Step 2: create dependency edges + // root -> child -> grandchild + db.dep.save({ + _from: rootId, + _to: childId, + type: 1, + }); + + db.dep.save({ + _from: childId, + _to: grandchildId, + type: 1, + }); + + // Step 3: call dep graph endpoint + const res = request.get( + `${data_base_url}/dep/graph/get?client=fakeUser&id=${encodeURIComponent(rootId)}`, + ); + + expect(res.statusCode).to.equal(200); + + const body = typeof res.body === "string" ? JSON.parse(res.body) : res.body; + + // Step 4: assertions + expect(body).to.be.an("array"); + expect(body.length).to.be.greaterThan(0); + + const ids = body.map((n) => n.id); + + expect(ids).to.include(rootId); + expect(ids).to.include(childId); + expect(ids).to.include(grandchildId); + + // Root node should have deps + const rootNode = body.find((n) => n.id === rootId); + expect(rootNode).to.have.property("deps"); + expect(rootNode.deps).to.be.an("array"); + + // Child node should reference root or grandchild + const childNode = body.find((n) => n.id === childId); + expect(childNode).to.have.property("id", childId); + + // Grandchild node should exist + const grandchildNode = body.find((n) => n.id === grandchildId); + expect(grandchildNode).to.have.property("id", grandchildId); + }); + it("should lock multiple data records", () => { + // Step 1: create multiple data records + const createRes = request.post(`${data_base_url}/create/batch?client=fakeUser`, { + body: [ + { title: "Lock Test 1" }, + { title: "Lock Test 2" }, + { title: "Lock Test 3" }, + { title: "Lock Test 4" }, + ], + json: true, + }); + + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? 
JSON.parse(createRes.body) : createRes.body; + + const ids = createBody.results.map((r) => r.id); + + // sanity check + expect(ids).to.have.lengthOf(4); + + // Step 2: lock the records + const lockRes = request.get( + `${data_base_url}/lock?client=fakeUser&lock=true&ids=${ids.join("&ids=")}`, + ); + + expect(lockRes.statusCode).to.equal(200); + + const lockBody = typeof lockRes.body === "string" ? JSON.parse(lockRes.body) : lockRes.body; + + // Step 3: verify response + expect(lockBody).to.be.an("array").with.lengthOf(ids.length); + + lockBody.forEach((r) => { + expect(r).to.have.property("id"); + expect(r).to.have.property("locked", true); + }); + + // Step 4: verify DB state + ids.forEach((id) => { + const doc = db._document(id); + expect(doc.locked).to.equal(true); + }); + }); + it("should return the raw data local path for a data record", () => { + // Step 1: create a data record + const createRes = request.post(`${data_base_url}/create?client=fakeUser`, { + body: { + title: "Path Test Record", + }, + json: true, + }); + + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? JSON.parse(createRes.body) : createRes.body; + + const recordId = createBody.results[0].id; + + // Step 2: ensure repo has a domain + const repo = db.repo.document("repo/fakeRepo"); + db.repo.update(repo._id, { + domain: "local", + path: "/tmp/fakeRepo", + exp_path: "/tmp/fakeRepo", + }); + + // Step 3: create loc edge for the record + db.loc.save({ + _from: recordId, + _to: "repo/fakeRepo", + path: "/tmp/fakeRepo/data/file.bin", + }); + + // Step 4: call /path + const res = request.get( + `${data_base_url}/path?client=fakeUser&id=${recordId}&domain=local`, + ); + + expect(res.statusCode).to.equal(200); + + const body = typeof res.body === "string" ? 
JSON.parse(res.body) : res.body; + + // Step 5: assertions + expect(body).to.have.property("path"); + expect(body.path).to.be.a("string"); + expect(body.path.length).to.be.greaterThan(0); + }); + it("should list data records by allocation for a repo", () => { + // Step 1: create two data records + const createRes = request.post(`${data_base_url}/create/batch?client=fakeUser`, { + body: [{ title: "Alloc Record One" }, { title: "Alloc Record Two" }], + json: true, + }); + + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? JSON.parse(createRes.body) : createRes.body; + + const ids = createBody.results.map((r) => r.id); + expect(ids).to.have.lengthOf(2); + + // Step 2: ensure repo exists (already created in beforeEach) + const repoId = "repo/fakeRepo"; + + // Step 3: create loc edges linking records to repo + ids.forEach((id) => { + db.loc.save({ + _from: id, + _to: repoId, + uid: "u/fakeUser", // required by query filter e.uid == @uid + }); + }); + + // Step 4: call list/by_alloc + const res = request.get(`${data_base_url}/list/by_alloc?client=fakeUser&repo=${repoId}`); + + expect(res.statusCode).to.equal(200); + + const body = typeof res.body === "string" ? 
JSON.parse(res.body) : res.body; + + // Step 5: assertions + + expect(body).to.be.an("array"); + const uniqueIds = new Set(body.map((r) => r.id)); + expect(uniqueIds.size).to.equal(2); + + const titles = body.map((r) => r.title); + expect(titles).to.include("Alloc Record One"); + expect(titles).to.include("Alloc Record Two"); + + body.forEach((rec) => { + expect(rec).to.have.property("id"); + expect(rec).to.have.property("owner"); + expect(rec).to.have.property("creator"); + }); + }); + + it("should initialize a data get task in check-only mode", () => { + // Step 1: create a data record + const createRes = request.post(`${data_base_url}/create?client=fakeUser`, { + body: { + title: "Get IT Test Record", + }, + json: true, + }); + + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? JSON.parse(createRes.body) : createRes.body; + + const recordId = createBody.results[0].id; + + // Step 2: create required loc edge (used by get path resolution) + db.loc.save({ + _from: recordId, + _to: "repo/fakeRepo", + path: "/tmp/fakeRepo/data/file.bin", + }); + + // Step 3: call /get in check-only mode + const res = request.post(`${data_base_url}/get?client=fakeUser`, { + body: { + id: [recordId], + check: true, // avoids real Globus transfer + }, + headers: { + "x-correlation-id": "test-corr-get-it-001", + }, + json: true, + }); + + expect(res.statusCode).to.equal(200); + + const body = typeof res.body === "string" ? JSON.parse(res.body) : res.body; + + // Step 4: assertions + expect(body).to.be.an("object"); + }); + + it("should initialize a data put task in check-only mode", () => { + // Step 1: create a data record + const createRes = request.post(`${data_base_url}/create?client=fakeUser`, { + body: { + title: "Put Test Record", + }, + json: true, + }); + + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? 
JSON.parse(createRes.body) : createRes.body; + + const recordId = createBody.results[0].id; + + // Step 2: call /put in check-only mode + const res = request.post(`${data_base_url}/put?client=fakeUser`, { + body: { + id: [recordId], + check: true, // avoid real Globus upload + ext: ".bin", + }, + headers: { + "x-correlation-id": "test-corr-put-it-001", + }, + json: true, + }); + + expect(res.statusCode).to.equal(200); + + const body = typeof res.body === "string" ? JSON.parse(res.body) : res.body; + + // Step 3: IT-safe assertions + expect(body).to.be.an("object"); + }); + + it("should initialize an allocation change task for data records", () => { + // Step 1: create two data records + const createRes = request.post(`${data_base_url}/create/batch?client=fakeUser`, { + body: [{ title: "AllocChg Record 1" }, { title: "AllocChg Record 2" }], + json: true, + }); + + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? JSON.parse(createRes.body) : createRes.body; + + const ids = createBody.results.map((r) => r.id); + expect(ids).to.have.lengthOf(2); + + // Step 2: create loc edges pointing to the original repo + ids.forEach((id) => { + db.loc.save({ + _from: id, + _to: "repo/fakeRepo", + uid: "u/fakeUser", + }); + }); + + // Step 3: create a new repo + db.repo.save({ + _key: "newRepo", + path: "/tmp/newRepo", + }); + + // Step 4: create allocation for fakeUser -> newRepo + db.alloc.save({ + _from: "u/fakeUser", + _to: "repo/newRepo", + rec_count: 0, + rec_limit: 10, + data_size: 0, + data_limit: 100000000, + state: "active", + }); + + // Step 5: call alloc_chg in check-only mode + const res = request.post(`${data_base_url}/alloc_chg?client=fakeUser`, { + body: { + ids, + repo_id: "repo/newRepo", + check: true, // IMPORTANT: do not actually move data + }, + headers: { + "x-correlation-id": "test-corr-alloc-chg-001", + }, + json: true, + }); + + expect(res.statusCode).to.equal(200); + + const body = typeof res.body === 
"string" ? JSON.parse(res.body) : res.body; + + // Step 6: assertions + expect(body).to.be.an("object"); + const locEdges = db.loc.byExample({ _to: "repo/newRepo" }).toArray(); + expect(locEdges.length).to.equal(0); + }); + it("should initialize an owner change task for data records", () => { + db.c.save({ + _key: "u_fakeUser_newOwner", + }); + + const newCollId = "c/u_fakeUser_newOwner"; + + // 🔑 REQUIRED: owner edge for destination collection + db.owner.save({ + _from: newCollId, + _to: "u/fakeUser", + }); + // Step 1: create multiple data records + const createRes = request.post(`${data_base_url}/create/batch?client=fakeUser`, { + body: [ + { title: "OwnerChg Record 1" }, + { title: "OwnerChg Record 2" }, + { title: "OwnerChg Record 3" }, + { title: "OwnerChg Record 4" }, + ], + json: true, + }); + + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? JSON.parse(createRes.body) : createRes.body; + + const ids = createBody.results.map((r) => r.id); + expect(ids).to.have.lengthOf(4); + + // Step 2: ensure original owner collection exists + const originalCollId = "c/u_fakeUser_root"; + + // Step 3: create owner edges for each record (REQUIRED) + ids.forEach((id) => { + db.owner.save({ + _from: id, + _to: originalCollId, + }); + }); + + // Step 5: call owner_chg in check-only mode + const res = request.post(`${data_base_url}/owner_chg?client=fakeUser`, { + body: { + ids, + coll_id: newCollId, + check: true, + }, + headers: { + "x-correlation-id": "test-corr-owner-chg-001", + }, + json: true, + }); + + expect(res.statusCode).to.equal(200); + + const body = typeof res.body === "string" ? 
JSON.parse(res.body) : res.body; + + // Step 6: assertions + expect(body).to.be.an("object"); + }); + it("should initialize a delete task for data records", () => { + // Step 1: create data records + const createRes = request.post(`${data_base_url}/create/batch?client=fakeUser`, { + body: [{ title: "Delete Test Record 1" }, { title: "Delete Test Record 2" }], + json: true, + }); + + expect(createRes.statusCode).to.equal(200); + + const createBody = + typeof createRes.body === "string" ? JSON.parse(createRes.body) : createRes.body; + + const ids = createBody.results.map((r) => r.id); + expect(ids).to.have.lengthOf(2); + + // Step 2: sanity check — records exist + ids.forEach((id) => { + const doc = db.d.document(id, true); // safe: returns null if not found + expect(doc).to.be.an("object"); + expect(doc).to.have.property("_id", id); + }); + + // Step 3: call delete route + const res = request.post(`${data_base_url}/delete?client=fakeUser`, { + body: { + ids, + }, + headers: { + "x-correlation-id": "test-corr-delete-001", + }, + json: true, + }); + + expect(res.statusCode).to.equal(200); + + const body = typeof res.body === "string" ? 
JSON.parse(res.body) : res.body; + + // Step 4: assertions — task created + expect(body).to.be.an("object"); + expect(body).to.have.property("task"); + expect(body.task).to.have.property("_id"); + expect(body.task).to.have.property("status"); + }); +}); diff --git a/core/database/foxx/tests/group_router.test.js b/core/database/foxx/tests/group_router.test.js new file mode 100644 index 000000000..bd3441df2 --- /dev/null +++ b/core/database/foxx/tests/group_router.test.js @@ -0,0 +1,142 @@ +"use strict"; + +const { expect } = require("chai"); +const request = require("@arangodb/request"); +const { db } = require("@arangodb"); +const { baseUrl } = module.context; + +const group_base_url = `${baseUrl}/grp`; + +describe("unit_group_router: test group router endpoints", () => { + beforeEach(() => { + const collections = ["u", "g", "owner", "member", "p", "uuid", "accn", "admin"]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) col.truncate(); + else db._create(name); + }); + }); + + after(() => { + const collections = ["u", "g", "owner", "member", "p", "uuid", "accn", "admin"]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) col.truncate(); + }); + }); + + // ==================================================================== + // /create + // ==================================================================== + + it("should create a new group", () => { + db.u.save({ + _key: "fakeUser", + _id: "u/fakeUser", + name_first: "Fake", + name_last: "User", + is_admin: true, + email: "fake@user.com", + }); + + const url = `${group_base_url}/create?client=u/fakeUser&gid=testgroup&title=Test+Group`; + + const response = request.get(url, { + headers: { "x-correlation-id": "test-correlation-id" }, + }); + + expect(response.status).to.equal(200); + const body = JSON.parse(response.body); + + expect(body).to.be.an("array"); + expect(body[0]).to.have.property("gid", "testgroup"); + expect(body[0]).to.have.property("title", 
"Test Group"); + expect(body[0]).to.have.property("members"); + expect(body[0].members).to.be.an("array").that.is.empty; + }); + + // ==================================================================== + // /list + // ==================================================================== + + it("should list groups for the user", () => { + db.u.save({ _key: "fakeUser", _id: "u/fakeUser", is_admin: true }); + + request.get(`${group_base_url}/create?client=u/fakeUser&gid=a&title=A`); + request.get(`${group_base_url}/create?client=u/fakeUser&gid=b&title=B`); + + const response = request.get(`${group_base_url}/list?client=u/fakeUser`); + expect(response.status).to.equal(200); + + const list = JSON.parse(response.body); + + expect(list.length).to.equal(2); + expect(list.map((g) => g.gid)).to.have.members(["a", "b"]); + }); + + // ==================================================================== + // /delete + // ==================================================================== + + it("should delete a group", () => { + db.u.save({ _key: "fakeUser", _id: "u/fakeUser", is_admin: true }); + + request.get(`${group_base_url}/create?client=u/fakeUser&gid=testgroup&title=A`); + + const delUrl = `${group_base_url}/delete?client=u/fakeUser&gid=testgroup&title=A`; + const response = request.get(delUrl); + + expect(response.status).to.equal(204); + + // Now verify it is actually deleted + const list = request.get(`${group_base_url}/list?client=u/fakeUser`); + const groups = JSON.parse(list.body); + + expect(groups).to.be.an("array").that.is.empty; + }); + // ==================================================================== + // /view + // ==================================================================== + + it("should view an existing group", () => { + db.u.save({ _key: "fakeUser", _id: "u/fakeUser", is_admin: true }); + + request.get(`${group_base_url}/create?client=u/fakeUser&gid=viewtest&title=Viewer`); + + const response = 
request.get(`${group_base_url}/view?client=u/fakeUser&gid=viewtest`); + expect(response.status).to.equal(200); + + const body = JSON.parse(response.body); + + expect(body[0]).to.include({ + gid: "viewtest", + title: "Viewer", + }); + }); + + // ==================================================================== + // /update + // ==================================================================== + it("should update an existing group", () => { + db.u.save({ _key: "fakeUser", _id: "u/fakeUser", is_admin: true }); + + db.g.save({ + uid: "u/fakeUser", + gid: "updateMe", + title: "OldTitle", + desc: "Old description", + }); + // Create + request.get(`${group_base_url}/create?client=u/fakeUser&gid=updateMe&title=OldTitle`); + + // Update title via endpoint + const response = request.get( + `${group_base_url}/update?client=u/fakeUser&gid=updateMe&title=NewTitle`, + ); + + expect(response.status).to.equal(200); + + const updated = JSON.parse(response.body)[0]; + expect(updated.title).to.equal("NewTitle"); + }); +}); diff --git a/core/database/foxx/tests/metrics_router.test.js b/core/database/foxx/tests/metrics_router.test.js new file mode 100644 index 000000000..7df39b4a3 --- /dev/null +++ b/core/database/foxx/tests/metrics_router.test.js @@ -0,0 +1,217 @@ +"use strict"; + +const g_lib = require("../api/support"); +const { expect } = require("chai"); +const request = require("@arangodb/request"); +const { baseUrl } = module.context; +const { db } = require("@arangodb"); + +const metrics_base_url = `${baseUrl}/metrics`; + +describe("unit_metrics_router: /users/active endpoint", () => { + after(function () { + const collections = ["metrics", "u"]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) col.truncate(); + }); + }); + beforeEach(() => { + const collections = ["metrics", "u"]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) { + col.truncate(); + } else { + db._create(name); + } + }); + }); + + it("should 
return active users within the default 15 minutes", () => { + const now = Math.floor(Date.now() / 1000); + + db.metrics.save([ + { + _key: "m1", + type: "msgcnt_user", + uid: "u/fakeUser", + total: 5, + timestamp: now - 60 * 5, // 5 minutes ago + }, + { + _key: "m2", + type: "msgcnt_user", + uid: "u/otherUser", + total: 10, + timestamp: now - 60 * 20, // 20 minutes ago + }, + ]); + + const request_string = `${metrics_base_url}/users/active`; + const response = request.get(request_string); + + expect(response.status).to.equal(200); + + const body = JSON.parse(response.body); + + // u/fakeUser should appear, u/otherUser should not + expect(body).to.have.property("u/fakeUser"); + expect(body["u/fakeUser"]).to.equal(5); + expect(body).to.not.have.property("u/otherUser"); + }); + + it("should respect the 'since' query parameter", () => { + const now = Math.floor(Date.now() / 1000); + + db.metrics.save([ + { + _key: "m3", + type: "msgcnt_user", + uid: "u/fakeUser", + total: 7, + timestamp: now - 60 * 30, // 30 minutes ago + }, + ]); + + // since=45 → include 30-min-old record + let response = request.get(`${metrics_base_url}/users/active?since=45`); + expect(response.status).to.equal(200); + let body = JSON.parse(response.body); + expect(body).to.have.property("u/fakeUser"); + + // since=15 → exclude 30-min-old record + response = request.get(`${metrics_base_url}/users/active?since=15`); + expect(response.status).to.equal(200); + body = JSON.parse(response.body); + expect(body).to.not.have.property("u/fakeUser"); + }); + + it("should return an empty object if no users are active", () => { + const response = request.get(`${metrics_base_url}/users/active`); + expect(response.status).to.equal(200); + const body = JSON.parse(response.body); + expect(body).to.deep.equal({}); + }); + + it("POST /msg_count/update should succeed and write metrics", () => { + // create user explicitly + db.u.save({ + _key: "fakeUser", + _id: "u/fakeUser", + name: "Fake", + email: 
"fake@example.com", + is_admin: true, + }); + + const payload = { + timestamp: Math.floor(Date.now() / 1000), + total: 99, + uids: { + a: { tot: 5, msg: "hello" }, + b: { tot: 7, msg: "yo" }, + }, + }; + + const res = request.post(`${metrics_base_url}/msg_count/update?client=u/fakeUser`, { + body: payload, + json: true, + }); + + expect(res.status).to.equal(204); + + const docs = db.metrics.toArray(); + expect(docs.length).to.equal(3); // total + 2 users + }); + + it("GET /msg_count should return items within default 60 minutes", () => { + // create user explicitly + db.u.save({ + _key: "fakeUser", + _id: "u/fakeUser", + name: "Fake", + email: "fake@example.com", + is_admin: true, + }); + + const now = Math.floor(Date.now() / 1000); + + // recent item (should return) + db.metrics.save({ + timestamp: now, + type: "msgcnt_total", + total: 1, + }); + + // old item (should NOT return) + db.metrics.save({ + timestamp: now - 60 * 60 * 2, // older than 60 min + type: "msgcnt_total", + total: 999, + }); + + const res = request.get(`${metrics_base_url}/msg_count?client=u/fakeUser`); + + expect(res.status).to.equal(200); + + const arr = JSON.parse(res.body); + expect(arr.length).to.equal(1); + expect(arr[0].total).to.equal(1); + }); + + it("GET /msg_count should filter by type and uid", () => { + // create user explicitly + db.u.save({ + _key: "fakeUser", + _id: "u/fakeUser", + name: "Fake", + email: "fake@example.com", + is_admin: true, + }); + + const ts = Math.floor(Date.now() / 1000); + + db.metrics.save({ + timestamp: ts, + type: "msgcnt_user", + uid: "u1", + total: 10, + }); + + db.metrics.save({ + timestamp: ts, + type: "msgcnt_user", + uid: "u2", + total: 20, + }); + + const res = request.get( + `${metrics_base_url}/msg_count?client=u/fakeUser&type=msgcnt_user&uid=u2`, + ); + + expect(res.status).to.equal(200); + + const arr = JSON.parse(res.body); + expect(arr.length).to.equal(1); + expect(arr[0].uid).to.equal("u2"); + }); + + it("POST /purge should remove metrics 
older than timestamp", () => { + const now = Math.floor(Date.now() / 1000); + + db.metrics.save([ + { timestamp: now - 1000, type: "msgcnt_total", total: 1 }, // should be removed + { timestamp: now, type: "msgcnt_total", total: 2 }, // should stay + ]); + + const ts = now - 500; + const res = request.post(`${metrics_base_url}/purge?timestamp=${ts}`); + + expect(res.status).to.equal(204); + + const docs = db.metrics.toArray(); + //Equals 2 due to writing the purge doc + expect(docs.length).to.equal(2); + expect(docs[0].total).to.equal(2); + }); +}); diff --git a/core/database/foxx/tests/note_router.test.js b/core/database/foxx/tests/note_router.test.js new file mode 100644 index 000000000..9aac6dc7d --- /dev/null +++ b/core/database/foxx/tests/note_router.test.js @@ -0,0 +1,327 @@ +"use strict"; +// NOTE: completion of tests requires successful run of user_fixture.js script + +// Need to pull enum from support +const g_lib = require("../api/support"); + +// Integration test of API +const { expect } = require("chai"); +const request = require("@arangodb/request"); +const { baseUrl } = module.context; +const { db } = require("@arangodb"); + +const note_base_url = `${baseUrl}/note`; + +describe("unit_note_router: the Foxx microservice note_router /create endpoint", () => { + after(function () { + const collections = ["note", "d", "u"]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) col.truncate(); + }); + }); + + beforeEach(() => { + const collections = [ + { name: "u", type: "document" }, + { name: "d", type: "document" }, + { name: "note", type: "edge" }, // must be edge + ]; + + collections.forEach(({ name, type }) => { + const col = db._collection(name); + if (col) { + col.truncate(); + } else { + db._create(name, { type: type === "edge" ? 
3 : 2 }); + } + }); + }); + + it("should successfully run the search route", () => { + const user = db.u.save({ + _key: "testUser", + _id: "u/testUser", + name: "Test User", + email: "testuser@example.com", + is_admin: true, + }); + + const data = db.d.save({ + _key: "ID", + _id: "d/ID", + owner: user._id, + }); + + // Prepare the request + const request_string = `${note_base_url}/create?client=${encodeURIComponent( + user._id, + )}&subject=${encodeURIComponent(data._id)}&type=1&title=UnitTestTitle&comment=UnitTestComment`; + + // act + const response = request.post(request_string); + // assert + expect(response.status).to.equal(200); + }); + + it("should successfully update an existing annotation", () => { + // Arrange + const user = db.u.save({ + _key: "testUser", + _id: "u/testUser", + name: "Test User", + email: "testuser@example.com", + is_admin: true, + }); + + const data = db.d.save({ + _key: "ID", + _id: "d/ID", + owner: user._id, + }); + + // Create a note record (edge + document) + const note_doc = db.n.save({ + type: 1, + state: 0, + title: "OldTitle", + creator: user._id, + comments: [], + }); + + db.note.save({ + _from: data._id, + _to: note_doc._id, + }); + + // Act — call /update + const request_string = `${note_base_url}/update?client=${encodeURIComponent(user._id)}&id=${encodeURIComponent(note_doc._id)}&new_state=1&new_title=UpdatedTitle&comment=UpdatedComment`; + + const response = request.post(request_string); + + // Assert + expect(response.status).to.equal(200); + const body = JSON.parse(response.body); + expect(body.results[0]._id).to.equal(note_doc._id); + expect(body.results[0].state).to.equal(1); + }); + + it("should successfully edit an annotation comment", () => { + // Arrange: create user and data + const user = db.u.save({ + _key: "testUser", + _id: "u/testUser", + name: "Test User", + email: "testuser@example.com", + is_admin: true, + }); + + const data = db.d.save({ + _key: "ID", + _id: "d/ID", + owner: user._id, + }); + + // Create a 
note document with one comment + const note_doc = db.n.save({ + type: 1, + state: 0, + title: "Test Note", + creator: user._id, + comments: [ + { + user: user._id, + time: Math.floor(Date.now() / 1000), + comment: "Original Comment", + }, + ], + }); + + db.note.save({ + _from: data._id, + _to: note_doc._id, + }); + + // Act: edit the existing comment + const newComment = "Edited Comment"; + const request_string = `${note_base_url}/comment/edit?client=${encodeURIComponent(user._id)}&id=${encodeURIComponent(note_doc._id)}&comment=${encodeURIComponent(newComment)}&comment_idx=0`; + + const response = request.post(request_string); + + // Assert + expect(response.status).to.equal(200); + const body = JSON.parse(response.body); + const updatedNote = body.results[0]; + expect(updatedNote.comments[0].comment).to.equal(newComment); + }); + + it("should successfully view an annotation", () => { + // Arrange: create user and data record + const user = db.u.save({ + _key: "testUser", + _id: "u/testUser", + name: "Test User", + email: "testuser@example.com", + is_admin: true, + }); + + const data = db.d.save({ + _key: "ID", + _id: "d/ID", + owner: user._id, + }); + + // Create a note linked to the data record + const note_doc = db.n.save({ + type: 1, + state: 0, + title: "Viewable Note", + creator: user._id, + comments: [ + { + user: user._id, + time: Math.floor(Date.now() / 1000), + comment: "Initial Comment", + }, + ], + }); + + db.note.save({ + _from: data._id, + _to: note_doc._id, + }); + + // Act: call /note/view + const request_string = `${note_base_url}/view?client=${encodeURIComponent( + user._id, + )}&id=${encodeURIComponent(note_doc._id)}`; + + const response = request.get(request_string); + + // Assert + expect(response.status).to.equal(200); + const body = JSON.parse(response.body); + expect(body.results).to.be.an("array").that.is.not.empty; + expect(body.results[0]._id).to.equal(note_doc._id); + expect(body.results[0].title).to.equal("Viewable Note"); + }); + + 
it("should list all annotations for a subject", () => { + // Arrange: create a user and subject document + const user = db.u.save({ + _key: "testUser", + _id: "u/testUser", + name: "Test User", + email: "testuser@example.com", + is_admin: true, + }); + + const subject = db.d.save({ + _key: "subj1", + _id: "d/subj1", + owner: user._id, + }); + + // Create a few note documents and connect them + const note1 = db.n.save({ + type: 1, + state: 2, // active + title: "Subject Note 1", + creator: user._id, + comments: [ + { + user: user._id, + time: Math.floor(Date.now() / 1000), + comment: "First comment", + }, + ], + }); + + const note2 = db.n.save({ + type: 0, + state: 2, + title: "Subject Note 2", + creator: user._id, + comments: [], + }); + + db.note.save({ _from: subject._id, _to: note1._id }); + db.note.save({ _from: subject._id, _to: note2._id }); + + // Act: call /note/list/by_subject + const request_string = `${note_base_url}/list/by_subject?client=${encodeURIComponent( + user._id, + )}&subject=${encodeURIComponent(subject._id)}`; + + const response = request.get(request_string); + + // Assert + expect(response.status).to.equal(200); + const body = JSON.parse(response.body); + expect(body.results).to.be.an("array"); + expect(body.results.length).to.equal(2); + expect(body.results.map((r) => r.title)).to.include("Subject Note 1"); + }); + + it("should purge old closed annotations", () => { + // Arrange: create user and subject + const user = db.u.save({ + _key: "purgeUser", + _id: "u/purgeUser", + name: "Purge Tester", + email: "purgetest@example.com", + is_admin: true, + }); + + const subject = db.d.save({ + _key: "purgeSubj", + _id: "d/purgeSubj", + owner: user._id, + }); + + const now = Math.floor(Date.now() / 1000); + + // Create one old closed note (should be deleted) + const oldClosedNote = db.n.save({ + type: 1, + state: g_lib.NOTE_CLOSED, // usually 2 or something similar + title: "Old Closed Note", + creator: user._id, + ut: now - 100000, // 100k seconds 
old + parent_id: null, + }); + db.note.save({ _from: subject._id, _to: oldClosedNote._id }); + + // Create one recent closed note (should stay) + const recentClosedNote = db.n.save({ + type: 1, + state: g_lib.NOTE_CLOSED, + title: "Recent Closed Note", + creator: user._id, + ut: now, // current time + parent_id: null, + }); + db.note.save({ _from: subject._id, _to: recentClosedNote._id }); + + // Create one open note (should stay) + const openNote = db.n.save({ + type: 1, + state: 0, // open + title: "Open Note", + creator: user._id, + ut: now - 200000, + parent_id: null, + }); + db.note.save({ _from: subject._id, _to: openNote._id }); + + // Act: purge notes older than 50,000 seconds + const request_string = `${note_base_url}/purge?client=${encodeURIComponent( + user._id, + )}&age_sec=50000`; + + const response = request.get(request_string); + + // Assert: response should be OK + expect(response.status).to.equal(204); + }); +}); diff --git a/core/database/foxx/tests/proj_router.test.js b/core/database/foxx/tests/proj_router.test.js new file mode 100644 index 000000000..1d8e46272 --- /dev/null +++ b/core/database/foxx/tests/proj_router.test.js @@ -0,0 +1,517 @@ +"use strict"; + +const { expect } = require("chai"); +const request = require("@arangodb/request"); +const { db } = require("@arangodb"); +const { baseUrl } = module.context; + +const proj_base_url = `${baseUrl}/prj`; + +describe("unit_proj_router: test project create endpoint", () => { + beforeEach(() => { + const collections = [ + "u", + "p", + "repo", + "admin", + "owner", + "c", + "a", + "g", + "acl", + "member", + "ident", + "alias", + "uuid", + "accn", + ]; + + collections.forEach((name) => { + const col = db._collection(name); + if (col) col.truncate(); + else db._create(name); + }); + }); + + after(() => { + const collections = [ + "u", + "p", + "repo", + "admin", + "owner", + "c", + "a", + "g", + "acl", + "member", + "ident", + "alias", + "uuid", + "accn", + ]; + + collections.forEach((name) => { + 
const col = db._collection(name); + if (col) col.truncate(); + }); + }); + + it("should create a new project when client is a repo admin", () => { + // ------------------------------------------------------------------ + // arrange + // ------------------------------------------------------------------ + + // create user + db.u.save({ + _key: "proj_admin", + _id: "u/proj_admin", + is_admin: false, + max_proj: -1, + }); + + // create repo + db.repo.save({ + _key: "testrepo", + title: "Test Repo", + capacity: 0, + type: "metadata", + }); + + // link user as repo admin (required by /prj/create) + db.admin.save({ + _from: "repo/testrepo", + _to: "u/proj_admin", + }); + + const url = + `${proj_base_url}/create` + + `?client=u/proj_admin` + + `&id=myproject` + + `&title=My+Project` + + `&desc=Test+project+description`; + + // ------------------------------------------------------------------ + // act + // ------------------------------------------------------------------ + + const response = request.get(url, { + headers: { "x-correlation-id": "test-proj-create" }, + }); + + // ------------------------------------------------------------------ + // assert + // ------------------------------------------------------------------ + + expect(response.status).to.equal(200); + + const body = JSON.parse(response.body); + expect(body).to.be.an("array").with.lengthOf(1); + + const project = body[0]; + + // project fields + expect(project).to.have.property("id"); + expect(project).to.have.property("title", "My Project"); + expect(project).to.have.property("desc", "Test project description"); + expect(project).to.have.property("owner", "u/proj_admin"); + + // admins / members arrays initialized + expect(project).to.have.property("admins").that.is.an("array"); + expect(project).to.have.property("members").that.is.an("array"); + + const exists = db._exists(project.id); + expect(exists).to.not.equal(null); + + // owner edge created + const ownerEdge = db.owner.firstExample({ + _from: 
project.id, + _to: "u/proj_admin", + }); + expect(ownerEdge).to.exist; + + // root collection created + const rootCollection = db.c.firstExample({ + owner: project.id, + is_root: true, + }); + expect(rootCollection).to.exist; + + // members group created + const membersGroup = db.g.firstExample({ + uid: project.id, + gid: "members", + }); + expect(membersGroup).to.exist; + }); + it("should update project metadata and membership when client is project admin", () => { + // ------------------------------------------------------------------ + // arrange + // ------------------------------------------------------------------ + + // users + db.u.save({ _key: "proj_admin", is_admin: false }); + db.u.save({ _key: "new_admin", is_admin: false }); + db.u.save({ _key: "member1", is_admin: false }); + + // project + db.p.save({ + _key: "myproject", + title: "Old Title", + desc: "Old description", + ct: 1, + ut: 1, + owner: "u/proj_admin", + }); + + // owner edge + db.owner.save({ + _from: "p/myproject", + _to: "u/proj_admin", + }); + + // members group (MUST match create logic exactly) + const memGrp = db.g.save({ + uid: "p/myproject", + gid: "members", + title: "Project Members", + desc: "Use to set baseline project member permissions.", + }); + + // ownership edge: group -> project + db.owner.save({ + _from: memGrp._id, + _to: "p/myproject", + }); + + // existing admin edge + db.admin.save({ + _from: "p/myproject", + _to: "u/proj_admin", + }); + + const url = + `${proj_base_url}/update` + + `?client=u/proj_admin` + + `&id=p/myproject` + + `&title=New+Title` + + `&desc=New+description`; + + // ------------------------------------------------------------------ + // act + // ------------------------------------------------------------------ + + const response = request.get(url, { + headers: { "x-correlation-id": "test-proj-update" }, + }); + + // ------------------------------------------------------------------ + // assert + // 
------------------------------------------------------------------ + + expect(response.status).to.equal(200); + + const body = JSON.parse(response.body); + expect(body).to.be.an("array").with.lengthOf(1); + + const proj = body[0]; + // core fields updated + expect(proj).to.have.property("id", "p/myproject"); + expect(proj).to.have.property("title", "New Title"); + expect(proj).to.have.property("desc", "New description"); + + // admins unchanged + expect(proj.admins).to.have.members(["u/proj_admin"]); + + // members unchanged + expect(proj.members).to.be.an("array").that.is.empty; + + // project document updated in DB + const storedProj = db.p.document("p/myproject"); + expect(storedProj.title).to.equal("New Title"); + expect(storedProj.desc).to.equal("New description"); + }); + + it("should return project info including admins and members", () => { + // --- arrange --- + // create users + db.u.save({ _key: "proj_admin" }); + db.u.save({ _key: "member1" }); + + // create project + db.p.save({ + _key: "myproject", + title: "Test Project", + desc: "Test description", + owner: "u/proj_admin", + }); + + // admin edge + db.admin.save({ _from: "p/myproject", _to: "u/proj_admin" }); + + // members group + const memGrp = db.g.save({ + uid: "p/myproject", + gid: "members", + title: "Project Members", + desc: "Use to set baseline project member permissions.", + }); + + // ownership edge: group -> project + db.owner.save({ + _from: memGrp._id, + _to: "p/myproject", + }); + + // member edge: group -> user + db.member.save({ + _from: memGrp._id, + _to: "u/member1", + }); + + // --- act --- + const url = `${proj_base_url}/view?client=u/proj_admin&id=p/myproject`; + const response = request.get(url, { headers: { "x-correlation-id": "test-proj-view" } }); + + // --- assert --- + expect(response.status).to.equal(200); + + const body = JSON.parse(response.body); + expect(body).to.be.an("array").with.lengthOf(1); + + const proj = body[0]; + expect(proj).to.have.property("id", 
"p/myproject"); + expect(proj).to.have.property("title", "Test Project"); + expect(proj).to.have.property("desc", "Test description"); + + // admins + expect(proj.admins).to.be.an("array").that.includes("u/proj_admin"); + + // members + expect(proj.members).to.be.an("array").that.includes("u/member1"); + + // allocs array should exist even if empty + expect(proj).to.have.property("allocs").that.is.an("array"); + }); + + it("should return a list of projects for a client including ownership, admin, and member roles", () => { + // ------------------------------------------------------------------ + // Arrange: setup users and projects + // ------------------------------------------------------------------ + db.u.save({ _key: "proj_owner", is_admin: false }); + db.u.save({ _key: "proj_admin", is_admin: false }); + db.u.save({ _key: "proj_member", is_admin: false }); + + // Project 1: owned by proj_owner + db.p.save({ + _key: "proj1", + title: "Project One", + desc: "First project", + ct: 1, + ut: 1, + owner: "u/proj_owner", + }); + db.owner.save({ _from: "p/proj1", _to: "u/proj_owner" }); + + // Project 2: admin by proj_owner + db.p.save({ + _key: "proj2", + title: "Project Two", + desc: "Second project", + ct: 1, + ut: 1, + owner: "u/proj_admin", + }); + db.admin.save({ _from: "p/proj2", _to: "u/proj_owner" }); + + // Project 3: member role for proj_owner + db.p.save({ + _key: "proj3", + title: "Project Three", + desc: "Third project", + ct: 1, + ut: 1, + owner: "u/proj_admin", + }); + + const membersGroup = db.g.save({ + uid: "p/proj3", + gid: "members", + title: "Project Three Members", + desc: "Member group for proj3", + }); + + db.owner.save({ _from: membersGroup._id, _to: "p/proj3" }); + db.member.save({ _from: membersGroup._id, _to: "u/proj_owner" }); + + // ------------------------------------------------------------------ + // Act: call the /list route + // ------------------------------------------------------------------ + const url = + 
`${proj_base_url}/list` + + `?client=u/proj_owner` + + `&as_owner=true&as_admin=true&as_member=true`; + + const response = request.get(url, { + headers: { "x-correlation-id": "test-proj-list" }, + }); + + // ------------------------------------------------------------------ + // Assert + // ------------------------------------------------------------------ + expect(response.status).to.equal(200); + + const body = JSON.parse(response.body); + const ids = body.map((p) => p.id); + + expect(ids).to.include.members(["p/proj1", "p/proj2", "p/proj3"]); + }); + + it("should enqueue a project delete task when client is authorized", () => { + // ------------------------------------------------------------------ + // Arrange + // ------------------------------------------------------------------ + db.u.save({ _key: "delete_admin", is_admin: true }); + + db.p.save({ + _key: "delete_proj1", + title: "Delete Me", + desc: "Project to be deleted", + ct: 1, + ut: 1, + owner: "u/delete_admin", + }); + + db.owner.save({ + _from: "p/delete_proj1", + _to: "u/delete_admin", + }); + + db.admin.save({ + _from: "p/delete_proj1", + _to: "u/delete_admin", + }); + + const url = `${proj_base_url}/delete?client=u/delete_admin`; + + // ------------------------------------------------------------------ + // Act + // ------------------------------------------------------------------ + const response = request.post(url, { + headers: { + "content-type": "application/json", + "x-correlation-id": "test-proj-delete", + }, + body: JSON.stringify({ + ids: ["p/delete_proj1"], + }), + }); + + // ------------------------------------------------------------------ + // Assert + // ------------------------------------------------------------------ + expect(response.status).to.equal(200); + + // Response body is allowed to be null + const body = JSON.parse(response.body); + expect(body).to.exist; + expect(body).to.be.an("object"); + expect(body).to.have.property("task"); + + // Delete is async — project still 
exists immediately + expect(db.p.exists("p/delete_proj1")).to.exist; + }); + + it("should return the correct project role for client or subject", () => { + // ------------------------------------------------------------------ + // arrange + // ------------------------------------------------------------------ + + db.u.save({ _key: "proj_owner", is_admin: false }); + db.u.save({ _key: "proj_admin", is_admin: false }); + db.u.save({ _key: "proj_member", is_admin: false }); + + db.p.save({ + _key: "role_proj", + title: "Role Test Project", + owner: "u/proj_owner", + ct: 1, + ut: 1, + }); + + // owner edge + db.owner.save({ + _from: "p/role_proj", + _to: "u/proj_owner", + }); + + // admin edge + db.admin.save({ + _from: "p/role_proj", + _to: "u/proj_admin", + }); + + // members group + const memGrp = db.g.save({ + uid: "p/role_proj", + gid: "members", + title: "Project Members", + desc: "Members group", + }); + + db.owner.save({ + _from: memGrp._id, + _to: "p/role_proj", + }); + + db.member.save({ + _from: memGrp._id, + _to: "u/proj_member", + }); + + // ------------------------------------------------------------------ + // OWNER + // ------------------------------------------------------------------ + + let response = request.get( + `${proj_base_url}/get_role` + `?client=u/proj_owner` + `&id=p/role_proj`, + { headers: { "x-correlation-id": "test-proj-get-role-owner" } }, + ); + + expect(response.status).to.equal(200); + let body = JSON.parse(response.body); + expect(body.role).to.equal(3); // owner + + // ------------------------------------------------------------------ + // ADMIN (subject) + // ------------------------------------------------------------------ + + response = request.get( + `${proj_base_url}/get_role` + + `?client=u/proj_owner` + + `&subject=u/proj_admin` + + `&id=p/role_proj`, + { headers: { "x-correlation-id": "test-proj-get-role-admin" } }, + ); + + expect(response.status).to.equal(200); + body = JSON.parse(response.body); + 
expect(body.role).to.equal(2); // admin + + // ------------------------------------------------------------------ + // MEMBER (subject) + // ------------------------------------------------------------------ + + response = request.get( + `${proj_base_url}/get_role` + + `?client=u/proj_owner` + + `&subject=u/proj_member` + + `&id=p/role_proj`, + { headers: { "x-correlation-id": "test-proj-get-role-member" } }, + ); + + expect(response.status).to.equal(200); + body = JSON.parse(response.body); + expect(body.role).to.equal(1); // member + }); +}); diff --git a/core/database/foxx/tests/query_router.test.js b/core/database/foxx/tests/query_router.test.js index b075e6790..8c982edb7 100644 --- a/core/database/foxx/tests/query_router.test.js +++ b/core/database/foxx/tests/query_router.test.js @@ -14,7 +14,7 @@ const qry_base_url = `${baseUrl}/qry`; describe("unit_query_router: the Foxx microservice qry_router endpoints", () => { after(function () { - const collections = ["u", "qry"]; + const collections = ["u", "qry", "c", "note", "fake"]; collections.forEach((name) => { let col = db._collection(name); if (col) col.truncate(); @@ -22,7 +22,7 @@ describe("unit_query_router: the Foxx microservice qry_router endpoints", () => }); beforeEach(() => { - const collections = ["u", "qry"]; + const collections = ["u", "qry", "c", "note", "fake"]; collections.forEach((name) => { let col = db._collection(name); if (col) { @@ -157,10 +157,56 @@ describe("unit_query_router: the Foxx microservice qry_router endpoints", () => }); var parsed = JSON.parse(response.body); - console.log("Response body:", response.body); // assert expect(response.status).to.equal(200); expect(parsed).to.be.an("array"); expect(parsed.length).to.be.greaterThan(0); }); + + it("should execute a query directly", () => { + // arrange + const fakeUser = { + _key: "fakeUser", + _id: "u/fakeUser", + name: "Fake User", + email: "fakeuser@datadev.org", + is_admin: true, + max_coll: 5, + max_proj: 5, + max_sav_qry: 10, + 
}; + + const fakeCol = { + _key: "fakeCol", + _id: "col/fakeCol", + title: "fakeCol", + desc: "This is a fake col", + }; + + db.u.save(fakeUser); + + // Save the query and the edge between the query and the user + var request_string = `${qry_base_url}/exec/direct?client=u/fakeUser&owner=u/fakeUser&cols=c/fakeCol&cnt=1&off=0`; + var body = { + qry_begin: "FOR i in fake filter i.owner == @owner ", + qry_end: " sort @off,@cnt RETURN distinct i", + qry_filter: "", + params: '{ "cnt": 1, "off": 0, "owner": "u/fakeUser"}', + limit: 10, + mode: 1, + published: false, + }; + + // act + var response = request.post(request_string, { + json: true, + body: body, + headers: { + "x-correlation-id": "test-correlation-id", + }, + }); + + // Assert + expect(response.status).to.equal(200); + }); }); diff --git a/core/database/foxx/tests/record.test.js b/core/database/foxx/tests/record.test.js index ed0c4fbcb..1725f10a0 100644 --- a/core/database/foxx/tests/record.test.js +++ b/core/database/foxx/tests/record.test.js @@ -237,7 +237,7 @@ describe("Record Class", () => { const valid_key = "1127"; const key_id = "d/" + valid_key; const owner_id = "u/john"; - const repo_id = "repo/orange-at-com"; + const repo_id = "repo/orange-at-org"; const new_repo_id = "repo/watermelon-at-org"; // Create nodes diff --git a/core/database/foxx/tests/repo_router.test.js b/core/database/foxx/tests/repo_router.test.js index 72fecc3a0..fd751f723 100644 --- a/core/database/foxx/tests/repo_router.test.js +++ b/core/database/foxx/tests/repo_router.test.js @@ -11,7 +11,7 @@ const repo_base_url = `${baseUrl}/repo`; // NOTE: describe block strings are compared against test specification during test call, not file name describe("integration_repo_router: the Foxx microservice repo_router create endpoint", () => { beforeEach(() => { - const collections = ["repo", "d", "alloc", "loc", "repo", "admin", "g", "p", "u"]; + const collections = ["repo", "d", "alloc", "loc", "admin", "g", "p", "u"]; 
collections.forEach((name) => { let col = g_db._collection(name); if (col) { @@ -22,6 +22,13 @@ describe("integration_repo_router: the Foxx microservice repo_router create endp }); }); + after(function () { + const collections = ["repo", "d", "alloc", "loc", "admin", "g", "p", "u"]; + collections.forEach((name) => { + const col = g_db._collection(name); + if (col) col.truncate(); + }); + }); const user_params = { id: "u/shredder", key: "shredder", @@ -253,4 +260,635 @@ describe("integration_repo_router: the Foxx microservice repo_router create endp expect(json[0]).to.have.property("path", "/mnt/nfs/large/heavymetal/"); expect(json[0]).to.have.property("pub_key", "Zm7W6W5vJjZZqFj7okjBOS8K9wVjHhYyLzX+zA8B"); }); + + it("should list all repos when no client is provided", () => { + g_db.repo.save({ _key: "r1", title: "Repo One", domain: "test" }); + g_db.repo.save({ _key: "r2", title: "Repo Two", domain: "test" }); + + const response = request.get(`${repo_base_url}/list`); + + expect(response.status).to.equal(200); + const json = JSON.parse(response.body); + + expect(json).to.be.an("array").with.lengthOf(2); + expect(json[0]).to.have.property("id"); + expect(json[0]).to.not.have.property("_key"); + }); + it("should view a repo by id", () => { + // arrange: seed users + g_db.u.save({ _key: "shredder", is_admin: false }); + g_db.u.save({ _key: "splinter", is_admin: true }); + + // arrange: seed repo + g_db.repo.save({ + _key: "heavymetal", + title: "Rock On!!!!", + capacity: 0, + type: "metadata", + }); + + // arrange: admin edges (repo -> user) + g_db.admin.save({ + _from: "repo/heavymetal", + _to: "u/shredder", + }); + g_db.admin.save({ + _from: "repo/heavymetal", + _to: "u/splinter", + }); + + // act + const response = request.get(`${repo_base_url}/view?id=repo/heavymetal`); + + // assert + expect(response.status).to.equal(200); + const json = JSON.parse(response.body); + + expect(json).to.be.an("array").with.lengthOf(1); + + const repo = json[0]; + + // id remapped 
from _id + expect(repo).to.have.property("id", "repo/heavymetal"); + + // admins resolved from admin edges + expect(repo.admins).to.have.members(["u/shredder", "u/splinter"]); + + // internal fields stripped + expect(repo).to.not.have.property("_id"); + expect(repo).to.not.have.property("_key"); + expect(repo).to.not.have.property("_rev"); + }); + + it("should update a repo when client has admin permissions", () => { + // arrange: seed admin user + g_db.u.save({ + _key: "splinter", + is_admin: true, + }); + + // arrange: seed repo + g_db.repo.save({ + _key: "heavymetal", + title: "Old Title", + summary: "Old summary", + domain: "old-domain", + capacity: 0, + type: "metadata", + }); + + // arrange: admin edge (repo -> user) + g_db.admin.save({ + _from: "repo/heavymetal", + _to: "u/splinter", + }); + + const request_string = `${repo_base_url}/update?client=u/splinter`; + + const update_body = { + id: "repo/heavymetal", + title: "New Title", + domain: "new-domain", + path: "/mnt/nfs/heavymetal", + capacity: 42, + admins: ["u/splinter"], + }; + + // act + const response = request.post(request_string, { + body: JSON.stringify(update_body), + headers: { "Content-Type": "application/json" }, + }); + + // assert + expect(response.status).to.equal(200); + const json = JSON.parse(response.body); + + expect(json).to.be.an("array").with.lengthOf(1); + + const repo = json[0]; + + // id remapped + expect(repo).to.have.property("id", "repo/heavymetal"); + + // updated fields + expect(repo).to.have.property("title", "New Title"); + expect(repo).to.have.property("domain", "new-domain"); + expect(repo).to.have.property("capacity", 42); + + // path normalized with trailing slash + expect(repo).to.have.property("path", "/mnt/nfs/heavymetal/"); + + // internal fields stripped + expect(repo).to.not.have.property("_id"); + expect(repo).to.not.have.property("_key"); + expect(repo).to.not.have.property("_rev"); + }); + + it("should delete a repo when user has admin perms and repo is not in 
use", () => { + // arrange + // create admin user + g_db.u.save(user_params_raw_admin); + + // create repo document + const repoDoc = { + _key: "heavymetal", + title: "Rock On!!!!", + capacity: 0, + type: "metadata", + }; + g_db.repo.save(repoDoc); + + // link admin to repo + g_db.admin.save({ + _from: "repo/heavymetal", + _to: "u/splinter", + }); + + // sanity check: repo exists before delete + expect(!!g_db._exists("repo/heavymetal")).to.equal(true); + const request_string = `${repo_base_url}/delete?client=${user_params_admin.id}&id=repo/heavymetal`; + + // act + const response = request.get(request_string); + + // assert + expect(response.status).to.equal(204); + + // repo should no longer exist + expect(g_db._exists("repo/heavymetal")).to.equal(false); + }); + it("should calculate per-repo sizes for specified items", () => { + // arrange + g_db.u.save(user_params_raw_admin); + + g_db.repo.save({ + _key: "heavymetal", + title: "Rock On!!!!", + capacity: 0, + type: "metadata", + }); + + g_db.d.save({ + _key: "song1", + size: 1234, + repo: "repo/heavymetal", + }); + + g_db.alloc.save({ + _from: "d/song1", + _to: "repo/heavymetal", + }); + + const items = encodeURIComponent(JSON.stringify(["d/song1"])); + const request_string = + `${repo_base_url}/calc_size?client=${user_params_admin.id}` + + `&items=${items}&recurse=false`; + + // act + const response = request.get(request_string); + + // assert + expect(response.status).to.equal(200); + + const json = JSON.parse(response.body); + expect(json).to.be.an("array"); + + // Only check repo info if something is returned + if (json.length > 0) { + expect(json[0]).to.have.property("repo"); + expect(json[0]).to.have.property("size"); + } + }); + it("should list all allocations for a repo", () => { + // arrange + const adminUser = { _key: "alloc_admin", is_admin: true }; + g_db.u.save(adminUser); + + const repoDoc = { + _key: "rock_repo", + title: "Rock Collection", + capacity: 0, + type: "metadata", + }; + 
g_db.repo.save(repoDoc); + + // create some data vertices + g_db.d.save({ _key: "songA", size: 1000 }); + g_db.d.save({ _key: "songB", size: 2000 }); + + // create allocation edges + g_db.alloc.save({ + _from: "d/songA", + _to: "repo/rock_repo", + data_limit: 5000, + data_size: 1000, + rec_limit: 10, + rec_count: 1, + path: "/mnt/rock/songA", + }); + + g_db.alloc.save({ + _from: "d/songB", + _to: "repo/rock_repo", + data_limit: 5000, + data_size: 2000, + rec_limit: 10, + rec_count: 1, + path: "/mnt/rock/songB", + }); + + // query parameters + const clientId = encodeURIComponent("alloc_admin"); + const repoId = encodeURIComponent("repo/rock_repo"); + + const request_string = `${repo_base_url}/alloc/list/by_repo?client=${clientId}&repo=${repoId}`; + + // act + const response = request.get(request_string); + + // assert + expect(response.status).to.equal(200); + + const json = JSON.parse(response.body); + expect(json).to.be.an("array").with.lengthOf(2); + + // validate first allocation structure + expect(json[0]).to.have.all.keys( + "id", + "repo", + "data_limit", + "data_size", + "rec_limit", + "rec_count", + "path", + ); + expect(json[0].repo).to.equal("repo/rock_repo"); + + // validate second allocation + expect(json[1].repo).to.equal("repo/rock_repo"); + }); + it("should list allocations for a specific object", () => { + // arrange + const adminUser = { _key: "alloc_admin", is_admin: true }; + g_db.u.save(adminUser); + + // create repo + const repoDoc = { + _key: "rock_repo", + title: "Rock Collection", + capacity: 0, + type: "metadata", + }; + g_db.repo.save(repoDoc); + + // create data object + const dataObj = { _key: "songX", size: 500 }; + g_db.d.save(dataObj); + + // create owner edge: object -> owner + g_db.owner.save({ + _from: "d/songX", + _to: "u/alloc_admin", + }); + + // create allocations: owner -> repo + g_db.alloc.save({ + _from: "u/alloc_admin", + _to: "repo/rock_repo", + data_limit: 1000, + data_size: 500, + rec_limit: 5, + rec_count: 1, + path: 
"/mnt/rock/songX", + }); + + const clientId = encodeURIComponent("alloc_admin"); + const objectId = encodeURIComponent("d/songX"); + const request_string = `${repo_base_url}/alloc/list/by_object?client=${clientId}&object=${objectId}`; + + // act + const response = request.get(request_string); + + // assert + expect(response.status).to.equal(200); + + const json = JSON.parse(response.body); + expect(json).to.be.an("array").with.lengthOf(1); + + const alloc = json[0]; + expect(alloc).to.have.all.keys( + "id", + "repo", + "data_limit", + "data_size", + "rec_limit", + "rec_count", + "path", + ); + + expect(alloc.id).to.equal("u/alloc_admin"); + expect(alloc.repo).to.equal("repo/rock_repo"); + expect(alloc.data_size).to.equal(500); + expect(alloc.rec_count).to.equal(1); + }); + it("should view allocation details for a repo", () => { + // arrange + const adminUser = { _key: "alloc_admin", is_admin: true }; + g_db.u.save(adminUser); + + // create repo + const repoDoc = { + _key: "rock_repo", + title: "Rock Collection", + capacity: 0, + type: "metadata", + }; + g_db.repo.save(repoDoc); + + // create allocation: user -> repo + g_db.alloc.save({ + _from: "u/alloc_admin", + _to: "repo/rock_repo", + data_limit: 1000, + data_size: 500, + rec_limit: 5, + rec_count: 1, + path: "/mnt/rock/songX", + }); + + const clientId = encodeURIComponent("alloc_admin"); + const repoId = encodeURIComponent("repo/rock_repo"); + const request_string = `${repo_base_url}/alloc/view?client=${clientId}&repo=${repoId}`; + + // act + const response = request.get(request_string); + + // assert + expect(response.status).to.equal(200); + + const json = JSON.parse(response.body); + expect(json).to.be.an("array").with.lengthOf(1); + + const alloc = json[0]; + expect(alloc).to.have.all.keys( + "id", + "repo", + "data_limit", + "data_size", + "rec_limit", + "rec_count", + "path", + ); + + expect(alloc.id).to.equal("u/alloc_admin"); + expect(alloc.repo).to.equal("repo/rock_repo"); + 
expect(alloc.data_size).to.equal(500); + expect(alloc.rec_count).to.equal(1); + }); + + it("should view allocation details for a repo (self view)", () => { + // arrange + const user = { _key: "alloc_user", is_admin: true }; + g_db.u.save(user); + + // create repo + const repoDoc = { + _key: "rock_repo", + title: "Rock Collection", + capacity: 0, + type: "metadata", + }; + g_db.repo.save(repoDoc); + + // create allocation: user -> repo + g_db.alloc.save({ + _from: "u/alloc_user", + _to: "repo/rock_repo", + data_limit: 1000, + data_size: 500, + rec_limit: 5, + rec_count: 1, + path: "/mnt/rock/songX", + }); + + const clientId = encodeURIComponent("alloc_user"); + const repoId = encodeURIComponent("repo/rock_repo"); + const request_string = `${repo_base_url}/alloc/view?client=${clientId}&repo=${repoId}`; + + // act + const response = request.get(request_string); + + // assert + expect(response.status).to.equal(200); + + const json = JSON.parse(response.body); + expect(json).to.be.an("array").with.lengthOf(1); + + const alloc = json[0]; + expect(alloc).to.have.all.keys( + "id", + "repo", + "data_limit", + "data_size", + "rec_limit", + "rec_count", + "path", + ); + + expect(alloc.id).to.equal("u/alloc_user"); + expect(alloc.repo).to.equal("repo/rock_repo"); + expect(alloc.data_size).to.equal(500); + expect(alloc.rec_count).to.equal(1); + }); + + it("should fetch allocation stats for a repo", () => { + // Arrange: create admin user + g_db.u.save({ _key: "stats_admin", name: "Stats Admin", role: "admin" }); + + // Create repo + g_db.repo.save({ + _key: "stats_repo", + title: "Stats Repo", + type: "metadata", + capacity: 0, + }); + + // Link the admin user to the repo via the admin edge + g_db.admin.save({ + _from: "repo/stats_repo", + _to: "u/stats_admin", + }); + + const clientId = encodeURIComponent("stats_admin"); + const repoId = encodeURIComponent("repo/stats_repo"); + const request_string = `${repo_base_url}/alloc/stats?client=${clientId}&repo=${repoId}`; + + // Act 
+ const response = request.get(request_string); + + // Assert + expect(response.status).to.equal(200); + + const json = JSON.parse(response.body); + expect(json).to.be.an("object"); + expect(json).to.have.property("repo", "repo/stats_repo"); + expect(json).to.have.property("rec_count"); + expect(json).to.have.property("data_size"); + }); + it("should create an allocation for a user/project when repo admin", () => { + // Arrange: create admin user + g_db.u.save({ _key: "alloc_admin", is_admin: true }); + + // Create a repo + g_db.repo.save({ + _key: "music_repo", + title: "Music Repo", + type: "metadata", + capacity: 0, + }); + + // Link admin to repo + g_db.admin.save({ + _from: "repo/music_repo", + _to: "u/alloc_admin", + }); + + // Create subject user/project + g_db.u.save({ _key: "alloc_user", is_admin: false }); + + // Prepare query parameters + const clientId = encodeURIComponent("alloc_admin"); + const subjectId = encodeURIComponent("alloc_user"); + const repoId = encodeURIComponent("repo/music_repo"); + const dataLimit = 5000; + const recLimit = 10; + + const requestString = + `${repo_base_url}/alloc/create?client=${clientId}` + + `&subject=${subjectId}&repo=${repoId}&data_limit=${dataLimit}&rec_limit=${recLimit}`; + + // Act + const response = request.get(requestString); + + const json = JSON.parse(response.body); + expect(json).to.have.property("task"); + + const task = json.task; + expect(task).to.have.property("_key"); + expect(task).to.have.property("_id"); + expect(task).to.have.property("type", 6); + + // Optional: check state fields + expect(task.state).to.have.property("repo_id", "repo/music_repo"); + expect(task.state).to.have.property("subject", "u/alloc_user"); + expect(task.state).to.have.property("data_limit", dataLimit); + expect(task.state).to.have.property("rec_limit", recLimit); + }); + + it("should delete an allocation for a user/project when repo admin", async function () { + // First, create the allocation and wait for it to finish + 
const createRes = await request.get("/repo/alloc/create", { + client: "alloc_admin", + subject: "alloc_user", + repo: "repo/music_repo", + data_limit: 5000, + rec_limit: 10, + }); + + expect(createRes.status).to.equal(200); + expect(createRes.json).to.have.property("task"); + + // Now, delete the allocation + const deleteRes = await request.get("/repo/alloc/delete", { + client: "alloc_admin", + subject: "alloc_user", + repo: "repo/music_repo", + }); + + expect(deleteRes.status).to.equal(200); + expect(deleteRes.json).to.be.an("object"); + + const task = deleteRes.json.task; + + expect(task).to.have.property("_key"); + expect(task).to.have.property("_id"); + expect(task).to.have.property("type"); + expect(task).to.have.property("status"); + expect(task.type).to.equal(6); + + // Validate the task state + expect(task).to.have.property("state"); + expect(task.state).to.have.property("repo_id", "repo/music_repo"); + expect(task.state).to.have.property("subject", "u/alloc_user"); + }); + it("should update allocation limits when client is repo admin", () => { + // arrange + g_db.u.save({ _key: "alloc_admin", is_admin: true }); + g_db.u.save({ _key: "alloc_user", is_admin: false }); + + g_db.repo.save({ + _key: "rock_repo", + title: "Rock Repo", + capacity: 0, + type: "metadata", + }); + + // admin edge + g_db.admin.save({ + _from: "repo/rock_repo", + _to: "u/alloc_admin", + }); + + // existing allocation + g_db.alloc.save({ + _from: "u/alloc_user", + _to: "repo/rock_repo", + data_limit: 1000, + rec_limit: 5, + }); + + const request_string = + `${repo_base_url}/alloc/set?client=alloc_admin` + + `&subject=alloc_user&repo=repo/rock_repo` + + `&data_limit=5000&rec_limit=20`; + + // act + const response = request.get(request_string); + + // assert + expect(response.status).to.equal(204); + + const alloc = g_db.alloc.firstExample({ + _from: "u/alloc_user", + _to: "repo/rock_repo", + }); + + expect(alloc.data_limit).to.equal(5000); + expect(alloc.rec_limit).to.equal(20); + }); + 
it("should set default allocation for self", () => { + // arrange + g_db.u.save({ _key: "alloc_user", is_admin: false }); + + g_db.repo.save({ _key: "repo1", title: "Repo 1", capacity: 0, type: "metadata" }); + g_db.repo.save({ _key: "repo2", title: "Repo 2", capacity: 0, type: "metadata" }); + + g_db.alloc.save({ _from: "u/alloc_user", _to: "repo/repo1", is_def: false }); + g_db.alloc.save({ _from: "u/alloc_user", _to: "repo/repo2", is_def: true }); + + const request_string = `${repo_base_url}/alloc/set/default?client=alloc_user&repo=repo/repo1`; + + // act + const response = request.get(request_string); + + // assert + expect(response.status).to.equal(204); + + const alloc1 = g_db.alloc.firstExample({ _from: "u/alloc_user", _to: "repo/repo1" }); + const alloc2 = g_db.alloc.firstExample({ _from: "u/alloc_user", _to: "repo/repo2" }); + + expect(alloc1.is_def).to.equal(true); + expect(alloc2.is_def).to.equal(false); + }); }); diff --git a/core/database/foxx/tests/schema_router.test.js b/core/database/foxx/tests/schema_router.test.js new file mode 100644 index 000000000..e296650f2 --- /dev/null +++ b/core/database/foxx/tests/schema_router.test.js @@ -0,0 +1,145 @@ +"use strict"; + +const { expect } = require("chai"); +const request = require("@arangodb/request"); +const { baseUrl } = module.context; +const { db } = require("@arangodb"); + +const schema_base_url = `${baseUrl}/schema`; + +describe("schema router", () => { + before(() => { + const collections = ["u", "sch", "sch_dep"]; + collections.forEach((name) => { + const col = db._collection(name); + if (col) col.truncate(); + else db._create(name); + }); + + db.u.save({ + _key: "fakeUser", + _id: "u/fakeUser", + name: "Fake User", + is_admin: true, + }); + }); + + after(function () { + const collections = ["u", "sch", "sch_dep"]; + collections.forEach((name) => { + const col = db._collection(name); + if (col) col.truncate(); + }); + }); + + it("unit_schema_router: should create a schema", () => { + const body = { 
+ id: "test_schema_1", + desc: "A simple test schema", + def: { properties: { field1: { type: "string" } } }, + pub: true, + sys: false, + }; + + const response = request.post(`${schema_base_url}/create?client=u/fakeUser`, { + body: JSON.stringify(body), + headers: { "Content-Type": "application/json" }, + }); + + expect(response.status).to.equal(200); + + const result = JSON.parse(response.body); + expect(result).to.be.an("array"); + expect(result[0].def).to.deep.equal(body.def); + expect(result[0].own_nm).to.equal("Fake"); + }); + + it("unit_schema_router: should update a schema", () => { + const response = request.post( + `${schema_base_url}/update?client=u/fakeUser&id=test_schema_1:0`, + { + body: JSON.stringify({ + desc: "Updated schema description", + def: { properties: { field1: { type: "string" } } }, + pub: true, + }), + headers: { "Content-Type": "application/json" }, + }, + ); + + expect(response.status).to.equal(200); + + const schema = JSON.parse(response.body)[0]; + expect(schema).to.have.property("id", "test_schema_1"); + expect(schema).to.have.property("desc", "Updated schema description"); + expect(schema).to.have.property("own_id", "u/fakeUser"); + }); + + it("unit_schema_router: should revise a schema", () => { + const response = request.post( + `${schema_base_url}/revise?client=u/fakeUser&id=test_schema_1:0`, + { + body: JSON.stringify({ + desc: "Revised schema description", + def: { + properties: { + field1: { type: "string" }, + field2: { type: "number" }, + }, + }, + pub: true, + }), + headers: { "Content-Type": "application/json" }, + }, + ); + + expect(response.status).to.equal(200); + + const schema = JSON.parse(response.body)[0]; + expect(schema).to.have.property("ver", 1); + expect(schema).to.have.property("id", "test_schema_1"); + }); + + it("unit_schema_router: should search schemas", () => { + const response = request.get(`${schema_base_url}/search?client=u/fakeUser`); + + expect(response.status).to.equal(200); + + const result = 
JSON.parse(response.body); + expect(result).to.be.an("array"); + + const paging = result[result.length - 1]; + expect(paging).to.have.property("paging"); + + const schemas = result.filter((r) => !r.paging); + if (schemas.length) { + expect(schemas[0]).to.have.property("ver"); + expect(schemas[0]).to.have.property("own_id"); + expect(schemas[0]).to.have.property("own_nm"); + } + }); + + it("unit_schema_router: should delete latest schema revision", () => { + const response = request.post( + `${schema_base_url}/delete?client=u/fakeUser&id=test_schema_1:1`, + ); + + expect(response.status).to.equal(204); + + const deleted = db.sch.firstExample({ id: "test_schema_1", ver: 1 }); + expect(deleted).to.equal(null); + }); + + it("unit_schema_router: should view a schema", () => { + const response = request.get( + `${schema_base_url}/view?client=u/fakeUser&id=test_schema_1:0`, + ); + + expect(response.status).to.equal(200); + + const schema = JSON.parse(response.body)[0]; + expect(schema).to.have.property("id", "test_schema_1"); + expect(schema).to.have.property("ver", 0); + expect(schema).to.have.property("own_id", "u/fakeUser"); + }); +}); diff --git a/core/database/foxx/tests/task_router.test.js b/core/database/foxx/tests/task_router.test.js index 8815e09a0..297044935 100644 --- a/core/database/foxx/tests/task_router.test.js +++ b/core/database/foxx/tests/task_router.test.js @@ -33,6 +33,13 @@ describe("unit_task_router: the Foxx microservice task_router list/ endpoint", ( }); }); + after(function () { + const collections = ["u", "task"]; + collections.forEach((name) => { + const col = db._collection(name); + if (col) col.truncate(); + }); + }); it("should successfully run the list route", () => { db.u.save({ _key: "fakeUser", diff --git a/core/database/foxx/tests/topic_router.test.js b/core/database/foxx/tests/topic_router.test.js new file mode 100644 index 000000000..9f60da2bc --- /dev/null +++ b/core/database/foxx/tests/topic_router.test.js @@ -0,0 +1,143 @@ +"use 
strict"; +// NOTE: completion of tests requires successful run of user_fixture.js script + +// Need to pull enum from support +const g_lib = require("../api/support"); + +// Integration test of API +const { expect } = require("chai"); +const request = require("@arangodb/request"); +const { baseUrl } = module.context; +const { db } = require("@arangodb"); + +const topic_base_url = `${baseUrl}/topic`; + +describe("unit_topic_router: the Foxx microservice topic_router /view endpoint", () => { + after(function () { + const collections = ["u", "t"]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) col.truncate(); + }); + }); + + beforeEach(() => { + const collections = ["u", "t"]; + collections.forEach((name) => { + let col = db._collection(name); + if (col) { + col.truncate(); // truncate after ensuring collection exists + } else { + db._create(name); // create if it doesn’t exist + } + }); + }); + + it("should successfully run the list route", () => { + db.u.save({ + _key: "fakeUser", + _id: "u/fakeUser", + name: "fake user", + name_first: "fake", + name_last: "user", + is_admin: true, + max_coll: 50, + max_proj: 10, + max_sav_qry: 20, + email: "fakeuser@gmail.com", + }); + + db.t.save({ + _key: "10", + }); + + // arrange + // TODO: make encoded query params less hard coded + const request_string = `${topic_base_url}/view?client=u/fakeUser&id=10`; + // act + const response = request.get(request_string); + // assert + expect(response.status).to.equal(200); + }); + it("should successfully run the search route", () => { + // Create user + db.u.save({ + _key: "fakeUser", + is_admin: true, + }); + + // Create ArangoSearch View (if missing) + if (!db._view("topicview")) { + db._createView("topicview", "arangosearch", { + links: { + t: { + includeAllFields: true, + }, + }, + }); + } + + // Insert topic + db.t.save({ + _key: "s1", + title: "Sample Topic", + }); + + // Force view to update (ArangoSearch is async) + db._query("FOR d IN topicview 
SEARCH d.title == 'nothing' RETURN d"); + + const request_string = `${topic_base_url}/search?client=fakeUser&phrase=Sample`; + + const response = request.get(request_string); + + expect(response.status).to.equal(200); + }); + + it("should list only top-level topics", () => { + db.u.save({ + _key: "fakeUser", + is_admin: true, + }); + + db.t.save({ + _key: "t1", + title: "Alpha", + top: true, + admin: false, + coll_cnt: 1, + }); + + db.t.save({ + _key: "t2", + title: "Beta", + top: true, + admin: true, + coll_cnt: 5, + }); + + db.t.save({ + _key: "child1", + title: "Child Should Not Appear", + top: false, + }); + + const url = `${topic_base_url}/list/topics?client=u/fakeUser`; + + const response = request.get(url); + + expect(response.status).to.equal(200); + + const body = JSON.parse(response.body); + + // last item is paging metadata + const paging = body[body.length - 1].paging; + + expect(paging.tot).to.equal(2); // only Alpha + Beta + + const ids = body.slice(0, -1).map((x) => x._id); + + expect(ids).to.include("t/t1"); + expect(ids).to.include("t/t2"); + expect(ids).to.not.include("t/child1"); + }); +}); diff --git a/core/server/ClientWorker.cpp b/core/server/ClientWorker.cpp index be85bae71..5691f73df 100644 --- a/core/server/ClientWorker.cpp +++ b/core/server/ClientWorker.cpp @@ -1,4 +1,3 @@ - // Local DataFed includes #include "ClientWorker.hpp" #include "TaskMgr.hpp" @@ -14,10 +13,7 @@ #include "common/libjson.hpp" // Proto files -#include "common/SDMS.pb.h" -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" -#include "common/Version.pb.h" +#include "common/envelope.pb.h" // Third party includes #include @@ -30,9 +26,6 @@ using namespace std; namespace SDMS { -using namespace SDMS::Anon; -using namespace SDMS::Auth; - namespace Core { map ClientWorker::m_msg_handlers; @@ -81,20 +74,19 @@ void ClientWorker::wait() { } } -#define SET_MSG_HANDLER(proto_id, msg, func) \ - m_msg_handlers[m_msg_mapper->getMessageType(proto_id, #msg)] = func 
-#define SET_MSG_HANDLER_DB(proto_id, rq, rp, func) \ - m_msg_handlers[m_msg_mapper->getMessageType(proto_id, #rq)] = \ +#define SET_MSG_HANDLER(msg, func) \ + m_msg_handlers[m_msg_mapper->getMessageType(#msg)] = func +#define SET_MSG_HANDLER_DB(rq, rp, func) \ + m_msg_handlers[m_msg_mapper->getMessageType(#rq)] = \ &ClientWorker::dbPassThrough /** * This method configures message handling by creating a map from message type - * to handler function. There are currently two protocol levels: anonymous and - * authenticated. Each is supported by a Google protobuf interface (in - * /common/proto). Most requests can be handled directly by the DB (via - * DatabaseAPI class), but some require local processing. This method maps the - * two classes of requests using the macros SET_MSG_HANDLER (for local) and - * SET_MSG_HANDLER_DB (for DB only). + * (envelope field number) to handler function. Message types are identified + * by their field number in the Envelope proto message. Most requests can be + * handled directly by the DB (via DatabaseAPI class), but some require local + * processing. This method maps the two classes of requests using the macros + * SET_MSG_HANDLER (for local) and SET_MSG_HANDLER_DB (for DB only). 
*/ void ClientWorker::setupMsgHandlers() { static std::atomic_flag lock = ATOMIC_FLAG_INIT; @@ -105,192 +97,158 @@ void ClientWorker::setupMsgHandlers() { return; try { - // Register and setup handlers for the Anonymous interface - - uint8_t proto_id = m_msg_mapper->getProtocolID( - MessageProtocol::GOOGLE_ANONONYMOUS); // REG_PROTO( SDMS::Anon ); - // Requests that require the server to take action - SET_MSG_HANDLER(proto_id, VersionRequest, - &ClientWorker::procVersionRequest); - SET_MSG_HANDLER(proto_id, AuthenticateByPasswordRequest, + // Anonymous interface handlers + SET_MSG_HANDLER(VersionRequest, &ClientWorker::procVersionRequest); + SET_MSG_HANDLER(AuthenticateByPasswordRequest, &ClientWorker::procAuthenticateByPasswordRequest); - SET_MSG_HANDLER(proto_id, AuthenticateByTokenRequest, + SET_MSG_HANDLER(AuthenticateByTokenRequest, &ClientWorker::procAuthenticateByTokenRequest); - SET_MSG_HANDLER(proto_id, GetAuthStatusRequest, + SET_MSG_HANDLER(GetAuthStatusRequest, &ClientWorker::procGetAuthStatusRequest); + SET_MSG_HANDLER_DB(DailyMessageRequest, DailyMessageReply, dailyMessage); - // Requests that can be handled by DB client directly - SET_MSG_HANDLER_DB(proto_id, DailyMessageRequest, DailyMessageReply, - dailyMessage); - - // Register and setup handlers for the Authenticated interface - proto_id = m_msg_mapper->getProtocolID(MessageProtocol::GOOGLE_AUTHORIZED); - - // Requests that require the server to take action - SET_MSG_HANDLER(proto_id, GenerateCredentialsRequest, + // Authenticated interface handlers + SET_MSG_HANDLER(GenerateCredentialsRequest, &ClientWorker::procGenerateCredentialsRequest); - SET_MSG_HANDLER(proto_id, RevokeCredentialsRequest, + SET_MSG_HANDLER(RevokeCredentialsRequest, &ClientWorker::procRevokeCredentialsRequest); - SET_MSG_HANDLER(proto_id, DataGetRequest, - &ClientWorker::procDataGetRequest); - SET_MSG_HANDLER(proto_id, DataPutRequest, - &ClientWorker::procDataPutRequest); - SET_MSG_HANDLER(proto_id, RecordCreateRequest, + 
SET_MSG_HANDLER(DataGetRequest, &ClientWorker::procDataGetRequest); + SET_MSG_HANDLER(DataPutRequest, &ClientWorker::procDataPutRequest); + SET_MSG_HANDLER(RecordCreateRequest, &ClientWorker::procRecordCreateRequest); - SET_MSG_HANDLER(proto_id, RecordUpdateRequest, + SET_MSG_HANDLER(RecordUpdateRequest, &ClientWorker::procRecordUpdateRequest); - SET_MSG_HANDLER(proto_id, RecordUpdateBatchRequest, + SET_MSG_HANDLER(RecordUpdateBatchRequest, &ClientWorker::procRecordUpdateBatchRequest); - SET_MSG_HANDLER(proto_id, RecordDeleteRequest, + SET_MSG_HANDLER(RecordDeleteRequest, &ClientWorker::procRecordDeleteRequest); - SET_MSG_HANDLER(proto_id, RecordAllocChangeRequest, + SET_MSG_HANDLER(RecordAllocChangeRequest, &ClientWorker::procRecordAllocChangeRequest); - SET_MSG_HANDLER(proto_id, RecordOwnerChangeRequest, + SET_MSG_HANDLER(RecordOwnerChangeRequest, &ClientWorker::procRecordOwnerChangeRequest); - SET_MSG_HANDLER(proto_id, ProjectSearchRequest, + SET_MSG_HANDLER(ProjectSearchRequest, &ClientWorker::procProjectSearchRequest); - SET_MSG_HANDLER(proto_id, CollDeleteRequest, + SET_MSG_HANDLER(CollDeleteRequest, &ClientWorker::procCollectionDeleteRequest); - SET_MSG_HANDLER(proto_id, ProjectDeleteRequest, + SET_MSG_HANDLER(ProjectDeleteRequest, &ClientWorker::procProjectDeleteRequest); - SET_MSG_HANDLER(proto_id, RepoAuthzRequest, - &ClientWorker::procRepoAuthzRequest); - SET_MSG_HANDLER(proto_id, RepoAllocationCreateRequest, + SET_MSG_HANDLER(RepoAuthzRequest, &ClientWorker::procRepoAuthzRequest); + SET_MSG_HANDLER(RepoAllocationCreateRequest, &ClientWorker::procRepoAllocationCreateRequest); - SET_MSG_HANDLER(proto_id, RepoAllocationDeleteRequest, + SET_MSG_HANDLER(RepoAllocationDeleteRequest, &ClientWorker::procRepoAllocationDeleteRequest); - SET_MSG_HANDLER(proto_id, UserGetAccessTokenRequest, + SET_MSG_HANDLER(UserGetAccessTokenRequest, &ClientWorker::procUserGetAccessTokenRequest); - SET_MSG_HANDLER(proto_id, SchemaCreateRequest, + 
SET_MSG_HANDLER(SchemaCreateRequest, &ClientWorker::procSchemaCreateRequest); - SET_MSG_HANDLER(proto_id, SchemaReviseRequest, + SET_MSG_HANDLER(SchemaReviseRequest, &ClientWorker::procSchemaReviseRequest); - SET_MSG_HANDLER(proto_id, SchemaUpdateRequest, + SET_MSG_HANDLER(SchemaUpdateRequest, &ClientWorker::procSchemaUpdateRequest); - SET_MSG_HANDLER(proto_id, MetadataValidateRequest, + SET_MSG_HANDLER(MetadataValidateRequest, &ClientWorker::procMetadataValidateRequest); // Requires updating repo cache - SET_MSG_HANDLER(proto_id, RepoCreateRequest, &ClientWorker::procRepoCreate); - SET_MSG_HANDLER(proto_id, RepoUpdateRequest, &ClientWorker::procRepoUpdate); - SET_MSG_HANDLER(proto_id, RepoDeleteRequest, &ClientWorker::procRepoDelete); + SET_MSG_HANDLER(RepoCreateRequest, &ClientWorker::procRepoCreate); + SET_MSG_HANDLER(RepoUpdateRequest, &ClientWorker::procRepoUpdate); + SET_MSG_HANDLER(RepoDeleteRequest, &ClientWorker::procRepoDelete); // Requests that can be handled by DB client directly - SET_MSG_HANDLER_DB(proto_id, CheckPermsRequest, CheckPermsReply, - checkPerms); - SET_MSG_HANDLER_DB(proto_id, GetPermsRequest, GetPermsReply, getPerms); - SET_MSG_HANDLER_DB(proto_id, UserViewRequest, UserDataReply, userView); - SET_MSG_HANDLER_DB(proto_id, UserSetAccessTokenRequest, AckReply, + SET_MSG_HANDLER_DB(CheckPermsRequest, CheckPermsReply, checkPerms); + SET_MSG_HANDLER_DB(GetPermsRequest, GetPermsReply, getPerms); + SET_MSG_HANDLER_DB(UserViewRequest, UserDataReply, userView); + SET_MSG_HANDLER_DB(UserSetAccessTokenRequest, AckReply, userSetAccessToken); - SET_MSG_HANDLER_DB(proto_id, UserCreateRequest, UserDataReply, userCreate); - SET_MSG_HANDLER_DB(proto_id, UserUpdateRequest, UserDataReply, userUpdate); - SET_MSG_HANDLER_DB(proto_id, UserListAllRequest, UserDataReply, - userListAll); - SET_MSG_HANDLER_DB(proto_id, UserListCollabRequest, UserDataReply, - userListCollab); - SET_MSG_HANDLER_DB(proto_id, UserFindByUUIDsRequest, UserDataReply, + 
SET_MSG_HANDLER_DB(UserCreateRequest, UserDataReply, userCreate); + SET_MSG_HANDLER_DB(UserUpdateRequest, UserDataReply, userUpdate); + SET_MSG_HANDLER_DB(UserListAllRequest, UserDataReply, userListAll); + SET_MSG_HANDLER_DB(UserListCollabRequest, UserDataReply, userListCollab); + SET_MSG_HANDLER_DB(UserFindByUUIDsRequest, UserDataReply, userFindByUUIDs); - SET_MSG_HANDLER_DB(proto_id, UserFindByNameUIDRequest, UserDataReply, + SET_MSG_HANDLER_DB(UserFindByNameUIDRequest, UserDataReply, userFindByNameUID); - SET_MSG_HANDLER_DB(proto_id, UserGetRecentEPRequest, UserGetRecentEPReply, + SET_MSG_HANDLER_DB(UserGetRecentEPRequest, UserGetRecentEPReply, userGetRecentEP); - SET_MSG_HANDLER_DB(proto_id, UserSetRecentEPRequest, AckReply, - userSetRecentEP); - SET_MSG_HANDLER_DB(proto_id, ProjectViewRequest, ProjectDataReply, - projView); - SET_MSG_HANDLER_DB(proto_id, ProjectCreateRequest, ProjectDataReply, - projCreate); - SET_MSG_HANDLER_DB(proto_id, ProjectUpdateRequest, ProjectDataReply, - projUpdate); - SET_MSG_HANDLER_DB(proto_id, ProjectListRequest, ListingReply, projList); - SET_MSG_HANDLER_DB(proto_id, ProjectGetRoleRequest, ProjectGetRoleReply, + SET_MSG_HANDLER_DB(UserSetRecentEPRequest, AckReply, userSetRecentEP); + SET_MSG_HANDLER_DB(ProjectViewRequest, ProjectDataReply, projView); + SET_MSG_HANDLER_DB(ProjectCreateRequest, ProjectDataReply, projCreate); + SET_MSG_HANDLER_DB(ProjectUpdateRequest, ProjectDataReply, projUpdate); + SET_MSG_HANDLER_DB(ProjectListRequest, ListingReply, projList); + SET_MSG_HANDLER_DB(ProjectGetRoleRequest, ProjectGetRoleReply, projGetRole); - SET_MSG_HANDLER_DB(proto_id, RecordViewRequest, RecordDataReply, - recordView); - SET_MSG_HANDLER_DB(proto_id, RecordCreateBatchRequest, RecordDataReply, + SET_MSG_HANDLER_DB(RecordViewRequest, RecordDataReply, recordView); + SET_MSG_HANDLER_DB(RecordCreateBatchRequest, RecordDataReply, recordCreateBatch); - SET_MSG_HANDLER_DB(proto_id, RecordExportRequest, RecordExportReply, - recordExport); - 
SET_MSG_HANDLER_DB(proto_id, RecordLockRequest, ListingReply, recordLock); - SET_MSG_HANDLER_DB(proto_id, RecordListByAllocRequest, ListingReply, + SET_MSG_HANDLER_DB(RecordExportRequest, RecordExportReply, recordExport); + SET_MSG_HANDLER_DB(RecordLockRequest, ListingReply, recordLock); + SET_MSG_HANDLER_DB(RecordListByAllocRequest, ListingReply, recordListByAlloc); - SET_MSG_HANDLER_DB(proto_id, RecordGetDependencyGraphRequest, ListingReply, + SET_MSG_HANDLER_DB(RecordGetDependencyGraphRequest, ListingReply, recordGetDependencyGraph); - SET_MSG_HANDLER_DB(proto_id, SearchRequest, ListingReply, generalSearch); - SET_MSG_HANDLER_DB(proto_id, DataPathRequest, DataPathReply, dataPath); - SET_MSG_HANDLER_DB(proto_id, CollViewRequest, CollDataReply, collView); - SET_MSG_HANDLER_DB(proto_id, CollReadRequest, ListingReply, collRead); - SET_MSG_HANDLER_DB(proto_id, CollListPublishedRequest, ListingReply, + SET_MSG_HANDLER_DB(SearchRequest, ListingReply, generalSearch); + SET_MSG_HANDLER_DB(DataPathRequest, DataPathReply, dataPath); + SET_MSG_HANDLER_DB(CollViewRequest, CollDataReply, collView); + SET_MSG_HANDLER_DB(CollReadRequest, ListingReply, collRead); + SET_MSG_HANDLER_DB(CollListPublishedRequest, ListingReply, collListPublished); - SET_MSG_HANDLER_DB(proto_id, CollCreateRequest, CollDataReply, collCreate); - SET_MSG_HANDLER_DB(proto_id, CollUpdateRequest, CollDataReply, collUpdate); - SET_MSG_HANDLER_DB(proto_id, CollWriteRequest, ListingReply, collWrite); - SET_MSG_HANDLER_DB(proto_id, CollMoveRequest, AckReply, collMove); - SET_MSG_HANDLER_DB(proto_id, CollGetParentsRequest, CollPathReply, - collGetParents); - SET_MSG_HANDLER_DB(proto_id, CollGetOffsetRequest, CollGetOffsetReply, + SET_MSG_HANDLER_DB(CollCreateRequest, CollDataReply, collCreate); + SET_MSG_HANDLER_DB(CollUpdateRequest, CollDataReply, collUpdate); + SET_MSG_HANDLER_DB(CollWriteRequest, ListingReply, collWrite); + SET_MSG_HANDLER_DB(CollMoveRequest, AckReply, collMove); + 
SET_MSG_HANDLER_DB(CollGetParentsRequest, CollPathReply, collGetParents); + SET_MSG_HANDLER_DB(CollGetOffsetRequest, CollGetOffsetReply, collGetOffset); - SET_MSG_HANDLER_DB(proto_id, QueryListRequest, ListingReply, queryList); - SET_MSG_HANDLER_DB(proto_id, QueryViewRequest, QueryDataReply, queryView); - SET_MSG_HANDLER_DB(proto_id, QueryExecRequest, ListingReply, queryExec); - SET_MSG_HANDLER_DB(proto_id, QueryCreateRequest, QueryDataReply, - queryCreate); - SET_MSG_HANDLER_DB(proto_id, QueryUpdateRequest, QueryDataReply, - queryUpdate); - SET_MSG_HANDLER_DB(proto_id, QueryDeleteRequest, AckReply, queryDelete); - SET_MSG_HANDLER_DB(proto_id, NoteViewRequest, NoteDataReply, noteView); - SET_MSG_HANDLER_DB(proto_id, NoteListBySubjectRequest, NoteDataReply, + SET_MSG_HANDLER_DB(QueryListRequest, ListingReply, queryList); + SET_MSG_HANDLER_DB(QueryViewRequest, QueryDataReply, queryView); + SET_MSG_HANDLER_DB(QueryExecRequest, ListingReply, queryExec); + SET_MSG_HANDLER_DB(QueryCreateRequest, QueryDataReply, queryCreate); + SET_MSG_HANDLER_DB(QueryUpdateRequest, QueryDataReply, queryUpdate); + SET_MSG_HANDLER_DB(QueryDeleteRequest, AckReply, queryDelete); + SET_MSG_HANDLER_DB(NoteViewRequest, NoteDataReply, noteView); + SET_MSG_HANDLER_DB(NoteListBySubjectRequest, NoteDataReply, noteListBySubject); - SET_MSG_HANDLER_DB(proto_id, NoteCreateRequest, NoteDataReply, noteCreate); - SET_MSG_HANDLER_DB(proto_id, NoteUpdateRequest, NoteDataReply, noteUpdate); - SET_MSG_HANDLER_DB(proto_id, NoteCommentEditRequest, NoteDataReply, + SET_MSG_HANDLER_DB(NoteCreateRequest, NoteDataReply, noteCreate); + SET_MSG_HANDLER_DB(NoteUpdateRequest, NoteDataReply, noteUpdate); + SET_MSG_HANDLER_DB(NoteCommentEditRequest, NoteDataReply, noteCommentEdit); - SET_MSG_HANDLER_DB(proto_id, TaskListRequest, TaskDataReply, taskList); - SET_MSG_HANDLER_DB(proto_id, TaskViewRequest, TaskDataReply, taskView); - SET_MSG_HANDLER_DB(proto_id, ACLViewRequest, ACLDataReply, aclView); - 
SET_MSG_HANDLER_DB(proto_id, ACLUpdateRequest, ACLDataReply, aclUpdate); - SET_MSG_HANDLER_DB(proto_id, ACLSharedListRequest, ListingReply, - aclSharedList); - SET_MSG_HANDLER_DB(proto_id, ACLSharedListItemsRequest, ListingReply, + SET_MSG_HANDLER_DB(TaskListRequest, TaskDataReply, taskList); + SET_MSG_HANDLER_DB(TaskViewRequest, TaskDataReply, taskView); + SET_MSG_HANDLER_DB(ACLViewRequest, ACLDataReply, aclView); + SET_MSG_HANDLER_DB(ACLUpdateRequest, ACLDataReply, aclUpdate); + SET_MSG_HANDLER_DB(ACLSharedListRequest, ListingReply, aclSharedList); + SET_MSG_HANDLER_DB(ACLSharedListItemsRequest, ListingReply, aclSharedListItems); - SET_MSG_HANDLER_DB(proto_id, GroupCreateRequest, GroupDataReply, - groupCreate); - SET_MSG_HANDLER_DB(proto_id, GroupUpdateRequest, GroupDataReply, - groupUpdate); - SET_MSG_HANDLER_DB(proto_id, GroupDeleteRequest, AckReply, groupDelete); - SET_MSG_HANDLER_DB(proto_id, GroupListRequest, GroupDataReply, groupList); - SET_MSG_HANDLER_DB(proto_id, GroupViewRequest, GroupDataReply, groupView); - SET_MSG_HANDLER_DB(proto_id, RepoListRequest, RepoDataReply, repoList); - SET_MSG_HANDLER_DB(proto_id, RepoViewRequest, RepoDataReply, repoView); - SET_MSG_HANDLER_DB(proto_id, RepoCalcSizeRequest, RepoCalcSizeReply, - repoCalcSize); - SET_MSG_HANDLER_DB(proto_id, RepoListAllocationsRequest, - RepoAllocationsReply, repoListAllocations); - SET_MSG_HANDLER_DB(proto_id, RepoListSubjectAllocationsRequest, + SET_MSG_HANDLER_DB(GroupCreateRequest, GroupDataReply, groupCreate); + SET_MSG_HANDLER_DB(GroupUpdateRequest, GroupDataReply, groupUpdate); + SET_MSG_HANDLER_DB(GroupDeleteRequest, AckReply, groupDelete); + SET_MSG_HANDLER_DB(GroupListRequest, GroupDataReply, groupList); + SET_MSG_HANDLER_DB(GroupViewRequest, GroupDataReply, groupView); + SET_MSG_HANDLER_DB(RepoListRequest, RepoDataReply, repoList); + SET_MSG_HANDLER_DB(RepoViewRequest, RepoDataReply, repoView); + SET_MSG_HANDLER_DB(RepoCalcSizeRequest, RepoCalcSizeReply, repoCalcSize); + 
SET_MSG_HANDLER_DB(RepoListAllocationsRequest, RepoAllocationsReply, + repoListAllocations); + SET_MSG_HANDLER_DB(RepoListSubjectAllocationsRequest, RepoAllocationsReply, repoListSubjectAllocations); - SET_MSG_HANDLER_DB(proto_id, RepoListObjectAllocationsRequest, + SET_MSG_HANDLER_DB(RepoListObjectAllocationsRequest, RepoAllocationsReply, repoListObjectAllocations); - SET_MSG_HANDLER_DB(proto_id, RepoViewAllocationRequest, - RepoAllocationsReply, repoViewAllocation); - SET_MSG_HANDLER_DB(proto_id, RepoAllocationSetRequest, AckReply, + SET_MSG_HANDLER_DB(RepoViewAllocationRequest, RepoAllocationsReply, + repoViewAllocation); + SET_MSG_HANDLER_DB(RepoAllocationSetRequest, AckReply, repoAllocationSet); - SET_MSG_HANDLER_DB(proto_id, RepoAllocationSetDefaultRequest, AckReply, + SET_MSG_HANDLER_DB(RepoAllocationSetDefaultRequest, AckReply, repoAllocationSetDefault); - SET_MSG_HANDLER_DB(proto_id, RepoAllocationStatsRequest, - RepoAllocationStatsReply, repoAllocationStats); - SET_MSG_HANDLER_DB(proto_id, SchemaSearchRequest, SchemaDataReply, - schemaSearch); - SET_MSG_HANDLER_DB(proto_id, SchemaViewRequest, SchemaDataReply, - schemaView); - SET_MSG_HANDLER_DB(proto_id, SchemaDeleteRequest, AckReply, schemaDelete); - SET_MSG_HANDLER_DB(proto_id, TagSearchRequest, TagDataReply, tagSearch); - SET_MSG_HANDLER_DB(proto_id, TagListByCountRequest, TagDataReply, - tagListByCount); - SET_MSG_HANDLER_DB(proto_id, TopicListTopicsRequest, TopicDataReply, + SET_MSG_HANDLER_DB(RepoAllocationStatsRequest, RepoAllocationStatsReply, + repoAllocationStats); + SET_MSG_HANDLER_DB(SchemaSearchRequest, SchemaDataReply, schemaSearch); + SET_MSG_HANDLER_DB(SchemaViewRequest, SchemaDataReply, schemaView); + SET_MSG_HANDLER_DB(SchemaDeleteRequest, AckReply, schemaDelete); + SET_MSG_HANDLER_DB(TagSearchRequest, TagDataReply, tagSearch); + SET_MSG_HANDLER_DB(TagListByCountRequest, TagDataReply, tagListByCount); + SET_MSG_HANDLER_DB(TopicListTopicsRequest, TopicDataReply, topicListTopics); - 
SET_MSG_HANDLER_DB(proto_id, TopicViewRequest, TopicDataReply, topicView); - SET_MSG_HANDLER_DB(proto_id, TopicSearchRequest, TopicDataReply, - topicSearch); + SET_MSG_HANDLER_DB(TopicViewRequest, TopicDataReply, topicView); + SET_MSG_HANDLER_DB(TopicSearchRequest, TopicDataReply, topicSearch); } catch (TraceException &e) { DL_ERROR(m_log_context, "exception: " << e.toString()); throw; @@ -335,7 +293,7 @@ void ClientWorker::workerThread(LogContext log_context) { }(); ProtoBufMap proto_map; - uint16_t task_list_msg_type = proto_map.getMessageType(2, "TaskListRequest"); + uint16_t task_list_msg_type = proto_map.getMessageType("TaskListRequest"); DL_DEBUG(log_context, "W" << m_tid << " m_run " << m_run); @@ -368,7 +326,7 @@ void ClientWorker::workerThread(LogContext log_context) { << " [" << uid << "]"); } - if (uid.compare("anon") == 0 && msg_type > 0x1FF) { + if (uid.compare("anon") == 0 && proto_map.requiresAuth(proto_map.toString(msg_type))) { DL_WARNING(message_log_context, "W" << m_tid << " unauthorized access attempt from anon user"); @@ -376,8 +334,8 @@ void ClientWorker::workerThread(LogContext log_context) { // I know this is not great... 
allocating memory here slow // This will need to be fixed - auto nack = std::make_unique(); - nack->set_err_code(ID_AUTHN_REQUIRED); + auto nack = std::make_unique(); + nack->set_err_code(AUTHN_REQUIRED); nack->set_err_msg("Authentication required"); response_msg->setPayload(std::move(nack)); client->send(*response_msg); @@ -471,7 +429,7 @@ void ClientWorker::workerThread(LogContext log_context) { if (send_reply) { \ auto msg_reply = m_msg_factory.createResponseEnvelope(*msg_request); \ auto nack = std::make_unique(); \ - nack->set_err_code(ID_INTERNAL_ERROR); \ + nack->set_err_code(INTERNAL_ERROR); \ nack->set_err_msg(e.what()); \ msg_reply->setPayload(std::move(nack)); \ return msg_reply; \ @@ -483,7 +441,7 @@ void ClientWorker::workerThread(LogContext log_context) { if (send_reply) { \ auto msg_reply = m_msg_factory.createResponseEnvelope(*msg_request); \ auto nack = std::make_unique(); \ - nack->set_err_code(ID_INTERNAL_ERROR); \ + nack->set_err_code(INTERNAL_ERROR); \ nack->set_err_msg("Unknown exception type"); \ msg_reply->setPayload(std::move(nack)); \ return msg_reply; \ @@ -502,7 +460,7 @@ void ClientWorker::workerThread(LogContext log_context) { "unregistered msg type)."); \ auto msg_reply = m_msg_factory.createResponseEnvelope(*msg_request); \ auto nack = std::make_unique(); \ - nack->set_err_code(ID_BAD_REQUEST); \ + nack->set_err_code(BAD_REQUEST); \ nack->set_err_msg( \ "Message parse failed (malformed or unregistered msg type)"); \ msg_reply->setPayload(std::move(nack)); \ @@ -587,15 +545,15 @@ ClientWorker::procVersionRequest(const std::string &a_uid, (void)a_uid; DL_TRACE(log_context, "Version request"); - reply.set_release_year(DATAFED_RELEASE_YEAR); - reply.set_release_month(DATAFED_RELEASE_MONTH); - reply.set_release_day(DATAFED_RELEASE_DAY); - reply.set_release_hour(DATAFED_RELEASE_HOUR); - reply.set_release_minute(DATAFED_RELEASE_MINUTE); + reply.set_release_year( release::YEAR); + reply.set_release_month( release::MONTH); + 
reply.set_release_day( release::DAY); + reply.set_release_hour( release::HOUR); + reply.set_release_minute(release::MINUTE); - reply.set_api_major(DATAFED_COMMON_PROTOCOL_API_MAJOR); - reply.set_api_minor(DATAFED_COMMON_PROTOCOL_API_MINOR); - reply.set_api_patch(DATAFED_COMMON_PROTOCOL_API_PATCH); + reply.set_api_major(protocol::version::MAJOR); + reply.set_api_minor(protocol::version::MINOR); + reply.set_api_patch(protocol::version::PATCH); reply.set_component_major(core::version::MAJOR); reply.set_component_minor(core::version::MINOR); @@ -692,7 +650,7 @@ std::unique_ptr ClientWorker::procGenerateCredentialsRequest( char secret_key[41]; if (zmq_curve_keypair(public_key, secret_key) != 0) - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Key generation failed: " << zmq_strerror(errno)); pub_key = public_key; diff --git a/core/server/ClientWorker.hpp b/core/server/ClientWorker.hpp index cee872363..1da7af103 100644 --- a/core/server/ClientWorker.hpp +++ b/core/server/ClientWorker.hpp @@ -182,7 +182,7 @@ class ClientWorker : public nlohmann::json_schema::basic_error_handler { void schemaEnforceRequiredProperties(const nlohmann::json &a_schema); void recordCollectionDelete(const std::vector &a_ids, - Auth::TaskDataReply &a_reply, + SDMS::TaskDataReply &a_reply, LogContext log_context); void handleTaskResponse(libjson::Value &a_result, LogContext log_context); diff --git a/core/server/Config.hpp b/core/server/Config.hpp index ffc19fb92..f1170bbbe 100644 --- a/core/server/Config.hpp +++ b/core/server/Config.hpp @@ -8,7 +8,7 @@ // DataFed Common public includes #include "common/DynaLog.hpp" #include "common/ICredentials.hpp" -#include "common/SDMS.pb.h" +#include "common/envelope.pb.h" // Standard includes #include diff --git a/core/server/DatabaseAPI.cpp b/core/server/DatabaseAPI.cpp index 43ce988a3..cdf752e6c 100644 --- a/core/server/DatabaseAPI.cpp +++ b/core/server/DatabaseAPI.cpp @@ -4,9 +4,11 @@ // Local public includes #include "common/DynaLog.hpp" 
-#include "common/SDMS.pb.h" +#include "common/envelope.pb.h" #include "common/TraceException.hpp" #include "common/Util.hpp" +#include "common/enums/access_token_type.pb.h" +#include "common/enums/search_mode.pb.h" // Third party includes #include @@ -26,7 +28,6 @@ using namespace std; namespace SDMS { namespace Core { -using namespace SDMS::Auth; using namespace libjson; #define TRANSLATE_BEGIN() try { @@ -44,7 +45,7 @@ DatabaseAPI::DatabaseAPI(const std::string &a_db_url, : m_client(0), m_db_url(a_db_url) { m_curl = curl_easy_init(); if (!m_curl) - EXCEPT(ID_INTERNAL_ERROR, "libcurl init failed"); + EXCEPT(INTERNAL_ERROR, "libcurl init failed"); setClient(""); @@ -146,7 +147,7 @@ long DatabaseAPI::dbGet(const char *a_url_path, a_result.fromString(res_json); } catch (libjson::ParseError &e) { DL_DEBUG(log_context, "PARSE [" << res_json << "]"); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Invalid JSON returned from DB: " << e.toString()); } } @@ -155,14 +156,14 @@ long DatabaseAPI::dbGet(const char *a_url_path, return http_code; } else { if (res_json.size() && a_result.asObject().has("errorMessage")) { - EXCEPT_PARAM(ID_BAD_REQUEST, a_result.asObject().asString()); + EXCEPT_PARAM(BAD_REQUEST, a_result.asObject().asString()); } else { - EXCEPT_PARAM(ID_BAD_REQUEST, "SDMS DB service call failed. Code: " + EXCEPT_PARAM(BAD_REQUEST, "SDMS DB service call failed. Code: " << http_code << ", err: " << error); } } } else { - EXCEPT_PARAM(ID_SERVICE_ERROR, "SDMS DB interface failed. error: " + EXCEPT_PARAM(SERVICE_ERROR, "SDMS DB interface failed. 
error: " << error << ", " << curl_easy_strerror(res)); } @@ -237,7 +238,7 @@ long DatabaseAPI::dbPost(const char *a_url_path, a_result.fromString(res_json); } catch (libjson::ParseError &e) { DL_DEBUG(log_context, "PARSE [" << res_json << "]"); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Invalid JSON returned from DB: " << e.toString()); } } @@ -250,14 +251,14 @@ long DatabaseAPI::dbPost(const char *a_url_path, << (a_body ? *a_body : "") << "]"); - EXCEPT_PARAM(ID_BAD_REQUEST, a_result.asObject().asString()); + EXCEPT_PARAM(BAD_REQUEST, a_result.asObject().asString()); } else { - EXCEPT_PARAM(ID_BAD_REQUEST, "SDMS DB service call failed. Code: " + EXCEPT_PARAM(BAD_REQUEST, "SDMS DB service call failed. Code: " << http_code << ", err: " << error); } } } else { - EXCEPT_PARAM(ID_SERVICE_ERROR, "SDMS DB interface failed. error: " + EXCEPT_PARAM(SERVICE_ERROR, "SDMS DB interface failed. error: " << error << ", " << curl_easy_strerror(res)); } @@ -270,7 +271,7 @@ void DatabaseAPI::serverPing(LogContext log_context) { } void DatabaseAPI::clientAuthenticateByPassword(const std::string &a_password, - Anon::AuthStatusReply &a_reply, + SDMS::AuthStatusReply &a_reply, LogContext log_context) { Value result; @@ -279,7 +280,7 @@ void DatabaseAPI::clientAuthenticateByPassword(const std::string &a_password, } void DatabaseAPI::clientAuthenticateByToken(const std::string &a_token, - Anon::AuthStatusReply &a_reply, + SDMS::AuthStatusReply &a_reply, LogContext log_context) { Value result; @@ -287,7 +288,7 @@ void DatabaseAPI::clientAuthenticateByToken(const std::string &a_token, setAuthStatus(a_reply, result); } -void DatabaseAPI::setAuthStatus(Anon::AuthStatusReply &a_reply, +void DatabaseAPI::setAuthStatus(SDMS::AuthStatusReply &a_reply, const Value &a_result) { const Value::Object &obj = a_result.asObject(); a_reply.set_uid(obj.getString("uid")); @@ -386,7 +387,7 @@ void DatabaseAPI::userSetAccessToken(const std::string &a_acc_tok, {"access", a_acc_tok}, 
{"refresh", a_ref_tok}, {"expires_in", to_string(a_expires_in)}}; - if (token_type != SDMS::AccessTokenType::ACCESS_SENTINEL) { + if (token_type != SDMS::AccessTokenType::TOKEN_UNSPECIFIED) { params.push_back({"type", to_string(token_type)}); } if (!other_token_data.empty()) { @@ -406,7 +407,7 @@ void DatabaseAPI::userSetAccessToken(const std::string &a_access_token, } void DatabaseAPI::userSetAccessToken( - const Auth::UserSetAccessTokenRequest &a_request, Anon::AckReply &a_reply, + const SDMS::UserSetAccessTokenRequest &a_request, SDMS::AckReply &a_reply, LogContext log_context) { (void)a_reply; userSetAccessToken(a_request.access(), a_request.expires_in(), @@ -451,8 +452,8 @@ void DatabaseAPI::purgeTransferRecords(size_t age) { dbGetRaw(url, result); } -void DatabaseAPI::userCreate(const Auth::UserCreateRequest &a_request, - Auth::UserDataReply &a_reply, +void DatabaseAPI::userCreate(const SDMS::UserCreateRequest &a_request, + SDMS::UserDataReply &a_reply, LogContext log_context) { DL_DEBUG(log_context, "DataFed user create - uid: " << a_request.uid() @@ -488,8 +489,8 @@ void DatabaseAPI::userCreate(const Auth::UserCreateRequest &a_request, setUserData(a_reply, result, log_context); } -void DatabaseAPI::userView(const Auth::UserViewRequest &a_request, - Auth::UserDataReply &a_reply, +void DatabaseAPI::userView(const SDMS::UserViewRequest &a_request, + SDMS::UserDataReply &a_reply, LogContext log_context) { vector> params; params.push_back({"subject", a_request.uid()}); @@ -503,7 +504,7 @@ void DatabaseAPI::userView(const Auth::UserViewRequest &a_request, } void DatabaseAPI::userUpdate(const UserUpdateRequest &a_request, - Auth::UserDataReply &a_reply, + SDMS::UserDataReply &a_reply, LogContext log_context) { Value result; @@ -522,7 +523,7 @@ void DatabaseAPI::userUpdate(const UserUpdateRequest &a_request, } void DatabaseAPI::userListAll(const UserListAllRequest &a_request, - Auth::UserDataReply &a_reply, + SDMS::UserDataReply &a_reply, LogContext log_context) { 
vector> params; if (a_request.has_offset() && a_request.has_count()) { @@ -537,7 +538,7 @@ void DatabaseAPI::userListAll(const UserListAllRequest &a_request, } void DatabaseAPI::userListCollab(const UserListCollabRequest &a_request, - Auth::UserDataReply &a_reply, + SDMS::UserDataReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -550,8 +551,8 @@ void DatabaseAPI::userListCollab(const UserListCollabRequest &a_request, setUserData(a_reply, result, log_context); } -void DatabaseAPI::userFindByUUIDs(const Auth::UserFindByUUIDsRequest &a_request, - Auth::UserDataReply &a_reply, +void DatabaseAPI::userFindByUUIDs(const SDMS::UserFindByUUIDsRequest &a_request, + SDMS::UserDataReply &a_reply, LogContext log_context) { string uuids = "["; @@ -570,8 +571,8 @@ void DatabaseAPI::userFindByUUIDs(const Auth::UserFindByUUIDsRequest &a_request, } void DatabaseAPI::userFindByNameUID( - const Auth::UserFindByNameUIDRequest &a_request, - Auth::UserDataReply &a_reply, LogContext log_context) { + const SDMS::UserFindByNameUIDRequest &a_request, + SDMS::UserDataReply &a_reply, LogContext log_context) { Value result; vector> params; params.push_back({"name_uid", a_request.name_uid()}); @@ -585,8 +586,8 @@ void DatabaseAPI::userFindByNameUID( setUserData(a_reply, result, log_context); } -void DatabaseAPI::userGetRecentEP(const Auth::UserGetRecentEPRequest &a_request, - Auth::UserGetRecentEPReply &a_reply, +void DatabaseAPI::userGetRecentEP(const SDMS::UserGetRecentEPRequest &a_request, + SDMS::UserGetRecentEPReply &a_reply, LogContext log_context) { (void)a_request; Value result; @@ -604,8 +605,8 @@ void DatabaseAPI::userGetRecentEP(const Auth::UserGetRecentEPRequest &a_request, TRANSLATE_END(result, log_context) } -void DatabaseAPI::userSetRecentEP(const Auth::UserSetRecentEPRequest &a_request, - Anon::AckReply &a_reply, +void DatabaseAPI::userSetRecentEP(const SDMS::UserSetRecentEPRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context) { 
(void)a_reply; Value result; @@ -621,7 +622,7 @@ void DatabaseAPI::userSetRecentEP(const Auth::UserSetRecentEPRequest &a_request, dbGet("usr/ep/set", {{"eps", eps}}, result, log_context); } -void DatabaseAPI::setUserData(Auth::UserDataReply &a_reply, +void DatabaseAPI::setUserData(SDMS::UserDataReply &a_reply, const Value &a_result, LogContext log_context) { UserData *user; Value::ArrayConstIter k; @@ -676,8 +677,8 @@ void DatabaseAPI::setUserData(Auth::UserDataReply &a_reply, TRANSLATE_END(a_result, log_context) } -void DatabaseAPI::projCreate(const Auth::ProjectCreateRequest &a_request, - Auth::ProjectDataReply &a_reply, +void DatabaseAPI::projCreate(const SDMS::ProjectCreateRequest &a_request, + SDMS::ProjectDataReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -715,8 +716,8 @@ void DatabaseAPI::projCreate(const Auth::ProjectCreateRequest &a_request, setProjectData(a_reply, result, log_context); } -void DatabaseAPI::projUpdate(const Auth::ProjectUpdateRequest &a_request, - Auth::ProjectDataReply &a_reply, +void DatabaseAPI::projUpdate(const SDMS::ProjectUpdateRequest &a_request, + SDMS::ProjectDataReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -756,8 +757,8 @@ void DatabaseAPI::projUpdate(const Auth::ProjectUpdateRequest &a_request, setProjectData(a_reply, result, log_context); } -void DatabaseAPI::projView(const Auth::ProjectViewRequest &a_request, - Auth::ProjectDataReply &a_reply, +void DatabaseAPI::projView(const SDMS::ProjectViewRequest &a_request, + SDMS::ProjectDataReply &a_reply, LogContext log_context) { Value result; dbGet("prj/view", {{"id", a_request.id()}}, result, log_context); @@ -765,8 +766,8 @@ void DatabaseAPI::projView(const Auth::ProjectViewRequest &a_request, setProjectData(a_reply, result, log_context); } -void DatabaseAPI::projList(const Auth::ProjectListRequest &a_request, - Auth::ListingReply &a_reply, +void DatabaseAPI::projList(const SDMS::ProjectListRequest &a_request, + 
SDMS::ListingReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -792,8 +793,8 @@ void DatabaseAPI::projList(const Auth::ProjectListRequest &a_request, setListingDataReply(a_reply, result, log_context); } -void DatabaseAPI::projGetRole(const Auth::ProjectGetRoleRequest &a_request, - Auth::ProjectGetRoleReply &a_reply, +void DatabaseAPI::projGetRole(const SDMS::ProjectGetRoleRequest &a_request, + SDMS::ProjectGetRoleReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -807,17 +808,7 @@ void DatabaseAPI::projGetRole(const Auth::ProjectGetRoleRequest &a_request, a_reply.set_role((ProjectRole)(unsigned short)obj.getNumber("role")); } -void DatabaseAPI::projSearch(const std::string &a_query, - Auth::ProjectDataReply &a_reply, - LogContext log_context) { - Value result; - - dbGet("prj/search", {{"query", a_query}}, result, log_context); - - setProjectData(a_reply, result, log_context); -} - -void DatabaseAPI::setProjectData(Auth::ProjectDataReply &a_reply, +void DatabaseAPI::setProjectData(SDMS::ProjectDataReply &a_reply, const Value &a_result, LogContext log_context) { ProjectData *proj; @@ -872,8 +863,8 @@ void DatabaseAPI::setProjectData(Auth::ProjectDataReply &a_reply, } void DatabaseAPI::recordListByAlloc( - const Auth::RecordListByAllocRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context) { + const SDMS::RecordListByAllocRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context) { Value result; vector> params; params.push_back({"repo", a_request.repo()}); @@ -888,8 +879,8 @@ void DatabaseAPI::recordListByAlloc( setListingDataReply(a_reply, result, log_context); } -void DatabaseAPI::recordView(const Auth::RecordViewRequest &a_request, - Auth::RecordDataReply &a_reply, +void DatabaseAPI::recordView(const SDMS::RecordViewRequest &a_request, + SDMS::RecordDataReply &a_reply, LogContext log_context) { Value result; @@ -898,8 +889,8 @@ void DatabaseAPI::recordView(const 
Auth::RecordViewRequest &a_request, setRecordData(a_reply, result, log_context); } -void DatabaseAPI::recordCreate(const Auth::RecordCreateRequest &a_request, - Auth::RecordDataReply &a_reply, +void DatabaseAPI::recordCreate(const SDMS::RecordCreateRequest &a_request, + SDMS::RecordDataReply &a_reply, LogContext log_context) { Value result; nlohmann::json payload; @@ -968,8 +959,8 @@ void DatabaseAPI::recordCreate(const Auth::RecordCreateRequest &a_request, } void DatabaseAPI::recordCreateBatch( - const Auth::RecordCreateBatchRequest &a_request, - Auth::RecordDataReply &a_reply, LogContext log_context) { + const SDMS::RecordCreateBatchRequest &a_request, + SDMS::RecordDataReply &a_reply, LogContext log_context) { Value result; dbPost("dat/create/batch", {}, &a_request.records(), result, log_context); @@ -977,8 +968,8 @@ void DatabaseAPI::recordCreateBatch( setRecordData(a_reply, result, log_context); } -void DatabaseAPI::recordUpdate(const Auth::RecordUpdateRequest &a_request, - Auth::RecordDataReply &a_reply, +void DatabaseAPI::recordUpdate(const SDMS::RecordUpdateRequest &a_request, + SDMS::RecordDataReply &a_reply, libjson::Value &result, LogContext log_context) { nlohmann::json payload; payload["id"] = a_request.id(); @@ -1051,8 +1042,8 @@ void DatabaseAPI::recordUpdate(const Auth::RecordUpdateRequest &a_request, } void DatabaseAPI::recordUpdateBatch( - const Auth::RecordUpdateBatchRequest &a_request, - Auth::RecordDataReply &a_reply, libjson::Value &result, + const SDMS::RecordUpdateBatchRequest &a_request, + SDMS::RecordDataReply &a_reply, libjson::Value &result, LogContext log_context) { // "records" field is a JSON document - send directly to DB dbPost("dat/update/batch", {}, &a_request.records(), result, log_context); @@ -1060,7 +1051,7 @@ void DatabaseAPI::recordUpdateBatch( setRecordData(a_reply, result, log_context); } -void DatabaseAPI::recordUpdateSize(const Auth::RepoDataSizeReply &a_size_rep, +void DatabaseAPI::recordUpdateSize(const 
SDMS::RepoDataSizeReply &a_size_rep, LogContext log_context) { libjson::Value result; @@ -1091,8 +1082,8 @@ void DatabaseAPI::recordUpdateSchemaError(const std::string &a_rec_id, log_context); } -void DatabaseAPI::recordExport(const Auth::RecordExportRequest &a_request, - Auth::RecordExportReply &a_reply, +void DatabaseAPI::recordExport(const SDMS::RecordExportRequest &a_request, + SDMS::RecordExportReply &a_reply, LogContext log_context) { Value result; @@ -1117,8 +1108,8 @@ void DatabaseAPI::recordExport(const Auth::RecordExportRequest &a_request, TRANSLATE_END(result, log_context) } -void DatabaseAPI::recordLock(const Auth::RecordLockRequest &a_request, - Auth::ListingReply &a_reply, +void DatabaseAPI::recordLock(const SDMS::RecordLockRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context) { Value result; string ids; @@ -1143,8 +1134,8 @@ void DatabaseAPI::recordLock(const Auth::RecordLockRequest &a_request, } void DatabaseAPI::recordGetDependencyGraph( - const Auth::RecordGetDependencyGraphRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context) { + const SDMS::RecordGetDependencyGraphRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context) { Value result; dbGet("dat/dep/graph/get", {{"id", a_request.id()}}, result, log_context); @@ -1152,7 +1143,7 @@ void DatabaseAPI::recordGetDependencyGraph( setListingDataReply(a_reply, result, log_context); } -void DatabaseAPI::setRecordData(Auth::RecordDataReply &a_reply, +void DatabaseAPI::setRecordData(SDMS::RecordDataReply &a_reply, const Value &a_result, LogContext log_context) { RecordData *rec; DependencyData *deps; @@ -1268,8 +1259,8 @@ void DatabaseAPI::setRecordData(Auth::RecordDataReply &a_reply, TRANSLATE_END(a_result, log_context) } -void DatabaseAPI::dataPath(const Auth::DataPathRequest &a_request, - Auth::DataPathReply &a_reply, +void DatabaseAPI::dataPath(const SDMS::DataPathRequest &a_request, + SDMS::DataPathReply &a_reply, LogContext log_context) { 
Value result; @@ -1293,8 +1284,8 @@ void DatabaseAPI::dataPath(const Auth::DataPathRequest &a_request, * depending on scope. The DB relies on either tha "dataview" or "collview" * Arango search views for execution of the query. */ -void DatabaseAPI::generalSearch(const Auth::SearchRequest &a_request, - Auth::ListingReply &a_reply, +void DatabaseAPI::generalSearch(const SDMS::SearchRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context) { Value result; string qry_begin, qry_end, qry_filter, params; @@ -1321,8 +1312,8 @@ void DatabaseAPI::generalSearch(const Auth::SearchRequest &a_request, } void DatabaseAPI::collListPublished( - const Auth::CollListPublishedRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context) { + const SDMS::CollListPublishedRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -1338,8 +1329,8 @@ void DatabaseAPI::collListPublished( setListingDataReply(a_reply, result, log_context); } -void DatabaseAPI::collCreate(const Auth::CollCreateRequest &a_request, - Auth::CollDataReply &a_reply, +void DatabaseAPI::collCreate(const SDMS::CollCreateRequest &a_request, + SDMS::CollDataReply &a_reply, LogContext log_context) { Value result; nlohmann::json payload; @@ -1376,8 +1367,8 @@ void DatabaseAPI::collCreate(const Auth::CollCreateRequest &a_request, setCollData(a_reply, result, log_context); } -void DatabaseAPI::collUpdate(const Auth::CollUpdateRequest &a_request, - Auth::CollDataReply &a_reply, +void DatabaseAPI::collUpdate(const SDMS::CollUpdateRequest &a_request, + SDMS::CollDataReply &a_reply, LogContext log_context) { Value result; nlohmann::json payload; @@ -1414,8 +1405,8 @@ void DatabaseAPI::collUpdate(const Auth::CollUpdateRequest &a_request, setCollData(a_reply, result, log_context); } -void DatabaseAPI::collView(const Auth::CollViewRequest &a_request, - Auth::CollDataReply &a_reply, +void DatabaseAPI::collView(const SDMS::CollViewRequest &a_request, 
+ SDMS::CollDataReply &a_reply, LogContext log_context) { Value result; @@ -1424,8 +1415,8 @@ void DatabaseAPI::collView(const Auth::CollViewRequest &a_request, setCollData(a_reply, result, log_context); } -void DatabaseAPI::collRead(const Auth::CollReadRequest &a_request, - Auth::ListingReply &a_reply, +void DatabaseAPI::collRead(const SDMS::CollReadRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -1440,8 +1431,8 @@ void DatabaseAPI::collRead(const Auth::CollReadRequest &a_request, setListingDataReply(a_reply, result, log_context); } -void DatabaseAPI::collWrite(const Auth::CollWriteRequest &a_request, - Auth::ListingReply &a_reply, +void DatabaseAPI::collWrite(const SDMS::CollWriteRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context) { string add_list, rem_list; vector> params; @@ -1479,8 +1470,8 @@ void DatabaseAPI::collWrite(const Auth::CollWriteRequest &a_request, setListingDataReply(a_reply, result, log_context); } -void DatabaseAPI::collMove(const Auth::CollMoveRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context) { +void DatabaseAPI::collMove(const SDMS::CollMoveRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context) { (void)a_reply; if (a_request.item_size() == 0) @@ -1504,8 +1495,8 @@ void DatabaseAPI::collMove(const Auth::CollMoveRequest &a_request, result, log_context); } -void DatabaseAPI::collGetParents(const Auth::CollGetParentsRequest &a_request, - Auth::CollPathReply &a_reply, +void DatabaseAPI::collGetParents(const SDMS::CollGetParentsRequest &a_request, + SDMS::CollPathReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -1518,8 +1509,8 @@ void DatabaseAPI::collGetParents(const Auth::CollGetParentsRequest &a_request, setCollPathData(a_reply, result, log_context); } -void DatabaseAPI::collGetOffset(const Auth::CollGetOffsetRequest &a_request, - Auth::CollGetOffsetReply &a_reply, +void 
DatabaseAPI::collGetOffset(const SDMS::CollGetOffsetRequest &a_request, + SDMS::CollGetOffsetReply &a_reply, LogContext log_context) { Value result; @@ -1534,7 +1525,7 @@ void DatabaseAPI::collGetOffset(const Auth::CollGetOffsetRequest &a_request, a_reply.set_offset(result.asObject().getNumber("offset")); } -void DatabaseAPI::setCollData(Auth::CollDataReply &a_reply, +void DatabaseAPI::setCollData(SDMS::CollDataReply &a_reply, const libjson::Value &a_result, LogContext log_context) { CollData *coll; @@ -1637,7 +1628,7 @@ void DatabaseAPI::setCollPathData(CollPathReply &a_reply, TRANSLATE_END(a_result, log_context) } -void DatabaseAPI::setListingDataReply(Auth::ListingReply &a_reply, +void DatabaseAPI::setListingDataReply(SDMS::ListingReply &a_reply, const libjson::Value &a_result, LogContext log_context) { Value::ObjectConstIter j; @@ -1730,8 +1721,8 @@ void DatabaseAPI::setListingData(ListingData *a_item, } } -void DatabaseAPI::queryList(const Auth::QueryListRequest &a_request, - Auth::ListingReply &a_reply, +void DatabaseAPI::queryList(const SDMS::QueryListRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -1745,8 +1736,8 @@ void DatabaseAPI::queryList(const Auth::QueryListRequest &a_request, setListingDataReply(a_reply, result, log_context); } -void DatabaseAPI::queryCreate(const Auth::QueryCreateRequest &a_request, - Auth::QueryDataReply &a_reply, +void DatabaseAPI::queryCreate(const SDMS::QueryCreateRequest &a_request, + SDMS::QueryDataReply &a_reply, LogContext log_context) { Value result; // vector> params; @@ -1759,8 +1750,9 @@ void DatabaseAPI::queryCreate(const Auth::QueryCreateRequest &a_request, google::protobuf::util::JsonPrintOptions options; string query_json; - options.always_print_enums_as_ints = true; + options.always_print_enums_as_ints = false; options.preserve_proto_field_names = true; + options.always_print_primitive_fields = true; auto stat = 
google::protobuf::util::MessageToJsonString(a_request.query(), &query_json, options); @@ -1783,8 +1775,8 @@ void DatabaseAPI::queryCreate(const Auth::QueryCreateRequest &a_request, setQueryData(a_reply, result, log_context); } -void DatabaseAPI::queryUpdate(const Auth::QueryUpdateRequest &a_request, - Auth::QueryDataReply &a_reply, +void DatabaseAPI::queryUpdate(const SDMS::QueryUpdateRequest &a_request, + SDMS::QueryDataReply &a_reply, LogContext log_context) { Value result; nlohmann::json payload; @@ -1795,19 +1787,45 @@ void DatabaseAPI::queryUpdate(const Auth::QueryUpdateRequest &a_request, } if (a_request.has_query()) { - string qry_begin, qry_end, qry_filter, params; + SDMS::SearchRequest final_query; + if (a_request.replace_query()) { + // Full replacement — use incoming query as-is + final_query.CopyFrom(a_request.query()); + } else { + // Partial update — merge incoming onto existing + Value existing; + dbGet("qry/view", {{"id", a_request.id()}}, existing, log_context); + + auto parse_stat = google::protobuf::util::JsonStringToMessage( + existing.asObject().getValue("query").toString(), &final_query); + if (!parse_stat.ok()) { + EXCEPT(1, "Failed to parse existing query"); + } - uint32_t cnt = parseSearchRequest(a_request.query(), qry_begin, qry_end, + if (a_request.query().coll_size() > 0) { + final_query.clear_coll(); + } + if (a_request.query().tags_size() > 0) { + final_query.clear_tags(); + } + if (a_request.query().cat_tags_size() > 0) { + final_query.clear_cat_tags(); + } + final_query.MergeFrom(a_request.query()); + } + + // Re-generate AQL from the complete merged query + string qry_begin, qry_end, qry_filter, params; + uint32_t cnt = parseSearchRequest(final_query, qry_begin, qry_end, qry_filter, params, log_context); google::protobuf::util::JsonPrintOptions options; string query_json; - - options.always_print_enums_as_ints = true; + options.always_print_enums_as_ints = false; options.preserve_proto_field_names = true; - + 
options.always_print_primitive_fields = true; auto stat = google::protobuf::util::MessageToJsonString( - a_request.query(), &query_json, options); + final_query, &query_json, options); if (!stat.ok()) { EXCEPT(1, "Invalid search request"); } @@ -1822,13 +1840,12 @@ void DatabaseAPI::queryUpdate(const Auth::QueryUpdateRequest &a_request, string body = payload.dump(-1, ' ', true); dbPost("qry/update", {}, &body, result, log_context); - setQueryData(a_reply, result, log_context); } // DatabaseAPI::queryDelete( const std::string & a_id ) -void DatabaseAPI::queryDelete(const Auth::QueryDeleteRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context) { +void DatabaseAPI::queryDelete(const SDMS::QueryDeleteRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context) { (void)a_reply; Value result; string ids = "["; @@ -1844,8 +1861,8 @@ void DatabaseAPI::queryDelete(const Auth::QueryDeleteRequest &a_request, dbGet("qry/delete", {{"ids", ids}}, result, log_context); } -void DatabaseAPI::queryView(const Auth::QueryViewRequest &a_request, - Auth::QueryDataReply &a_reply, +void DatabaseAPI::queryView(const SDMS::QueryViewRequest &a_request, + SDMS::QueryDataReply &a_reply, LogContext log_context) { Value result; @@ -1854,8 +1871,8 @@ void DatabaseAPI::queryView(const Auth::QueryViewRequest &a_request, setQueryData(a_reply, result, log_context); } -void DatabaseAPI::queryExec(const Auth::QueryExecRequest &a_request, - Auth::ListingReply &a_reply, +void DatabaseAPI::queryExec(const SDMS::QueryExecRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -1893,8 +1910,8 @@ void DatabaseAPI::setQueryData(QueryDataReply &a_reply, TRANSLATE_END(a_result, log_context) } -void DatabaseAPI::aclView(const Auth::ACLViewRequest &a_request, - Auth::ACLDataReply &a_reply, LogContext log_context) { +void DatabaseAPI::aclView(const SDMS::ACLViewRequest &a_request, + SDMS::ACLDataReply &a_reply, LogContext log_context) { 
libjson::Value result; dbGet("acl/view", {{"id", a_request.id()}}, result, log_context); @@ -1902,8 +1919,8 @@ void DatabaseAPI::aclView(const Auth::ACLViewRequest &a_request, setACLData(a_reply, result, log_context); } -void DatabaseAPI::aclUpdate(const Auth::ACLUpdateRequest &a_request, - Auth::ACLDataReply &a_reply, +void DatabaseAPI::aclUpdate(const SDMS::ACLUpdateRequest &a_request, + SDMS::ACLDataReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -1916,8 +1933,8 @@ void DatabaseAPI::aclUpdate(const Auth::ACLUpdateRequest &a_request, setACLData(a_reply, result, log_context); } -void DatabaseAPI::aclSharedList(const Auth::ACLSharedListRequest &a_request, - Auth::ListingReply &a_reply, +void DatabaseAPI::aclSharedList(const SDMS::ACLSharedListRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -1934,8 +1951,8 @@ void DatabaseAPI::aclSharedList(const Auth::ACLSharedListRequest &a_request, } void DatabaseAPI::aclSharedListItems( - const Auth::ACLSharedListItemsRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context) { + const SDMS::ACLSharedListItemsRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -1971,8 +1988,8 @@ void DatabaseAPI::setACLData(ACLDataReply &a_reply, TRANSLATE_END(a_result, log_context) } -void DatabaseAPI::groupCreate(const Auth::GroupCreateRequest &a_request, - Auth::GroupDataReply &a_reply, +void DatabaseAPI::groupCreate(const SDMS::GroupCreateRequest &a_request, + SDMS::GroupDataReply &a_reply, LogContext log_context) { Value result; @@ -2000,8 +2017,8 @@ void DatabaseAPI::groupCreate(const Auth::GroupCreateRequest &a_request, setGroupData(a_reply, result, log_context); } -void DatabaseAPI::groupUpdate(const Auth::GroupUpdateRequest &a_request, - Auth::GroupDataReply &a_reply, +void DatabaseAPI::groupUpdate(const SDMS::GroupUpdateRequest &a_request, + SDMS::GroupDataReply &a_reply, 
LogContext log_context) { Value result; @@ -2039,8 +2056,8 @@ void DatabaseAPI::groupUpdate(const Auth::GroupUpdateRequest &a_request, setGroupData(a_reply, result, log_context); } -void DatabaseAPI::groupDelete(const Auth::GroupDeleteRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context) { +void DatabaseAPI::groupDelete(const SDMS::GroupDeleteRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context) { (void)a_reply; Value result; @@ -2052,8 +2069,8 @@ void DatabaseAPI::groupDelete(const Auth::GroupDeleteRequest &a_request, dbGet("grp/delete", params, result, log_context); } -void DatabaseAPI::groupList(const Auth::GroupListRequest &a_request, - Auth::GroupDataReply &a_reply, +void DatabaseAPI::groupList(const SDMS::GroupListRequest &a_request, + SDMS::GroupDataReply &a_reply, LogContext log_context) { (void)a_request; @@ -2067,8 +2084,8 @@ void DatabaseAPI::groupList(const Auth::GroupListRequest &a_request, setGroupData(a_reply, result, log_context); } -void DatabaseAPI::groupView(const Auth::GroupViewRequest &a_request, - Auth::GroupDataReply &a_reply, +void DatabaseAPI::groupView(const SDMS::GroupViewRequest &a_request, + SDMS::GroupDataReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -2117,8 +2134,8 @@ void DatabaseAPI::setGroupData(GroupDataReply &a_reply, TRANSLATE_END(a_result, log_context) } -void DatabaseAPI::repoList(const Auth::RepoListRequest &a_request, - Auth::RepoDataReply &a_reply, +void DatabaseAPI::repoList(const SDMS::RepoListRequest &a_request, + SDMS::RepoDataReply &a_reply, LogContext log_context) { Value result; @@ -2158,8 +2175,8 @@ void DatabaseAPI::repoView(std::vector &a_repos, } } -void DatabaseAPI::repoView(const Auth::RepoViewRequest &a_request, - Auth::RepoDataReply &a_reply, +void DatabaseAPI::repoView(const SDMS::RepoViewRequest &a_request, + SDMS::RepoDataReply &a_reply, LogContext log_context) { Value result; @@ -2169,8 +2186,8 @@ void DatabaseAPI::repoView(const 
Auth::RepoViewRequest &a_request, setRepoData(&a_reply, temp, result, log_context); } -void DatabaseAPI::repoCreate(const Auth::RepoCreateRequest &a_request, - Auth::RepoDataReply &a_reply, +void DatabaseAPI::repoCreate(const SDMS::RepoCreateRequest &a_request, + SDMS::RepoDataReply &a_reply, LogContext log_context) { Value result; @@ -2189,13 +2206,13 @@ void DatabaseAPI::repoCreate(const Auth::RepoCreateRequest &a_request, }; // List of optional fields to check - add_if_present(&Auth::RepoCreateRequest::has_path, &Auth::RepoCreateRequest::path, "path"); - add_if_present(&Auth::RepoCreateRequest::has_pub_key, &Auth::RepoCreateRequest::pub_key, "pub_key"); - add_if_present(&Auth::RepoCreateRequest::has_address, &Auth::RepoCreateRequest::address, "address"); - add_if_present(&Auth::RepoCreateRequest::has_endpoint, &Auth::RepoCreateRequest::endpoint, "endpoint"); - add_if_present(&Auth::RepoCreateRequest::has_desc, &Auth::RepoCreateRequest::desc, "desc"); - add_if_present(&Auth::RepoCreateRequest::has_domain, &Auth::RepoCreateRequest::domain, "domain"); - add_if_present(&Auth::RepoCreateRequest::has_exp_path, &Auth::RepoCreateRequest::exp_path, "exp_path"); + add_if_present(&SDMS::RepoCreateRequest::has_path, &SDMS::RepoCreateRequest::path, "path"); + add_if_present(&SDMS::RepoCreateRequest::has_pub_key, &SDMS::RepoCreateRequest::pub_key, "pub_key"); + add_if_present(&SDMS::RepoCreateRequest::has_address, &SDMS::RepoCreateRequest::address, "address"); + add_if_present(&SDMS::RepoCreateRequest::has_endpoint, &SDMS::RepoCreateRequest::endpoint, "endpoint"); + add_if_present(&SDMS::RepoCreateRequest::has_desc, &SDMS::RepoCreateRequest::desc, "desc"); + add_if_present(&SDMS::RepoCreateRequest::has_domain, &SDMS::RepoCreateRequest::domain, "domain"); + add_if_present(&SDMS::RepoCreateRequest::has_exp_path, &SDMS::RepoCreateRequest::exp_path, "exp_path"); if (a_request.admin_size() > 0) { nlohmann::json admins = nlohmann::json::array(); @@ -2212,8 +2229,8 @@ void 
DatabaseAPI::repoCreate(const Auth::RepoCreateRequest &a_request, setRepoData(&a_reply, temp, result, log_context); } -void DatabaseAPI::repoUpdate(const Auth::RepoUpdateRequest &a_request, - Auth::RepoDataReply &a_reply, +void DatabaseAPI::repoUpdate(const SDMS::RepoUpdateRequest &a_request, + SDMS::RepoDataReply &a_reply, LogContext log_context) { Value result; nlohmann::json payload; @@ -2260,16 +2277,16 @@ void DatabaseAPI::repoUpdate(const Auth::RepoUpdateRequest &a_request, setRepoData(&a_reply, temp, result, log_context); } -void DatabaseAPI::repoDelete(const Auth::RepoDeleteRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context) { +void DatabaseAPI::repoDelete(const SDMS::RepoDeleteRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context) { (void)a_reply; Value result; dbGet("repo/delete", {{"id", a_request.id()}}, result, log_context); } -void DatabaseAPI::repoCalcSize(const Auth::RepoCalcSizeRequest &a_request, - Auth::RepoCalcSizeReply &a_reply, +void DatabaseAPI::repoCalcSize(const SDMS::RepoCalcSizeRequest &a_request, + SDMS::RepoCalcSizeReply &a_reply, LogContext log_context) { Value result; @@ -2298,7 +2315,7 @@ void DatabaseAPI::repoCalcSize(const Auth::RepoCalcSizeRequest &a_request, TRANSLATE_END(result, log_context) } -void DatabaseAPI::setRepoData(Auth::RepoDataReply *a_reply, +void DatabaseAPI::setRepoData(SDMS::RepoDataReply *a_reply, std::vector &a_repos, const libjson::Value &a_result, LogContext log_context) { @@ -2371,8 +2388,8 @@ void DatabaseAPI::setRepoData(Auth::RepoDataReply *a_reply, } void DatabaseAPI::repoListAllocations( - const Auth::RepoListAllocationsRequest &a_request, - Auth::RepoAllocationsReply &a_reply, LogContext log_context) { + const SDMS::RepoListAllocationsRequest &a_request, + SDMS::RepoAllocationsReply &a_reply, LogContext log_context) { Value result; dbGet("repo/alloc/list/by_repo", {{"repo", a_request.id()}}, result, @@ -2382,8 +2399,8 @@ void DatabaseAPI::repoListAllocations( } void 
DatabaseAPI::repoListSubjectAllocations( - const Auth::RepoListSubjectAllocationsRequest &a_request, - Auth::RepoAllocationsReply &a_reply, LogContext log_context) { + const SDMS::RepoListSubjectAllocationsRequest &a_request, + SDMS::RepoAllocationsReply &a_reply, LogContext log_context) { Value result; vector> params; if (a_request.has_subject()) @@ -2399,8 +2416,8 @@ void DatabaseAPI::repoListSubjectAllocations( } void DatabaseAPI::repoListObjectAllocations( - const Auth::RepoListObjectAllocationsRequest &a_request, - Auth::RepoAllocationsReply &a_reply, LogContext log_context) { + const SDMS::RepoListObjectAllocationsRequest &a_request, + SDMS::RepoAllocationsReply &a_reply, LogContext log_context) { Value result; dbGet("repo/alloc/list/by_object", {{"object", a_request.id()}}, result, @@ -2409,7 +2426,7 @@ void DatabaseAPI::repoListObjectAllocations( setAllocData(a_reply, result, log_context); } -void DatabaseAPI::setAllocData(Auth::RepoAllocationsReply &a_reply, +void DatabaseAPI::setAllocData(SDMS::RepoAllocationsReply &a_reply, const libjson::Value &a_result, LogContext log_context) { TRANSLATE_BEGIN() @@ -2444,8 +2461,8 @@ void DatabaseAPI::setAllocData(AllocData *a_alloc, } void DatabaseAPI::repoViewAllocation( - const Auth::RepoViewAllocationRequest &a_request, - Auth::RepoAllocationsReply &a_reply, LogContext log_context) { + const SDMS::RepoViewAllocationRequest &a_request, + SDMS::RepoAllocationsReply &a_reply, LogContext log_context) { Value result; vector> params; params.push_back({"repo", a_request.repo()}); @@ -2458,8 +2475,8 @@ void DatabaseAPI::repoViewAllocation( } void DatabaseAPI::repoAllocationStats( - const Auth::RepoAllocationStatsRequest &a_request, - Auth::RepoAllocationStatsReply &a_reply, LogContext log_context) { + const SDMS::RepoAllocationStatsRequest &a_request, + SDMS::RepoAllocationStatsReply &a_reply, LogContext log_context) { Value result; vector> params; params.push_back({"repo", a_request.repo()}); @@ -2496,7 +2513,7 @@ void 
DatabaseAPI::setAllocStatsData(AllocStatsData &a_stats, } void DatabaseAPI::repoAllocationSet( - const Auth::RepoAllocationSetRequest &a_request, Anon::AckReply &a_reply, + const SDMS::RepoAllocationSetRequest &a_request, SDMS::AckReply &a_reply, LogContext log_context) { (void)a_reply; Value result; @@ -2510,8 +2527,8 @@ void DatabaseAPI::repoAllocationSet( } void DatabaseAPI::repoAllocationSetDefault( - const Auth::RepoAllocationSetDefaultRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context) { + const SDMS::RepoAllocationSetDefaultRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context) { (void)a_reply; Value result; @@ -2556,8 +2573,8 @@ void DatabaseAPI::getPerms(const GetPermsRequest &a_request, TRANSLATE_END(result, log_context) } -void DatabaseAPI::repoAuthz(const Auth::RepoAuthzRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context) { +void DatabaseAPI::repoAuthz(const SDMS::RepoAuthzRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context) { (void)a_reply; Value result; @@ -2571,8 +2588,8 @@ void DatabaseAPI::repoAuthz(const Auth::RepoAuthzRequest &a_request, result, log_context); } -void DatabaseAPI::topicListTopics(const Auth::TopicListTopicsRequest &a_request, - Auth::TopicDataReply &a_reply, +void DatabaseAPI::topicListTopics(const SDMS::TopicListTopicsRequest &a_request, + SDMS::TopicDataReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -2588,8 +2605,8 @@ void DatabaseAPI::topicListTopics(const Auth::TopicListTopicsRequest &a_request, setTopicDataReply(a_reply, result, log_context); } -void DatabaseAPI::topicView(const Auth::TopicViewRequest &a_request, - Auth::TopicDataReply &a_reply, +void DatabaseAPI::topicView(const SDMS::TopicViewRequest &a_request, + SDMS::TopicDataReply &a_reply, LogContext log_context) { Value result; @@ -2598,8 +2615,8 @@ void DatabaseAPI::topicView(const Auth::TopicViewRequest &a_request, setTopicDataReply(a_reply, result, log_context); } 
-void DatabaseAPI::topicSearch(const Auth::TopicSearchRequest &a_request, - Auth::TopicDataReply &a_reply, +void DatabaseAPI::topicSearch(const SDMS::TopicSearchRequest &a_request, + SDMS::TopicDataReply &a_reply, LogContext log_context) { Value result; @@ -2608,7 +2625,7 @@ void DatabaseAPI::topicSearch(const Auth::TopicSearchRequest &a_request, setTopicDataReply(a_reply, result, log_context); } -void DatabaseAPI::setTopicDataReply(Auth::TopicDataReply &a_reply, +void DatabaseAPI::setTopicDataReply(SDMS::TopicDataReply &a_reply, const libjson::Value &a_result, LogContext log_context) { TRANSLATE_BEGIN() @@ -2659,7 +2676,7 @@ void DatabaseAPI::setTopicDataReply(Auth::TopicDataReply &a_reply, } void DatabaseAPI::noteCreate(const NoteCreateRequest &a_request, - Auth::NoteDataReply &a_reply, + SDMS::NoteDataReply &a_reply, LogContext log_context) { DL_DEBUG(log_context, "NoteCreate"); @@ -2677,7 +2694,7 @@ void DatabaseAPI::noteCreate(const NoteCreateRequest &a_request, } void DatabaseAPI::noteUpdate(const NoteUpdateRequest &a_request, - Auth::NoteDataReply &a_reply, + SDMS::NoteDataReply &a_reply, LogContext log_context) { DL_DEBUG(log_context, "NoteUpdate"); @@ -2697,8 +2714,8 @@ void DatabaseAPI::noteUpdate(const NoteUpdateRequest &a_request, setNoteDataReply(a_reply, result, log_context); } -void DatabaseAPI::noteCommentEdit(const Auth::NoteCommentEditRequest &a_request, - Auth::NoteDataReply &a_reply, +void DatabaseAPI::noteCommentEdit(const SDMS::NoteCommentEditRequest &a_request, + SDMS::NoteDataReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -2711,8 +2728,8 @@ void DatabaseAPI::noteCommentEdit(const Auth::NoteCommentEditRequest &a_request, setNoteDataReply(a_reply, result, log_context); } -void DatabaseAPI::noteView(const Auth::NoteViewRequest &a_request, - Auth::NoteDataReply &a_reply, +void DatabaseAPI::noteView(const SDMS::NoteViewRequest &a_request, + SDMS::NoteDataReply &a_reply, LogContext log_context) { Value result; @@ 
-2722,8 +2739,8 @@ void DatabaseAPI::noteView(const Auth::NoteViewRequest &a_request, } void DatabaseAPI::noteListBySubject( - const Auth::NoteListBySubjectRequest &a_request, - Auth::NoteDataReply &a_reply, LogContext log_context) { + const SDMS::NoteListBySubjectRequest &a_request, + SDMS::NoteDataReply &a_reply, LogContext log_context) { Value result; dbGet("note/list/by_subject", {{"subject", a_request.subject()}}, result, @@ -2738,7 +2755,7 @@ void DatabaseAPI::notePurge(uint32_t a_age_sec, LogContext log_context) { dbGet("note/purge", {{"age_sec", to_string(a_age_sec)}}, result, log_context); } -void DatabaseAPI::setNoteDataReply(Auth::NoteDataReply &a_reply, +void DatabaseAPI::setNoteDataReply(SDMS::NoteDataReply &a_reply, const libjson::Value &a_result, LogContext log_context) { Value::ArrayConstIter i; @@ -2803,8 +2820,8 @@ void DatabaseAPI::setNoteData(NoteData *a_note, } } -void DatabaseAPI::tagSearch(const Auth::TagSearchRequest &a_request, - Auth::TagDataReply &a_reply, +void DatabaseAPI::tagSearch(const SDMS::TagSearchRequest &a_request, + SDMS::TagDataReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -2821,8 +2838,8 @@ void DatabaseAPI::tagSearch(const Auth::TagSearchRequest &a_request, setTagDataReply(a_reply, result, log_context); } -void DatabaseAPI::tagListByCount(const Auth::TagListByCountRequest &a_request, - Auth::TagDataReply &a_reply, +void DatabaseAPI::tagListByCount(const SDMS::TagListByCountRequest &a_request, + SDMS::TagDataReply &a_reply, LogContext log_context) { Value result; vector> params; @@ -2837,7 +2854,7 @@ void DatabaseAPI::tagListByCount(const Auth::TagListByCountRequest &a_request, setTagDataReply(a_reply, result, log_context); } -void DatabaseAPI::setTagDataReply(Auth::TagDataReply &a_reply, +void DatabaseAPI::setTagDataReply(SDMS::TagDataReply &a_reply, const Value &a_result, LogContext log_context) { Value::ObjectConstIter j; @@ -2871,8 +2888,8 @@ void DatabaseAPI::setTagData(TagData *a_tag, 
a_tag->set_count(a_obj.getNumber("count")); } -void DatabaseAPI::schemaSearch(const Auth::SchemaSearchRequest &a_request, - Auth::SchemaDataReply &a_reply, +void DatabaseAPI::schemaSearch(const SDMS::SchemaSearchRequest &a_request, + SDMS::SchemaDataReply &a_reply, LogContext log_context) { libjson::Value result; vector> params; @@ -2896,8 +2913,8 @@ void DatabaseAPI::schemaSearch(const Auth::SchemaSearchRequest &a_request, setSchemaDataReply(a_reply, result, log_context); } -void DatabaseAPI::schemaView(const Auth::SchemaViewRequest &a_request, - Auth::SchemaDataReply &a_reply, +void DatabaseAPI::schemaView(const SDMS::SchemaViewRequest &a_request, + SDMS::SchemaDataReply &a_reply, LogContext log_context) { libjson::Value result; vector> params; @@ -2910,7 +2927,7 @@ void DatabaseAPI::schemaView(const Auth::SchemaViewRequest &a_request, setSchemaDataReply(a_reply, result, log_context); } -void DatabaseAPI::schemaCreate(const Auth::SchemaCreateRequest &a_request, +void DatabaseAPI::schemaCreate(const SDMS::SchemaCreateRequest &a_request, LogContext log_context) { libjson::Value result; @@ -2926,7 +2943,7 @@ void DatabaseAPI::schemaCreate(const Auth::SchemaCreateRequest &a_request, dbPost("schema/create", {}, &body, result, log_context); } -void DatabaseAPI::schemaRevise(const Auth::SchemaReviseRequest &a_request, +void DatabaseAPI::schemaRevise(const SDMS::SchemaReviseRequest &a_request, LogContext log_context) { libjson::Value result; @@ -2952,7 +2969,7 @@ void DatabaseAPI::schemaRevise(const Auth::SchemaReviseRequest &a_request, dbPost("schema/revise", {{"id", a_request.id()}}, &body, result, log_context); } -void DatabaseAPI::schemaUpdate(const Auth::SchemaUpdateRequest &a_request, +void DatabaseAPI::schemaUpdate(const SDMS::SchemaUpdateRequest &a_request, LogContext log_context) { libjson::Value result; @@ -2981,8 +2998,8 @@ void DatabaseAPI::schemaUpdate(const Auth::SchemaUpdateRequest &a_request, dbPost("schema/update", {{"id", a_request.id()}}, &body, 
result, log_context); } -void DatabaseAPI::schemaDelete(const Auth::SchemaDeleteRequest &a_request, - Anon::AckReply &a_reply, +void DatabaseAPI::schemaDelete(const SDMS::SchemaDeleteRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context) { (void)a_reply; libjson::Value result; @@ -2990,7 +3007,7 @@ void DatabaseAPI::schemaDelete(const Auth::SchemaDeleteRequest &a_request, dbPost("schema/delete", {{"id", a_request.id()}}, 0, result, log_context); } -void DatabaseAPI::setSchemaDataReply(Auth::SchemaDataReply &a_reply, +void DatabaseAPI::setSchemaDataReply(SDMS::SchemaDataReply &a_reply, const libjson::Value &a_result, LogContext log_context) { Value::ObjectConstIter j; @@ -3078,8 +3095,8 @@ void DatabaseAPI::schemaView(const std::string &a_id, libjson::Value &a_result, dbGet("schema/view", {{"id", a_id}}, a_result, log_context); } -void DatabaseAPI::dailyMessage(const Anon::DailyMessageRequest &a_request, - Anon::DailyMessageReply &a_reply, +void DatabaseAPI::dailyMessage(const SDMS::DailyMessageRequest &a_request, + SDMS::DailyMessageReply &a_reply, LogContext log_context) { (void)a_request; // Not used libjson::Value result; @@ -3129,8 +3146,8 @@ void DatabaseAPI::taskAbort(const std::string &a_task_id, log_context); } -void DatabaseAPI::taskInitDataGet(const Auth::DataGetRequest &a_request, - Auth::DataGetReply &a_reply, +void DatabaseAPI::taskInitDataGet(const SDMS::DataGetRequest &a_request, + SDMS::DataGetReply &a_reply, libjson::Value &a_result, LogContext log_context) { nlohmann::json payload; @@ -3172,7 +3189,7 @@ void DatabaseAPI::taskInitDataGet(const Auth::DataGetRequest &a_request, setDataGetReply(a_reply, a_result, log_context); } -void DatabaseAPI::setDataGetReply(Auth::DataGetReply &a_reply, +void DatabaseAPI::setDataGetReply(SDMS::DataGetReply &a_reply, const libjson::Value &a_result, LogContext log_context) { Value::ObjectIter t; @@ -3203,8 +3220,8 @@ void DatabaseAPI::setDataGetReply(Auth::DataGetReply &a_reply, 
TRANSLATE_END(a_result, log_context) } -void DatabaseAPI::taskInitDataPut(const Auth::DataPutRequest &a_request, - Auth::DataPutReply &a_reply, +void DatabaseAPI::taskInitDataPut(const SDMS::DataPutRequest &a_request, + SDMS::DataPutReply &a_reply, libjson::Value &a_result, LogContext log_context) { nlohmann::json payload; @@ -3240,7 +3257,7 @@ void DatabaseAPI::taskInitDataPut(const Auth::DataPutRequest &a_request, setDataPutReply(a_reply, a_result, log_context); } -void DatabaseAPI::setDataPutReply(Auth::DataPutReply &a_reply, +void DatabaseAPI::setDataPutReply(SDMS::DataPutReply &a_reply, const libjson::Value &a_result, LogContext log_context) { Value::ObjectIter t; @@ -3252,7 +3269,7 @@ void DatabaseAPI::setDataPutReply(Auth::DataPutReply &a_reply, Value::ArrayConstIter j; if (!obj.has("glob_data") || obj.value().size() != 1) - EXCEPT_PARAM(ID_BAD_REQUEST, "Invalid or missing upload target"); + EXCEPT_PARAM(BAD_REQUEST, "Invalid or missing upload target"); const Value::Array &arr = obj.asArray(); const Value::Object &rec = arr.begin()->asObject(); @@ -3289,8 +3306,8 @@ void DatabaseAPI::taskInitRecordCollectionDelete( } void DatabaseAPI::taskInitRecordAllocChange( - const Auth::RecordAllocChangeRequest &a_request, - Auth::RecordAllocChangeReply &a_reply, libjson::Value &a_result, + const SDMS::RecordAllocChangeRequest &a_request, + SDMS::RecordAllocChangeReply &a_reply, libjson::Value &a_result, LogContext log_context) { nlohmann::json payload; nlohmann::json ids = nlohmann::json::array(); @@ -3329,8 +3346,8 @@ void DatabaseAPI::taskInitRecordAllocChange( } void DatabaseAPI::taskInitRecordOwnerChange( - const Auth::RecordOwnerChangeRequest &a_request, - Auth::RecordOwnerChangeReply &a_reply, libjson::Value &a_result, + const SDMS::RecordOwnerChangeRequest &a_request, + SDMS::RecordOwnerChangeReply &a_reply, libjson::Value &a_result, LogContext log_context) { nlohmann::json payload; nlohmann::json ids = nlohmann::json::array(); @@ -3374,7 +3391,7 @@ void 
DatabaseAPI::taskInitRecordOwnerChange( } void DatabaseAPI::taskInitProjectDelete( - const Auth::ProjectDeleteRequest &a_request, Auth::TaskDataReply &a_reply, + const SDMS::ProjectDeleteRequest &a_request, SDMS::TaskDataReply &a_reply, libjson::Value &a_result, LogContext log_context) { nlohmann::json payload; nlohmann::json ids = nlohmann::json::array(); @@ -3392,8 +3409,8 @@ void DatabaseAPI::taskInitProjectDelete( } void DatabaseAPI::taskInitRepoAllocationCreate( - const Auth::RepoAllocationCreateRequest &a_request, - Auth::TaskDataReply &a_reply, libjson::Value &a_result, + const SDMS::RepoAllocationCreateRequest &a_request, + SDMS::TaskDataReply &a_reply, libjson::Value &a_result, LogContext log_context) { dbGet("repo/alloc/create", {{"subject", a_request.subject()}, @@ -3406,8 +3423,8 @@ void DatabaseAPI::taskInitRepoAllocationCreate( } void DatabaseAPI::taskInitRepoAllocationDelete( - const Auth::RepoAllocationDeleteRequest &a_request, - Auth::TaskDataReply &a_reply, libjson::Value &a_result, + const SDMS::RepoAllocationDeleteRequest &a_request, + SDMS::TaskDataReply &a_reply, libjson::Value &a_result, LogContext log_context) { dbGet("repo/alloc/delete", {{"subject", a_request.subject()}, {"repo", a_request.repo()}}, @@ -3516,7 +3533,7 @@ void DatabaseAPI::setTaskData(TaskData *a_task, * method removes tasks that are nor in READY status from the original JSON * input - this is to. */ -void DatabaseAPI::setTaskDataReply(Auth::TaskDataReply &a_reply, +void DatabaseAPI::setTaskDataReply(SDMS::TaskDataReply &a_reply, const libjson::Value &a_result, LogContext log_context) { TRANSLATE_BEGIN() @@ -3536,7 +3553,7 @@ void DatabaseAPI::setTaskDataReply(Auth::TaskDataReply &a_reply, * * JSON contains an array of task objects containing task fields. 
*/ -void DatabaseAPI::setTaskDataReplyArray(Auth::TaskDataReply &a_reply, +void DatabaseAPI::setTaskDataReplyArray(SDMS::TaskDataReply &a_reply, const libjson::Value &a_result, LogContext log_context) { TRANSLATE_BEGIN() @@ -3596,8 +3613,8 @@ void DatabaseAPI::taskFinalize(const std::string &a_task_id, bool a_succeeded, dbPost("task/finalize", params, 0, a_result, log_context); } -void DatabaseAPI::taskList(const Auth::TaskListRequest &a_request, - Auth::TaskDataReply &a_reply, +void DatabaseAPI::taskList(const SDMS::TaskListRequest &a_request, + SDMS::TaskDataReply &a_reply, LogContext log_context) { vector> params; @@ -3629,8 +3646,8 @@ void DatabaseAPI::taskList(const Auth::TaskListRequest &a_request, setTaskDataReplyArray(a_reply, result, log_context); } -void DatabaseAPI::taskView(const Auth::TaskViewRequest &a_request, - Auth::TaskDataReply &a_reply, +void DatabaseAPI::taskView(const SDMS::TaskViewRequest &a_request, + SDMS::TaskDataReply &a_reply, LogContext log_context) { libjson::Value result; @@ -3790,13 +3807,13 @@ void DatabaseAPI::metricsPurge(uint32_t a_timestamp, LogContext log_context) { log_context); } -uint32_t DatabaseAPI::parseSearchRequest(const Auth::SearchRequest &a_request, +uint32_t DatabaseAPI::parseSearchRequest(const SDMS::SearchRequest &a_request, std::string &a_qry_begin, std::string &a_qry_end, std::string &a_qry_filter, std::string &a_params, LogContext log_context) { - string view = (a_request.mode() == SM_DATA ? "dataview" : "collview"); + string view = (a_request.mode() == SDMS::SM_DATA ? 
"dataview" : "collview"); if (a_request.has_published() && a_request.published()) { a_qry_begin = string("for i in ") + view + " search i.public == true"; @@ -3860,7 +3877,7 @@ uint32_t DatabaseAPI::parseSearchRequest(const Auth::SearchRequest &a_request, } // Data-only search options - if (a_request.mode() == SM_DATA) { + if (a_request.mode() == SDMS::SM_DATA) { if (a_request.has_sch_id() > 0) { a_qry_begin += " and i.sch_id == @sch"; a_params += ",\"sch_id\":\"" + a_request.sch_id() + "\""; @@ -3937,7 +3954,7 @@ uint32_t DatabaseAPI::parseSearchRequest(const Auth::SearchRequest &a_request, string(" return distinct " "{_id:i._id,title:i.title,'desc':i['desc'],owner:i.owner,owner_" "name:name,alias:i.alias") + - (a_request.mode() == SM_DATA ? ",size:i.size,md_err:i.md_err" : "") + "}"; + (a_request.mode() == SDMS::SM_DATA ? ",size:i.size,md_err:i.md_err" : "") + "}"; a_qry_begin = a_qry_begin; a_qry_end = a_qry_end; diff --git a/core/server/DatabaseAPI.hpp b/core/server/DatabaseAPI.hpp index bf94ce90b..fa9327c50 100644 --- a/core/server/DatabaseAPI.hpp +++ b/core/server/DatabaseAPI.hpp @@ -4,9 +4,7 @@ // Local public includes #include "common/DynaLog.hpp" -#include "common/SDMS.pb.h" -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" +#include "common/envelope.pb.h" #include "common/libjson.hpp" // Third party includes @@ -38,10 +36,10 @@ class DatabaseAPI { void setClient(const std::string &a_client); void clientAuthenticateByPassword(const std::string &a_password, - Anon::AuthStatusReply &a_reply, + SDMS::AuthStatusReply &a_reply, LogContext log_context); void clientAuthenticateByToken(const std::string &a_token, - Anon::AuthStatusReply &a_reply, + SDMS::AuthStatusReply &a_reply, LogContext log_context); void clientLinkIdentity(const std::string &a_identity, LogContext log_context); @@ -72,186 +70,184 @@ class DatabaseAPI { std::vector &a_expiring_tokens, LogContext log_context); void purgeTransferRecords(size_t age); - void checkPerms(const 
Auth::CheckPermsRequest &a_request, - Auth::CheckPermsReply &a_reply, LogContext log_context); - void getPerms(const Auth::GetPermsRequest &a_request, - Auth::GetPermsReply &a_reply, LogContext log_context); - - void userSetAccessToken(const Auth::UserSetAccessTokenRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context); - void userCreate(const Auth::UserCreateRequest &a_request, - Auth::UserDataReply &a_reply, LogContext log_context); - void userView(const Auth::UserViewRequest &a_request, - Auth::UserDataReply &a_reply, LogContext log_context); - void userUpdate(const Auth::UserUpdateRequest &a_request, - Auth::UserDataReply &a_reply, LogContext log_context); - void userListAll(const Auth::UserListAllRequest &a_request, - Auth::UserDataReply &a_reply, LogContext log_context); - void userListCollab(const Auth::UserListCollabRequest &a_request, - Auth::UserDataReply &a_reply, LogContext log_context); - void userFindByUUIDs(const Auth::UserFindByUUIDsRequest &a_request, - Auth::UserDataReply &a_reply, LogContext log_context); - void userFindByNameUID(const Auth::UserFindByNameUIDRequest &a_request, - Auth::UserDataReply &a_reply, LogContext log_context); - void userGetRecentEP(const Auth::UserGetRecentEPRequest &a_request, - Auth::UserGetRecentEPReply &a_reply, + void checkPerms(const SDMS::CheckPermsRequest &a_request, + SDMS::CheckPermsReply &a_reply, LogContext log_context); + void getPerms(const SDMS::GetPermsRequest &a_request, + SDMS::GetPermsReply &a_reply, LogContext log_context); + + void userSetAccessToken(const SDMS::UserSetAccessTokenRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context); + void userCreate(const SDMS::UserCreateRequest &a_request, + SDMS::UserDataReply &a_reply, LogContext log_context); + void userView(const SDMS::UserViewRequest &a_request, + SDMS::UserDataReply &a_reply, LogContext log_context); + void userUpdate(const SDMS::UserUpdateRequest &a_request, + SDMS::UserDataReply &a_reply, LogContext 
log_context); + void userListAll(const SDMS::UserListAllRequest &a_request, + SDMS::UserDataReply &a_reply, LogContext log_context); + void userListCollab(const SDMS::UserListCollabRequest &a_request, + SDMS::UserDataReply &a_reply, LogContext log_context); + void userFindByUUIDs(const SDMS::UserFindByUUIDsRequest &a_request, + SDMS::UserDataReply &a_reply, LogContext log_context); + void userFindByNameUID(const SDMS::UserFindByNameUIDRequest &a_request, + SDMS::UserDataReply &a_reply, LogContext log_context); + void userGetRecentEP(const SDMS::UserGetRecentEPRequest &a_request, + SDMS::UserGetRecentEPReply &a_reply, LogContext log_context); - void userSetRecentEP(const Auth::UserSetRecentEPRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context); - - void projCreate(const Auth::ProjectCreateRequest &a_request, - Auth::ProjectDataReply &a_reply, LogContext log_context); - void projUpdate(const Auth::ProjectUpdateRequest &a_request, - Auth::ProjectDataReply &a_reply, LogContext log_context); - void projView(const Auth::ProjectViewRequest &a_request, - Auth::ProjectDataReply &a_reply, LogContext log_context); - void projList(const Auth::ProjectListRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context); - void projSearch(const std::string &a_query, Auth::ProjectDataReply &a_reply, - LogContext log_context); - void projGetRole(const Auth::ProjectGetRoleRequest &a_request, - Auth::ProjectGetRoleReply &a_reply, LogContext log_context); - - void recordView(const Auth::RecordViewRequest &a_request, - Auth::RecordDataReply &a_reply, LogContext log_context); - void recordCreate(const Auth::RecordCreateRequest &a_request, - Auth::RecordDataReply &a_reply, LogContext log_context); - void recordCreateBatch(const Auth::RecordCreateBatchRequest &a_request, - Auth::RecordDataReply &a_reply, + void userSetRecentEP(const SDMS::UserSetRecentEPRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context); + + void projCreate(const 
SDMS::ProjectCreateRequest &a_request, + SDMS::ProjectDataReply &a_reply, LogContext log_context); + void projUpdate(const SDMS::ProjectUpdateRequest &a_request, + SDMS::ProjectDataReply &a_reply, LogContext log_context); + void projView(const SDMS::ProjectViewRequest &a_request, + SDMS::ProjectDataReply &a_reply, LogContext log_context); + void projList(const SDMS::ProjectListRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context); + void projGetRole(const SDMS::ProjectGetRoleRequest &a_request, + SDMS::ProjectGetRoleReply &a_reply, LogContext log_context); + + void recordView(const SDMS::RecordViewRequest &a_request, + SDMS::RecordDataReply &a_reply, LogContext log_context); + void recordCreate(const SDMS::RecordCreateRequest &a_request, + SDMS::RecordDataReply &a_reply, LogContext log_context); + void recordCreateBatch(const SDMS::RecordCreateBatchRequest &a_request, + SDMS::RecordDataReply &a_reply, LogContext log_context); - void recordUpdate(const Auth::RecordUpdateRequest &a_request, - Auth::RecordDataReply &a_reply, libjson::Value &result, + void recordUpdate(const SDMS::RecordUpdateRequest &a_request, + SDMS::RecordDataReply &a_reply, libjson::Value &result, LogContext log_context); - void recordUpdateBatch(const Auth::RecordUpdateBatchRequest &a_request, - Auth::RecordDataReply &a_reply, libjson::Value &result, + void recordUpdateBatch(const SDMS::RecordUpdateBatchRequest &a_request, + SDMS::RecordDataReply &a_reply, libjson::Value &result, LogContext log_context); - void recordUpdateSize(const Auth::RepoDataSizeReply &a_sizes, + void recordUpdateSize(const SDMS::RepoDataSizeReply &a_sizes, LogContext log_context); void recordUpdateSchemaError(const std::string &a_rec_id, const std::string &a_err_msg, LogContext log_context); - void recordExport(const Auth::RecordExportRequest &a_request, - Auth::RecordExportReply &a_reply, LogContext log_context); - void recordLock(const Auth::RecordLockRequest &a_request, - Auth::ListingReply 
&a_reply, LogContext log_context); - void recordListByAlloc(const Auth::RecordListByAllocRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context); + void recordExport(const SDMS::RecordExportRequest &a_request, + SDMS::RecordExportReply &a_reply, LogContext log_context); + void recordLock(const SDMS::RecordLockRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context); + void recordListByAlloc(const SDMS::RecordListByAllocRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context); void recordGetDependencyGraph( - const Auth::RecordGetDependencyGraphRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context); - - void generalSearch(const Auth::SearchRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context); - - void dataPath(const Auth::DataPathRequest &a_request, - Auth::DataPathReply &a_reply, LogContext log_context); - - void collListPublished(const Auth::CollListPublishedRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context); - void collCreate(const Auth::CollCreateRequest &a_request, - Auth::CollDataReply &a_reply, LogContext log_context); - void collUpdate(const Auth::CollUpdateRequest &a_request, - Auth::CollDataReply &a_reply, LogContext log_context); - void collView(const Auth::CollViewRequest &a_request, - Auth::CollDataReply &a_reply, LogContext log_context); - void collRead(const Auth::CollReadRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context); - void collWrite(const Auth::CollWriteRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context); - void collMove(const Auth::CollMoveRequest &a_request, Anon::AckReply &a_reply, + const SDMS::RecordGetDependencyGraphRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context); + + void generalSearch(const SDMS::SearchRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context); + + void dataPath(const SDMS::DataPathRequest &a_request, + 
SDMS::DataPathReply &a_reply, LogContext log_context); + + void collListPublished(const SDMS::CollListPublishedRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context); + void collCreate(const SDMS::CollCreateRequest &a_request, + SDMS::CollDataReply &a_reply, LogContext log_context); + void collUpdate(const SDMS::CollUpdateRequest &a_request, + SDMS::CollDataReply &a_reply, LogContext log_context); + void collView(const SDMS::CollViewRequest &a_request, + SDMS::CollDataReply &a_reply, LogContext log_context); + void collRead(const SDMS::CollReadRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context); + void collWrite(const SDMS::CollWriteRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context); + void collMove(const SDMS::CollMoveRequest &a_request, SDMS::AckReply &a_reply, LogContext log_context); - void collGetParents(const Auth::CollGetParentsRequest &a_request, - Auth::CollPathReply &a_reply, LogContext log_context); - void collGetOffset(const Auth::CollGetOffsetRequest &a_request, - Auth::CollGetOffsetReply &a_reply, LogContext log_context); - - void queryList(const Auth::QueryListRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context); - void queryCreate(const Auth::QueryCreateRequest &a_request, - Auth::QueryDataReply &a_reply, LogContext log_context); - void queryUpdate(const Auth::QueryUpdateRequest &a_request, - Auth::QueryDataReply &a_reply, LogContext log_context); - void queryDelete(const Auth::QueryDeleteRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context); - void queryView(const Auth::QueryViewRequest &a_request, - Auth::QueryDataReply &a_reply, LogContext log_context); - void queryExec(const Auth::QueryExecRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context); - - void aclView(const Auth::ACLViewRequest &a_request, - Auth::ACLDataReply &a_reply, LogContext log_context); - void aclUpdate(const Auth::ACLUpdateRequest &a_request, - 
Auth::ACLDataReply &a_reply, LogContext log_context); - void aclSharedList(const Auth::ACLSharedListRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context); - void aclSharedListItems(const Auth::ACLSharedListItemsRequest &a_request, - Auth::ListingReply &a_reply, LogContext log_context); - - void groupCreate(const Auth::GroupCreateRequest &a_request, - Auth::GroupDataReply &a_reply, LogContext log_context); - void groupUpdate(const Auth::GroupUpdateRequest &a_request, - Auth::GroupDataReply &a_reply, LogContext log_context); - void groupDelete(const Auth::GroupDeleteRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context); - void groupList(const Auth::GroupListRequest &a_request, - Auth::GroupDataReply &a_reply, LogContext log_context); - void groupView(const Auth::GroupViewRequest &a_request, - Auth::GroupDataReply &a_reply, LogContext log_context); + void collGetParents(const SDMS::CollGetParentsRequest &a_request, + SDMS::CollPathReply &a_reply, LogContext log_context); + void collGetOffset(const SDMS::CollGetOffsetRequest &a_request, + SDMS::CollGetOffsetReply &a_reply, LogContext log_context); + + void queryList(const SDMS::QueryListRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context); + void queryCreate(const SDMS::QueryCreateRequest &a_request, + SDMS::QueryDataReply &a_reply, LogContext log_context); + void queryUpdate(const SDMS::QueryUpdateRequest &a_request, + SDMS::QueryDataReply &a_reply, LogContext log_context); + void queryDelete(const SDMS::QueryDeleteRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context); + void queryView(const SDMS::QueryViewRequest &a_request, + SDMS::QueryDataReply &a_reply, LogContext log_context); + void queryExec(const SDMS::QueryExecRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context); + + void aclView(const SDMS::ACLViewRequest &a_request, + SDMS::ACLDataReply &a_reply, LogContext log_context); + void aclUpdate(const 
SDMS::ACLUpdateRequest &a_request, + SDMS::ACLDataReply &a_reply, LogContext log_context); + void aclSharedList(const SDMS::ACLSharedListRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context); + void aclSharedListItems(const SDMS::ACLSharedListItemsRequest &a_request, + SDMS::ListingReply &a_reply, LogContext log_context); + + void groupCreate(const SDMS::GroupCreateRequest &a_request, + SDMS::GroupDataReply &a_reply, LogContext log_context); + void groupUpdate(const SDMS::GroupUpdateRequest &a_request, + SDMS::GroupDataReply &a_reply, LogContext log_context); + void groupDelete(const SDMS::GroupDeleteRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context); + void groupList(const SDMS::GroupListRequest &a_request, + SDMS::GroupDataReply &a_reply, LogContext log_context); + void groupView(const SDMS::GroupViewRequest &a_request, + SDMS::GroupDataReply &a_reply, LogContext log_context); void repoList(std::vector &a_repos, LogContext log_context); - void repoList(const Auth::RepoListRequest &a_request, - Auth::RepoDataReply &a_reply, LogContext log_context); + void repoList(const SDMS::RepoListRequest &a_request, + SDMS::RepoDataReply &a_reply, LogContext log_context); void repoView(std::vector &a_repos, LogContext log_context); - void repoView(const Auth::RepoViewRequest &a_request, - Auth::RepoDataReply &a_reply, LogContext log_context); - void repoCreate(const Auth::RepoCreateRequest &a_request, - Auth::RepoDataReply &a_reply, LogContext log_context); - void repoUpdate(const Auth::RepoUpdateRequest &a_request, - Auth::RepoDataReply &a_reply, LogContext log_context); - void repoDelete(const Auth::RepoDeleteRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context); - void repoCalcSize(const Auth::RepoCalcSizeRequest &a_request, - Auth::RepoCalcSizeReply &a_reply, LogContext log_context); - void repoListAllocations(const Auth::RepoListAllocationsRequest &a_request, - Auth::RepoAllocationsReply &a_reply, + void 
repoView(const SDMS::RepoViewRequest &a_request, + SDMS::RepoDataReply &a_reply, LogContext log_context); + void repoCreate(const SDMS::RepoCreateRequest &a_request, + SDMS::RepoDataReply &a_reply, LogContext log_context); + void repoUpdate(const SDMS::RepoUpdateRequest &a_request, + SDMS::RepoDataReply &a_reply, LogContext log_context); + void repoDelete(const SDMS::RepoDeleteRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context); + void repoCalcSize(const SDMS::RepoCalcSizeRequest &a_request, + SDMS::RepoCalcSizeReply &a_reply, LogContext log_context); + void repoListAllocations(const SDMS::RepoListAllocationsRequest &a_request, + SDMS::RepoAllocationsReply &a_reply, LogContext log_context); void repoListSubjectAllocations( - const Auth::RepoListSubjectAllocationsRequest &a_request, - Auth::RepoAllocationsReply &a_reply, LogContext log_context); + const SDMS::RepoListSubjectAllocationsRequest &a_request, + SDMS::RepoAllocationsReply &a_reply, LogContext log_context); void repoListObjectAllocations( - const Auth::RepoListObjectAllocationsRequest &a_request, - Auth::RepoAllocationsReply &a_reply, LogContext log_context); - void repoViewAllocation(const Auth::RepoViewAllocationRequest &a_request, - Auth::RepoAllocationsReply &a_reply, + const SDMS::RepoListObjectAllocationsRequest &a_request, + SDMS::RepoAllocationsReply &a_reply, LogContext log_context); + void repoViewAllocation(const SDMS::RepoViewAllocationRequest &a_request, + SDMS::RepoAllocationsReply &a_reply, LogContext log_context); - void repoAllocationStats(const Auth::RepoAllocationStatsRequest &a_request, - Auth::RepoAllocationStatsReply &a_reply, + void repoAllocationStats(const SDMS::RepoAllocationStatsRequest &a_request, + SDMS::RepoAllocationStatsReply &a_reply, LogContext log_context); - void repoAllocationSet(const Auth::RepoAllocationSetRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context); + void repoAllocationSet(const SDMS::RepoAllocationSetRequest &a_request, 
+ SDMS::AckReply &a_reply, LogContext log_context); void repoAllocationSetDefault( - const Auth::RepoAllocationSetDefaultRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context); - void repoAuthz(const Auth::RepoAuthzRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context); - - void topicListTopics(const Auth::TopicListTopicsRequest &a_request, - Auth::TopicDataReply &a_reply, LogContext log_context); - void topicView(const Auth::TopicViewRequest &a_request, - Auth::TopicDataReply &a_reply, LogContext log_context); - void topicSearch(const Auth::TopicSearchRequest &a_request, - Auth::TopicDataReply &a_reply, LogContext log_context); - - void noteCreate(const Auth::NoteCreateRequest &a_request, - Auth::NoteDataReply &a_reply, LogContext log_context); - void noteUpdate(const Auth::NoteUpdateRequest &a_request, - Auth::NoteDataReply &a_reply, LogContext log_context); - void noteCommentEdit(const Auth::NoteCommentEditRequest &a_request, - Auth::NoteDataReply &a_reply, LogContext log_context); - void noteView(const Auth::NoteViewRequest &a_request, - Auth::NoteDataReply &a_reply, LogContext log_context); - void noteListBySubject(const Auth::NoteListBySubjectRequest &a_request, - Auth::NoteDataReply &a_reply, LogContext log_context); + const SDMS::RepoAllocationSetDefaultRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context); + void repoAuthz(const SDMS::RepoAuthzRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context); + + void topicListTopics(const SDMS::TopicListTopicsRequest &a_request, + SDMS::TopicDataReply &a_reply, LogContext log_context); + void topicView(const SDMS::TopicViewRequest &a_request, + SDMS::TopicDataReply &a_reply, LogContext log_context); + void topicSearch(const SDMS::TopicSearchRequest &a_request, + SDMS::TopicDataReply &a_reply, LogContext log_context); + + void noteCreate(const SDMS::NoteCreateRequest &a_request, + SDMS::NoteDataReply &a_reply, LogContext log_context); + void 
noteUpdate(const SDMS::NoteUpdateRequest &a_request, + SDMS::NoteDataReply &a_reply, LogContext log_context); + void noteCommentEdit(const SDMS::NoteCommentEditRequest &a_request, + SDMS::NoteDataReply &a_reply, LogContext log_context); + void noteView(const SDMS::NoteViewRequest &a_request, + SDMS::NoteDataReply &a_reply, LogContext log_context); + void noteListBySubject(const SDMS::NoteListBySubjectRequest &a_request, + SDMS::NoteDataReply &a_reply, LogContext log_context); void notePurge(uint32_t a_age_sec, LogContext log_context); void taskLoadReady(libjson::Value &a_result, LogContext log_context); @@ -261,34 +257,34 @@ class DatabaseAPI { void taskAbort(const std::string &a_task_id, const std::string &a_msg, libjson::Value &a_task_reply, LogContext log_context); - void taskInitDataGet(const Auth::DataGetRequest &a_request, - Auth::DataGetReply &a_reply, libjson::Value &a_result, + void taskInitDataGet(const SDMS::DataGetRequest &a_request, + SDMS::DataGetReply &a_reply, libjson::Value &a_result, LogContext log_context); - void taskInitDataPut(const Auth::DataPutRequest &a_request, - Auth::DataPutReply &a_reply, libjson::Value &a_result, + void taskInitDataPut(const SDMS::DataPutRequest &a_request, + SDMS::DataPutReply &a_reply, libjson::Value &a_result, LogContext log_context); void taskInitRecordCollectionDelete(const std::vector &a_ids, - Auth::TaskDataReply &a_reply, + SDMS::TaskDataReply &a_reply, libjson::Value &a_result, LogContext log_context); void - taskInitRecordAllocChange(const Auth::RecordAllocChangeRequest &a_request, - Auth::RecordAllocChangeReply &a_reply, + taskInitRecordAllocChange(const SDMS::RecordAllocChangeRequest &a_request, + SDMS::RecordAllocChangeReply &a_reply, libjson::Value &a_result, LogContext log_context); void - taskInitRecordOwnerChange(const Auth::RecordOwnerChangeRequest &a_request, - Auth::RecordOwnerChangeReply &a_reply, + taskInitRecordOwnerChange(const SDMS::RecordOwnerChangeRequest &a_request, + 
SDMS::RecordOwnerChangeReply &a_reply, libjson::Value &a_result, LogContext log_context); void taskInitRepoAllocationCreate( - const Auth::RepoAllocationCreateRequest &a_request, - Auth::TaskDataReply &a_reply, libjson::Value &a_result, + const SDMS::RepoAllocationCreateRequest &a_request, + SDMS::TaskDataReply &a_reply, libjson::Value &a_result, LogContext log_context); void taskInitRepoAllocationDelete( - const Auth::RepoAllocationDeleteRequest &a_request, - Auth::TaskDataReply &a_reply, libjson::Value &a_result, + const SDMS::RepoAllocationDeleteRequest &a_request, + SDMS::TaskDataReply &a_reply, libjson::Value &a_result, LogContext log_context); - void taskInitProjectDelete(const Auth::ProjectDeleteRequest &a_request, - Auth::TaskDataReply &a_reply, + void taskInitProjectDelete(const SDMS::ProjectDeleteRequest &a_request, + SDMS::TaskDataReply &a_reply, libjson::Value &a_result, LogContext log_context); void taskStart(const std::string &a_task_id, libjson::Value &a_result, LogContext log_context); @@ -298,34 +294,34 @@ class DatabaseAPI { void taskFinalize(const std::string &a_task_id, bool a_succeeded, const std::string &a_msg, libjson::Value &a_result, LogContext log_context); - void taskList(const Auth::TaskListRequest &a_request, - Auth::TaskDataReply &a_reply, LogContext log_context); - void taskView(const Auth::TaskViewRequest &a_request, - Auth::TaskDataReply &a_reply, LogContext log_context); + void taskList(const SDMS::TaskListRequest &a_request, + SDMS::TaskDataReply &a_reply, LogContext log_context); + void taskView(const SDMS::TaskViewRequest &a_request, + SDMS::TaskDataReply &a_reply, LogContext log_context); void taskPurge(uint32_t a_age_sec, LogContext log_context); - void tagSearch(const Auth::TagSearchRequest &a_request, - Auth::TagDataReply &a_reply, LogContext log_context); - void tagListByCount(const Auth::TagListByCountRequest &a_request, - Auth::TagDataReply &a_reply, LogContext log_context); + void tagSearch(const SDMS::TagSearchRequest 
&a_request, + SDMS::TagDataReply &a_reply, LogContext log_context); + void tagListByCount(const SDMS::TagListByCountRequest &a_request, + SDMS::TagDataReply &a_reply, LogContext log_context); - void schemaSearch(const Auth::SchemaSearchRequest &a_request, - Auth::SchemaDataReply &a_reply, LogContext log_context); - void schemaView(const Auth::SchemaViewRequest &a_request, - Auth::SchemaDataReply &a_reply, LogContext log_context); + void schemaSearch(const SDMS::SchemaSearchRequest &a_request, + SDMS::SchemaDataReply &a_reply, LogContext log_context); + void schemaView(const SDMS::SchemaViewRequest &a_request, + SDMS::SchemaDataReply &a_reply, LogContext log_context); void schemaView(const std::string &a_id, libjson::Value &a_result, LogContext log_context); - void schemaCreate(const Auth::SchemaCreateRequest &a_request, + void schemaCreate(const SDMS::SchemaCreateRequest &a_request, LogContext log_context); - void schemaRevise(const Auth::SchemaReviseRequest &a_request, + void schemaRevise(const SDMS::SchemaReviseRequest &a_request, LogContext log_context); - void schemaUpdate(const Auth::SchemaUpdateRequest &a_request, + void schemaUpdate(const SDMS::SchemaUpdateRequest &a_request, LogContext log_context); - void schemaDelete(const Auth::SchemaDeleteRequest &a_request, - Anon::AckReply &a_reply, LogContext log_context); + void schemaDelete(const SDMS::SchemaDeleteRequest &a_request, + SDMS::AckReply &a_reply, LogContext log_context); - void dailyMessage(const Anon::DailyMessageRequest &a_request, - Anon::DailyMessageReply &a_reply, LogContext log_context); + void dailyMessage(const SDMS::DailyMessageRequest &a_request, + SDMS::DailyMessageReply &a_reply, LogContext log_context); void metricsUpdateMsgCounts( uint32_t a_timestamp, uint32_t a_total, @@ -342,66 +338,66 @@ class DatabaseAPI { const std::vector> &a_params, const std::string *a_body, libjson::Value &a_result, LogContext); - void setAuthStatus(Anon::AuthStatusReply &a_reply, + void 
setAuthStatus(SDMS::AuthStatusReply &a_reply, const libjson::Value &a_result); - void setUserData(Auth::UserDataReply &a_reply, const libjson::Value &a_result, + void setUserData(SDMS::UserDataReply &a_reply, const libjson::Value &a_result, LogContext log_context); - void setProjectData(Auth::ProjectDataReply &a_reply, + void setProjectData(SDMS::ProjectDataReply &a_reply, const libjson::Value &a_result, LogContext log_context); - void setRecordData(Auth::RecordDataReply &a_reply, + void setRecordData(SDMS::RecordDataReply &a_reply, const libjson::Value &a_result, LogContext log_context); - void setCollData(Auth::CollDataReply &a_reply, const libjson::Value &a_result, + void setCollData(SDMS::CollDataReply &a_reply, const libjson::Value &a_result, LogContext log_context); - void setCollPathData(Auth::CollPathReply &a_reply, + void setCollPathData(SDMS::CollPathReply &a_reply, const libjson::Value &a_result, LogContext log_context); - void setQueryData(Auth::QueryDataReply &a_reply, + void setQueryData(SDMS::QueryDataReply &a_reply, const libjson::Value &a_result, LogContext log_context); - void setListingDataReply(Auth::ListingReply &a_reply, + void setListingDataReply(SDMS::ListingReply &a_reply, const libjson::Value &a_result, LogContext log_context); void setListingData(ListingData *a_item, const libjson::Value::Object &a_obj, LogContext log_context); - void setGroupData(Auth::GroupDataReply &a_reply, + void setGroupData(SDMS::GroupDataReply &a_reply, const libjson::Value &a_result, LogContext log_context); - void setACLData(Auth::ACLDataReply &a_reply, const libjson::Value &a_result, + void setACLData(SDMS::ACLDataReply &a_reply, const libjson::Value &a_result, LogContext log_context); - void setAllocData(Auth::RepoAllocationsReply &a_reply, + void setAllocData(SDMS::RepoAllocationsReply &a_reply, const libjson::Value &a_result, LogContext log_context); void setAllocData(AllocData *a_alloc, const libjson::Value::Object &a_obj, LogContext log_context); - void 
setRepoData(Auth::RepoDataReply *a_reply, std::vector &a_repos, + void setRepoData(SDMS::RepoDataReply *a_reply, std::vector &a_repos, const libjson::Value &a_result, LogContext log_context); void setAllocStatsData(AllocStatsData &a_stats, const libjson::Value::Object &a_object, LogContext log_context); - void setNoteDataReply(Auth::NoteDataReply &a_reply, + void setNoteDataReply(SDMS::NoteDataReply &a_reply, const libjson::Value &a_result, LogContext log_context); void setNoteData(NoteData *a_item, const libjson::Value::Object &a_obj, LogContext log_context); - void setTaskDataReply(Auth::TaskDataReply &a_reply, + void setTaskDataReply(SDMS::TaskDataReply &a_reply, const libjson::Value &a_result, LogContext log_context); - void setTaskDataReplyArray(Auth::TaskDataReply &a_reply, + void setTaskDataReplyArray(SDMS::TaskDataReply &a_reply, const libjson::Value &a_result, LogContext log_context); void setTaskData(TaskData *a_task, const libjson::Value &a_task_json, LogContext log_context); - void setDataGetReply(Auth::DataGetReply &a_reply, + void setDataGetReply(SDMS::DataGetReply &a_reply, const libjson::Value &a_result, LogContext log_context); - void setDataPutReply(Auth::DataPutReply &a_reply, + void setDataPutReply(SDMS::DataPutReply &a_reply, const libjson::Value &a_result, LogContext log_context); - void setTagDataReply(Auth::TagDataReply &a_reply, + void setTagDataReply(SDMS::TagDataReply &a_reply, const libjson::Value &a_result, LogContext log_context); void setTagData(TagData *a_tag, const libjson::Value::Object &a_obj, LogContext log_context); - void setTopicDataReply(Auth::TopicDataReply &a_reply, + void setTopicDataReply(SDMS::TopicDataReply &a_reply, const libjson::Value &a_result, LogContext log_context); - void setSchemaDataReply(Auth::SchemaDataReply &a_reply, + void setSchemaDataReply(SDMS::SchemaDataReply &a_reply, const libjson::Value &a_result, LogContext log_context); void setSchemaData(SchemaData *a_schema, const libjson::Value::Object &a_obj); 
- uint32_t parseSearchRequest(const Auth::SearchRequest &a_request, + uint32_t parseSearchRequest(const SDMS::SearchRequest &a_request, std::string &a_qry_begin, std::string &a_qry_end, std::string &a_filter, std::string &a_params, LogContext log_context); diff --git a/core/server/GlobusAPI.cpp b/core/server/GlobusAPI.cpp index 7cc42b4cd..b0834206a 100644 --- a/core/server/GlobusAPI.cpp +++ b/core/server/GlobusAPI.cpp @@ -5,6 +5,7 @@ #include "common/DynaLog.hpp" #include "common/TraceException.hpp" #include "common/Util.hpp" +#include "common/envelope.pb.h" // Standard includes #include @@ -226,7 +227,7 @@ std::string GlobusAPI::getSubmissionID(const std::string &a_acc_token) { try { if (!raw_result.size()) - EXCEPT_PARAM(ID_SERVICE_ERROR, "Empty response. Code: " << code); + EXCEPT_PARAM(SERVICE_ERROR, "Empty response. Code: " << code); Value result; @@ -239,7 +240,7 @@ std::string GlobusAPI::getSubmissionID(const std::string &a_acc_token) { return resp_obj.getString("value"); } catch (libjson::ParseError &e) { DL_DEBUG(m_log_context, "PARSE FAILED! " << raw_result); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus submission API call returned invalid JSON."); } catch (TraceException &e) { DL_DEBUG(m_log_context, raw_result); @@ -247,7 +248,7 @@ std::string GlobusAPI::getSubmissionID(const std::string &a_acc_token) { throw; } catch (...) { DL_DEBUG(m_log_context, "UNEXPECTED/MISSING JSON! " << raw_result); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus submission API call returned unexpected content"); } } @@ -292,7 +293,7 @@ string GlobusAPI::transfer( try { if (!raw_result.size()) - EXCEPT_PARAM(ID_SERVICE_ERROR, "Empty response. Code: " << code); + EXCEPT_PARAM(SERVICE_ERROR, "Empty response. 
Code: " << code); Value result; @@ -307,14 +308,14 @@ string GlobusAPI::transfer( string &code = resp_obj.getString("code"); if (code.compare("Accepted") != 0) - EXCEPT_PARAM(ID_SERVICE_ERROR, "Request not accepted (" << code << ")"); + EXCEPT_PARAM(SERVICE_ERROR, "Request not accepted (" << code << ")"); string &task_id = resp_obj.getString("task_id"); return task_id; } catch (libjson::ParseError &e) { DL_ERROR(m_log_context, "PARSE FAILED! " << raw_result); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus transfer API call returned invalid JSON."); } catch (TraceException &e) { DL_ERROR(m_log_context, raw_result); @@ -322,7 +323,7 @@ string GlobusAPI::transfer( throw; } catch (...) { DL_ERROR(m_log_context, "UNEXPECTED EXCEPTION " << raw_result); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus transfer API call returned unexpected content"); } } @@ -344,7 +345,7 @@ bool GlobusAPI::checkTransferStatus(const std::string &a_task_id, try { if (!raw_result.size()) { - EXCEPT_PARAM(ID_SERVICE_ERROR, "Empty response. Code: " << code); + EXCEPT_PARAM(SERVICE_ERROR, "Empty response. Code: " << code); } Value result; @@ -400,7 +401,7 @@ bool GlobusAPI::checkTransferStatus(const std::string &a_task_id, } } catch (libjson::ParseError &e) { DL_ERROR(m_log_context, "PARSE FAILED! " << raw_result); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus task view API call returned invalid JSON."); } catch (TraceException &e) { DL_ERROR(m_log_context, raw_result); @@ -408,7 +409,7 @@ bool GlobusAPI::checkTransferStatus(const std::string &a_task_id, throw; } catch (...) { DL_ERROR(m_log_context, "UNEXPECTED/MISSING JSON! 
" << raw_result); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus task view API call returned unexpected content"); } @@ -421,7 +422,7 @@ bool GlobusAPI::checkTransferStatus(const std::string &a_task_id, try { if (!raw_result.size()) - EXCEPT_PARAM(ID_SERVICE_ERROR, "Empty response. Code: " << code); + EXCEPT_PARAM(SERVICE_ERROR, "Empty response. Code: " << code); Value result; @@ -434,7 +435,7 @@ bool GlobusAPI::checkTransferStatus(const std::string &a_task_id, string &data_type = resp_obj.getString("DATA_TYPE"); if (data_type.compare("event_list") != 0) - EXCEPT(ID_SERVICE_ERROR, "Invalid DATA_TYPE field."); + EXCEPT(SERVICE_ERROR, "Invalid DATA_TYPE field."); vector events; @@ -457,7 +458,7 @@ bool GlobusAPI::checkTransferStatus(const std::string &a_task_id, return eventsHaveErrors(events, a_status, a_err_msg); } catch (libjson::ParseError &e) { DL_ERROR(m_log_context, "PARSE FAILED! " << raw_result); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus task event list API call returned invalid JSON."); } catch (TraceException &e) { DL_ERROR(m_log_context, raw_result); @@ -465,7 +466,7 @@ bool GlobusAPI::checkTransferStatus(const std::string &a_task_id, throw; } catch (...) { DL_ERROR(m_log_context, "UNEXPECTED/MISSING JSON! " << raw_result); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus task event list API call returned unexpected content"); } } @@ -480,7 +481,7 @@ void GlobusAPI::cancelTask(const std::string &a_task_id, try { if (!raw_result.size()) - EXCEPT_PARAM(ID_SERVICE_ERROR, "Empty response. Code: " << code); + EXCEPT_PARAM(SERVICE_ERROR, "Empty response. 
Code: " << code); Value result; @@ -493,11 +494,11 @@ void GlobusAPI::cancelTask(const std::string &a_task_id, string &resp_code = resp_obj.getString("code"); if (resp_code != "Canceled") - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Unexpected 'code' value returned: " << resp_code); } catch (libjson::ParseError &e) { DL_ERROR(m_log_context, "PARSE FAILED! " << raw_result); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus cancel task API call returned invalid JSON."); } catch (TraceException &e) { DL_ERROR(m_log_context, raw_result); @@ -505,7 +506,7 @@ void GlobusAPI::cancelTask(const std::string &a_task_id, throw; } catch (...) { DL_ERROR(m_log_context, "UNEXPECTED/MISSING JSON! " << raw_result); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus cancel task API call returned unexpected content"); } } @@ -555,7 +556,7 @@ void GlobusAPI::getEndpointInfo(const std::string &a_ep_id, Value result; try { if (!raw_result.size()) - EXCEPT_PARAM(ID_SERVICE_ERROR, "Empty response. Code: " << code); + EXCEPT_PARAM(SERVICE_ERROR, "Empty response. Code: " << code); result.fromString(raw_result); @@ -599,7 +600,7 @@ void GlobusAPI::getEndpointInfo(const std::string &a_ep_id, } } catch (libjson::ParseError &e) { DL_ERROR(m_log_context, "PARSE FAILED! " << raw_result); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus endpoint API call returned invalid JSON."); } catch (TraceException &e) { DL_ERROR(m_log_context, raw_result); @@ -608,7 +609,7 @@ void GlobusAPI::getEndpointInfo(const std::string &a_ep_id, throw; } catch (exception &e) { DL_ERROR(m_log_context, "UNEXPECTED/MISSING JSON! 
" << raw_result); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus endpoint API call returned unexpected content"); } } @@ -625,7 +626,7 @@ void GlobusAPI::refreshAccessToken(const std::string &a_ref_tok, if (!raw_result.size()) { EXCEPT_PARAM( - ID_SERVICE_ERROR, + SERVICE_ERROR, "Globus token API call returned empty response. Code: " << code); } @@ -643,7 +644,7 @@ void GlobusAPI::refreshAccessToken(const std::string &a_ref_tok, } catch (libjson::ParseError &e) { DL_ERROR(m_log_context, "PARSE FAILED! Globus token API call returned invalid JSON"); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus token API call returned invalid JSON."); } catch (TraceException &e) { DL_ERROR(m_log_context, raw_result); @@ -651,7 +652,7 @@ void GlobusAPI::refreshAccessToken(const std::string &a_ref_tok, throw; } catch (exception &e) { DL_ERROR(m_log_context, "UNEXPECTED/MISSING JSON! " << raw_result); - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Globus token API call returned unexpected content"); } } @@ -661,9 +662,9 @@ void GlobusAPI::checkResponsCode(long a_code, if (a_code < 200 || a_code > 202) { libjson::Value::ObjectIter i = a_body.find("message"); if (i == a_body.end()) - EXCEPT_PARAM(ID_SERVICE_ERROR, "Request failed, code: " << a_code); + EXCEPT_PARAM(SERVICE_ERROR, "Request failed, code: " << a_code); else - EXCEPT_PARAM(ID_SERVICE_ERROR, + EXCEPT_PARAM(SERVICE_ERROR, "Request failed, code: " << a_code << ", reason: " << i->second.asString()); } diff --git a/core/server/GlobusAPI.hpp b/core/server/GlobusAPI.hpp index c571a1e4a..20c856411 100644 --- a/core/server/GlobusAPI.hpp +++ b/core/server/GlobusAPI.hpp @@ -7,7 +7,6 @@ // Local public includes #include "common/DynaLog.hpp" -#include "common/SDMS.pb.h" #include "common/libjson.hpp" // Third party includes diff --git a/core/server/TaskMgr.cpp b/core/server/TaskMgr.cpp index ef8003b6d..0d284e348 100644 --- a/core/server/TaskMgr.cpp +++ 
b/core/server/TaskMgr.cpp @@ -7,9 +7,9 @@ // Local public includes #include "common/DynaLog.hpp" -#include "common/SDMS.pb.h" #include "common/TraceException.hpp" #include "common/libjson.hpp" +#include "common/envelope.pb.h" // Standard includes #include diff --git a/core/server/TaskMgr.hpp b/core/server/TaskMgr.hpp index 52aacf4dd..f24c1dc52 100644 --- a/core/server/TaskMgr.hpp +++ b/core/server/TaskMgr.hpp @@ -8,8 +8,6 @@ #include "ITaskWorker.hpp" // Local public includes -#include "common/SDMS.pb.h" -#include "common/SDMS_Auth.pb.h" #include "common/libjson.hpp" // Standard includes diff --git a/core/server/TaskWorker.cpp b/core/server/TaskWorker.cpp index 1278db017..9da9c9423 100644 --- a/core/server/TaskWorker.cpp +++ b/core/server/TaskWorker.cpp @@ -11,8 +11,12 @@ #include "common/ICommunicator.hpp" #include "common/IMessage.hpp" #include "common/MessageFactory.hpp" -#include "common/SDMS.pb.h" +#include "common/envelope.pb.h" #include "common/SocketOptions.hpp" +#include "common/enums/task_command.pb.h" +#include "common/enums/encryption.pb.h" +#include "common/enums/task_type.pb.h" +#include "common/enums/access_token_type.pb.h" // Standard includes #include "common/TraceException.hpp" @@ -40,11 +44,11 @@ TaskWorker::TaskWorker(ITaskMgr &a_mgr, uint32_t a_worker_id, m_thread = std::make_unique(&TaskWorker::workerThread, this, log_context); - m_execute[TC_RAW_DATA_TRANSFER] = &cmdRawDataTransfer; - m_execute[TC_RAW_DATA_DELETE] = &cmdRawDataDelete; - m_execute[TC_RAW_DATA_UPDATE_SIZE] = &cmdRawDataUpdateSize; - m_execute[TC_ALLOC_CREATE] = &cmdAllocCreate; - m_execute[TC_ALLOC_DELETE] = &cmdAllocDelete; + m_execute[::SDMS::TC_RAW_DATA_TRANSFER] = &cmdRawDataTransfer; + m_execute[::SDMS::TC_RAW_DATA_DELETE] = &cmdRawDataDelete; + m_execute[::SDMS::TC_RAW_DATA_UPDATE_SIZE] = &cmdRawDataUpdateSize; + m_execute[::SDMS::TC_ALLOC_CREATE] = &cmdAllocCreate; + m_execute[::SDMS::TC_ALLOC_DELETE] = &cmdAllocDelete; } TaskWorker::~TaskWorker() { @@ -101,7 +105,7 @@ 
void TaskWorker::workerThread(LogContext log_context) { if (obj.has("step")) { step = obj.asNumber(); - } else if (cmd != TC_STOP) { + } else if (cmd != SDMS::TC_STOP) { EXCEPT(1, "Reply missing step value"); } @@ -111,7 +115,7 @@ void TaskWorker::workerThread(LogContext log_context) { "TASK_ID: " << m_task->task_id << ", Step: " << step); response = m_execute[cmd](*this, params, log_context); - } else if (cmd == TC_STOP) { + } else if (cmd == SDMS::TC_STOP) { DL_DEBUG(log_context, "TASK_ID: " << m_task->task_id << ", STOP at step: " << step); m_mgr.newTasks(params, log_context); @@ -344,7 +348,7 @@ ICommunicator::Response TaskWorker::cmdRawDataDelete(TaskWorker &me, auto message_req = msg_factory.create(MessageType::GOOGLE_PROTOCOL_BUFFER); auto del_req = - std::make_unique(); // del_req; + std::make_unique(); // del_req; for (; i < j; i++, id++) { RecordDataLocation *loc = del_req->add_loc(); loc->set_id(id->asString()); @@ -371,7 +375,7 @@ TaskWorker::cmdRawDataUpdateSize(TaskWorker &me, const Value &a_task_params, const string &repo_id = obj.getString("repo_id"); const string &path = obj.getString("repo_path"); const Value::Array &ids = obj.getArray("ids"); - auto size_req = std::make_unique(); // sz_req; + auto size_req = std::make_unique(); // sz_req; // RecordDataLocation * loc; MessageFactory msg_factory; @@ -399,7 +403,7 @@ TaskWorker::cmdRawDataUpdateSize(TaskWorker &me, const Value &a_task_params, } auto proto_msg = std::get(response.message->getPayload()); - auto size_reply = dynamic_cast(proto_msg); + auto size_reply = dynamic_cast(proto_msg); if (size_reply != 0) { if (size_reply->size_size() != (int)ids.size()) { DL_ERROR(log_context, @@ -433,7 +437,7 @@ ICommunicator::Response TaskWorker::cmdAllocCreate(TaskWorker &me, MessageFactory msg_factory; auto message = msg_factory.create(MessageType::GOOGLE_PROTOCOL_BUFFER); - auto req = std::make_unique(); + auto req = std::make_unique(); req->set_path(path); message->setPayload(std::move(req)); @@ -455,7 
+459,7 @@ ICommunicator::Response TaskWorker::cmdAllocDelete(TaskWorker &me, MessageFactory msg_factory; auto message = msg_factory.create(MessageType::GOOGLE_PROTOCOL_BUFFER); - auto req = std::make_unique(); + auto req = std::make_unique(); req->set_path(path); message->setPayload(std::move(req)); log_context.correlation_id = @@ -653,7 +657,7 @@ TaskWorker::repoSendRecv(const string &a_repo_id, auto proto_msg = std::get(response.message->getPayload()); - auto nack = dynamic_cast(proto_msg); + auto nack = dynamic_cast(proto_msg); if (nack != 0) { ErrorCode code = nack->err_code(); string msg = diff --git a/core/server/Version.hpp.in b/core/server/Version.hpp.in index be239e85f..b186730d3 100644 --- a/core/server/Version.hpp.in +++ b/core/server/Version.hpp.in @@ -18,6 +18,22 @@ namespace SDMS { constexpr int PATCH = @DATAFED_FOXX_API_PATCH@; } } + + namespace protocol { + namespace version { + constexpr int MAJOR = @DATAFED_COMMON_PROTOCOL_API_MAJOR@; + constexpr int MINOR = @DATAFED_COMMON_PROTOCOL_API_MINOR@; + constexpr int PATCH = @DATAFED_COMMON_PROTOCOL_API_PATCH@; + } + } + + namespace release { + constexpr int YEAR = @DATAFED_RELEASE_YEAR@; + constexpr int MONTH = @DATAFED_RELEASE_MONTH@; + constexpr int DAY = @DATAFED_RELEASE_DAY@; + constexpr int HOUR = @DATAFED_RELEASE_HOUR@; + constexpr int MINUTE = @DATAFED_RELEASE_MINUTE@; + } } #endif // CORE_VERSION_HPP diff --git a/core/server/main.cpp b/core/server/main.cpp index 92cf985ee..48cca3b5b 100644 --- a/core/server/main.cpp +++ b/core/server/main.cpp @@ -7,8 +7,7 @@ #include "common/DynaLog.hpp" #include "common/TraceException.hpp" #include "common/Util.hpp" -// messaging version -#include "common/Version.pb.h" +#include "Version.hpp" // Third party includes #include @@ -105,13 +104,13 @@ int main(int a_argc, char **a_argv) { } if (opt_map.count("version")) { - cout << "Release Version: " << DATAFED_RELEASE_YEAR << "." - << DATAFED_RELEASE_MONTH << "." << DATAFED_RELEASE_DAY << "." 
- << DATAFED_RELEASE_HOUR << "." << DATAFED_RELEASE_MINUTE + cout << "Release Version: " << release::YEAR << "." + << release::MONTH << "." << release::DAY << "." + << release::HOUR << "." << release::MINUTE << std::endl; - cout << "Messaging API: " << DATAFED_COMMON_PROTOCOL_API_MAJOR << "." - << DATAFED_COMMON_PROTOCOL_API_MINOR << "." - << DATAFED_COMMON_PROTOCOL_API_PATCH << endl; + cout << "Messaging API: " << protocol::version::MAJOR << "." + << protocol::version::MINOR << "." + << protocol::version::PATCH << endl; cout << "Core Server: " << core::version::MAJOR << "." << core::version::MINOR << "." << core::version::PATCH << endl; return 0; @@ -120,7 +119,7 @@ int main(int a_argc, char **a_argv) { if (cfg_file.size()) { ifstream optfile(cfg_file.c_str()); if (!optfile.is_open()) - EXCEPT_PARAM(ID_CLIENT_ERROR, + EXCEPT_PARAM(CLIENT_ERROR, "Could not open config file: " << cfg_file); po::store(po::parse_config_file(optfile, opts, false), opt_map); diff --git a/core/server/tests/unit/test_DatabaseAPI.cpp b/core/server/tests/unit/test_DatabaseAPI.cpp index 23812af2b..1c302eed4 100644 --- a/core/server/tests/unit/test_DatabaseAPI.cpp +++ b/core/server/tests/unit/test_DatabaseAPI.cpp @@ -34,6 +34,13 @@ class DatabaseAPITestHelper : public DatabaseAPI { } }; +struct GlobalProtobufTeardown { + ~GlobalProtobufTeardown() { + // This is the teardown function that runs once at the end + google::protobuf::ShutdownProtobufLibrary(); + } +}; + struct CurlGlobalFixture { CurlGlobalFixture() { curl_global_init(CURL_GLOBAL_DEFAULT); } @@ -43,6 +50,9 @@ struct CurlGlobalFixture { // Register fixture to run once per test module BOOST_TEST_GLOBAL_CONFIGURATION(CurlGlobalFixture); +// Declare a global fixture instance +BOOST_GLOBAL_FIXTURE(GlobalProtobufTeardown); + const std::string url("https://localhost:8529"); const std::string user("bob"); const std::string pass("open_sesame"); diff --git a/docker/Dockerfile.runtime b/docker/Dockerfile.runtime index eb564dcc9..b8c8bfff1 
100644 --- a/docker/Dockerfile.runtime +++ b/docker/Dockerfile.runtime @@ -27,6 +27,7 @@ COPY ./scripts/dependency_versions.sh ${BUILD_DIR}/scripts/ RUN mkdir -p ${DATAFED_DIR} RUN mkdir -p /opt/datafed RUN mkdir -p /var/log/datafed +RUN mkdir -p /opt/datafed/logs RUN chown -R datafed:root /opt/datafed RUN chown -R datafed:root /var/log/datafed RUN chown -R datafed:root ${DATAFED_DIR} diff --git a/external/DataFedDependencies b/external/DataFedDependencies index fe59a393f..e0319a2f2 160000 --- a/external/DataFedDependencies +++ b/external/DataFedDependencies @@ -1 +1 @@ -Subproject commit fe59a393f54d3aa1b8bf551f97d274b762bf93d2 +Subproject commit e0319a2f2e70d901180c730ebd5b10b10d8fce93 diff --git a/python/datafed_pkg/CMakeLists.txt b/python/datafed_pkg/CMakeLists.txt index 730fbed68..ac834383e 100644 --- a/python/datafed_pkg/CMakeLists.txt +++ b/python/datafed_pkg/CMakeLists.txt @@ -35,4 +35,4 @@ endforeach() add_subdirectory( datafed ) add_custom_target( pydatafed ) -add_dependencies( pydatafed pydatafed_src) +add_dependencies( pydatafed pydatafed_proto_src) diff --git a/python/datafed_pkg/datafed/CLI.py b/python/datafed_pkg/datafed/CLI.py index 949d535ae..27918efba 100644 --- a/python/datafed_pkg/datafed/CLI.py +++ b/python/datafed_pkg/datafed/CLI.py @@ -34,8 +34,6 @@ from prompt_toolkit.history import FileHistory from prompt_toolkit.auto_suggest import AutoSuggestFromHistory -# from . import SDMS_Auth_pb2 as auth -from . import Version_pb2 from . import CommandLib from . import Config from . import VERSION @@ -162,14 +160,14 @@ def run(): except _NoCommand as e: # Be nice and switch to interactive when no command given if _interactive and _first: - api_version = f"{Version_pb2.DATAFED_COMMON_PROTOCOL_API_MAJOR}." - api_version += f"{Version_pb2.DATAFED_COMMON_PROTOCOL_API_MINOR}." - api_version += f"{Version_pb2.DATAFED_COMMON_PROTOCOL_API_PATCH}" - release_version = f"{Version_pb2.DATAFED_RELEASE_YEAR}." 
- release_version += f"{Version_pb2.DATAFED_RELEASE_MONTH}." - release_version += f"{Version_pb2.DATAFED_RELEASE_DAY}." - release_version += f"{Version_pb2.DATAFED_RELEASE_HOUR}." - release_version += f"{Version_pb2.DATAFED_RELEASE_MINUTE}" + api_version = f"{VERSION.DATAFED_COMMON_PROTOCOL_API_MAJOR}." + api_version += f"{VERSION.DATAFED_COMMON_PROTOCOL_API_MINOR}." + api_version += f"{VERSION.DATAFED_COMMON_PROTOCOL_API_PATCH}" + release_version = f"{VERSION.DATAFED_RELEASE_YEAR}." + release_version += f"{VERSION.DATAFED_RELEASE_MONTH}." + release_version += f"{VERSION.DATAFED_RELEASE_DAY}." + release_version += f"{VERSION.DATAFED_RELEASE_HOUR}." + release_version += f"{VERSION.DATAFED_RELEASE_MINUTE}" _print_msg(1, f"Welcome to DataFed CLI, version {VERSION.__version__}") _print_msg( 1, " Release, version {}".format(release_version) diff --git a/python/datafed_pkg/datafed/CMakeLists.txt b/python/datafed_pkg/datafed/CMakeLists.txt index 9e066deea..900b953cd 100644 --- a/python/datafed_pkg/datafed/CMakeLists.txt +++ b/python/datafed_pkg/datafed/CMakeLists.txt @@ -1,42 +1,60 @@ -cmake_minimum_required (VERSION 3.17.0) +cmake_minimum_required(VERSION 3.17.0) # Copy py source to build package source dir -file( GLOB SrcFiles ${CMAKE_CURRENT_SOURCE_DIR}/*.py ) +file(GLOB SrcFiles ${CMAKE_CURRENT_SOURCE_DIR}/*.py) foreach(file ${SrcFiles}) - configure_file(${file} ${CMAKE_CURRENT_BINARY_DIR} COPYONLY ) + configure_file(${file} ${CMAKE_CURRENT_BINARY_DIR} COPYONLY) endforeach() -# Collect top-level proto files as dependencies -file( GLOB ProtoFiles ${DataFed_SOURCE_DIR}/common/proto/common/*.proto ) +# Collect proto files from the new 1-1-1 directory structure +file(GLOB_RECURSE ProtoFiles ${DataFed_SOURCE_DIR}/common/proto3/common/*.proto) # OBJECT - is needed because we don't want to compile to a binary # because we are dealing with python add_library(protobuf-target-py OBJECT ${ProtoFiles}) + protobuf_generate( LANGUAGE python TARGET protobuf-target-py - IMPORT_DIRS 
"${DataFed_SOURCE_DIR}/common/proto/common" + IMPORT_DIRS "${DataFed_SOURCE_DIR}/common/proto3/common" OUT_VAR protobuf-generated-files-py PROTOC_OUT_DIR "${CMAKE_CURRENT_BINARY_DIR}" - ) - -add_custom_target( pydatafed_src DEPENDS protobuf-target-py ) - -# By default this will output the proto py files in the CMAKE BINARY DIR -add_custom_command( TARGET pydatafed_src POST_BUILD - COMMAND sed -i -r 's:^import.*_pb2:from . \\0:' ${protobuf-generated-files-py} - COMMAND ${DataFed_SOURCE_DIR}/python/pyproto_add_msg_idx.py ${DataFed_SOURCE_DIR}/common/proto/common/SDMS_Anon.proto ${CMAKE_CURRENT_BINARY_DIR}/SDMS_Anon_pb2.py - COMMAND ${DataFed_SOURCE_DIR}/python/pyproto_add_msg_idx.py ${DataFed_SOURCE_DIR}/common/proto/common/SDMS_Auth.proto ${CMAKE_CURRENT_BINARY_DIR}/SDMS_Auth_pb2.py -) - -# Crea#te copies of the files so they show up in the source folder as well -# for the purpose of testing -add_custom_target( pydatafed_proto_src DEPENDS pydatafed_src ) -add_custom_command( TARGET pydatafed_proto_src POST_BUILD pydatafed_src - COMMAND cp ${CMAKE_CURRENT_BINARY_DIR}/SDMS_Auth_pb2.py ${CMAKE_CURRENT_SOURCE_DIR}/ - COMMAND cp ${CMAKE_CURRENT_BINARY_DIR}/SDMS_pb2.py ${CMAKE_CURRENT_SOURCE_DIR}/ - COMMAND cp ${CMAKE_CURRENT_BINARY_DIR}/Version_pb2.py ${CMAKE_CURRENT_SOURCE_DIR}/ - COMMAND cp ${CMAKE_CURRENT_BINARY_DIR}/SDMS_Anon_pb2.py ${CMAKE_CURRENT_SOURCE_DIR}/ ) +add_custom_target(pydatafed_src DEPENDS protobuf-target-py) + +# Proto subdirectories that protoc generates imports for +set(PROTO_SUBDIRS anon auth enums messages) + +# Fix imports in generated pb2 files to use relative imports within the package. +# protoc generates absolute imports like: +# from anon import ack_reply_pb2 as ... +# from enums import error_code_pb2 as ... +# import envelope_pb2 as ... +# These must become relative imports: +# from .anon import ack_reply_pb2 as ... +# from .enums import error_code_pb2 as ... +# from . import envelope_pb2 as ... 
+# Create the import fixup script +add_custom_command(TARGET pydatafed_src POST_BUILD + COMMAND sh ${DataFed_SOURCE_DIR}/python/datafed_pkg/scripts/fix_proto_imports.sh ${CMAKE_CURRENT_BINARY_DIR} + COMMENT "Rewriting protobuf imports to relative" +) + +# Copy generated files back to source tree for testing +add_custom_target(pydatafed_proto_src DEPENDS pydatafed_src) +add_custom_command(TARGET pydatafed_proto_src POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy + ${CMAKE_CURRENT_BINARY_DIR}/envelope_pb2.py + ${CMAKE_CURRENT_SOURCE_DIR}/ + # Copy subdirectories back to source for testing + COMMAND ${CMAKE_COMMAND} -E copy_directory + ${CMAKE_CURRENT_BINARY_DIR}/anon ${CMAKE_CURRENT_SOURCE_DIR}/anon + COMMAND ${CMAKE_COMMAND} -E copy_directory + ${CMAKE_CURRENT_BINARY_DIR}/auth ${CMAKE_CURRENT_SOURCE_DIR}/auth + COMMAND ${CMAKE_COMMAND} -E copy_directory + ${CMAKE_CURRENT_BINARY_DIR}/enums ${CMAKE_CURRENT_SOURCE_DIR}/enums + COMMAND ${CMAKE_COMMAND} -E copy_directory + ${CMAKE_CURRENT_BINARY_DIR}/messages ${CMAKE_CURRENT_SOURCE_DIR}/messages +) diff --git a/python/datafed_pkg/datafed/CommandLib.py b/python/datafed_pkg/datafed/CommandLib.py index 3f4ad8098..7b9140678 100644 --- a/python/datafed_pkg/datafed/CommandLib.py +++ b/python/datafed_pkg/datafed/CommandLib.py @@ -14,11 +14,9 @@ import time import pathlib import requests -from . import SDMS_Auth_pb2 as auth -from . import SDMS_pb2 as sdms from . import MessageLib from . import Config - +from . 
import envelope_pb2 as sdms class API: """ @@ -168,7 +166,7 @@ def generateCredentials(self): ------ Exception: On communication or server error """ - msg = auth.GenerateCredentialsRequest() + msg = sdms.GenerateCredentialsRequest() return self._mapi.sendRecv(msg) @@ -236,7 +234,7 @@ def repoCreate( ------ Exception : On communication or server error """ - msg = auth.RepoCreateRequest() + msg = sdms.RepoCreateRequest() msg.id = repo_id msg.title = title msg.desc = desc @@ -260,7 +258,7 @@ def repoList(self, list_all: bool = False): By default will only list the repos associated with the user. """ - msg = auth.RepoListRequest() + msg = sdms.RepoListRequest() msg.all = list_all return self._mapi.sendRecv(msg) @@ -281,7 +279,7 @@ def repoDelete(self, repo_id): ------ Exception : On communication or server error """ - msg = auth.RepoDeleteRequest() + msg = sdms.RepoDeleteRequest() msg.id = repo_id return self._mapi.sendRecv(msg) @@ -289,7 +287,7 @@ def repoAllocationCreate(self, repo_id, subject, data_limit, rec_limit): if not repo_id.startswith("repo/"): repo_id = "repo/" + repo_id - msg = auth.RepoAllocationCreateRequest() + msg = sdms.RepoAllocationCreateRequest() msg.repo = repo_id msg.subject = subject msg.data_limit = data_limit @@ -300,14 +298,14 @@ def repoListAllocations(self, repo_id): if not repo_id.startswith("repo/"): repo_id = "repo/" + repo_id - msg = auth.RepoListAllocationsRequest() + msg = sdms.RepoListAllocationsRequest() msg.id = repo_id return self._mapi.sendRecv(msg) def repoAllocationDelete(self, repo_id, subject): if not repo_id.startswith("repo/"): repo_id = "repo/" + repo_id - msg = auth.RepoAllocationDeleteRequest() + msg = sdms.RepoAllocationDeleteRequest() msg.repo = repo_id msg.subject = subject return self._mapi.sendRecv(msg) @@ -341,7 +339,7 @@ def dataView(self, data_id, details=False, context=None): ------ Exception : On communication or server error """ - msg = auth.RecordViewRequest() + msg = sdms.RecordViewRequest() msg.id = 
self._resolve_id(data_id, context) msg.details = details @@ -436,7 +434,7 @@ def dataCreate( if metadata and metadata_file: raise Exception("Cannot specify both metadata and metadata-file options.") - msg = auth.RecordCreateRequest() + msg = sdms.RecordCreateRequest() msg.title = title msg.parent_id = self._resolve_id(parent_id, context) @@ -579,7 +577,7 @@ def dataUpdate( if metadata and metadata_file: raise Exception("Cannot specify both metadata and metadata-file options.") - msg = auth.RecordUpdateRequest() + msg = sdms.RecordUpdateRequest() msg.id = self._resolve_id(data_id, context) if title is not None: @@ -673,7 +671,7 @@ def dataDelete(self, data_id, context=None): ------ Exception : On invalid options or communication / server error """ - msg = auth.RecordDeleteRequest() + msg = sdms.RecordDeleteRequest() if isinstance(data_id, list): for i in data_id: @@ -740,7 +738,7 @@ def dataGet( # Request server to map specified IDs into a list of specific record IDs. # This accounts for download of collections. - msg = auth.DataGetRequest() + msg = sdms.DataGetRequest() msg.check = True if isinstance(item_id, str): @@ -761,7 +759,7 @@ def dataGet( if len(glob_ids) > 0: # Globus transfers - msg = auth.DataGetRequest() + msg = sdms.DataGetRequest() msg.id.extend(glob_ids) msg.path = self._resolvePathForGlobus(path, False) msg.encrypt = encrypt @@ -770,7 +768,7 @@ def dataGet( reply = self._mapi.sendRecv(msg) if reply[0].task and wait: - msg2 = auth.TaskViewRequest() + msg2 = sdms.TaskViewRequest() msg2.task_id = reply[0].task.id elapsed = 0 @@ -849,7 +847,7 @@ def dataPut( ------ Exception : On invalid options or communication / server error. 
""" - msg = auth.DataPutRequest() + msg = sdms.DataPutRequest() msg.id = self._resolve_id(data_id, context) msg.path = self._resolvePathForGlobus(path, False) msg.encrypt = encrypt @@ -859,7 +857,7 @@ def dataPut( reply = self._mapi.sendRecv(msg) if (reply[0].HasField("task")) and wait: - msg2 = auth.TaskViewRequest() + msg2 = sdms.TaskViewRequest() msg2.task_id = reply[0].task.id elapsed = 0 @@ -944,7 +942,7 @@ def dataBatchCreate(self, file, coll_id=None, context=None): payload.extend(records) - msg = auth.RecordCreateBatchRequest() + msg = sdms.RecordCreateBatchRequest() msg.records = jsonlib.dumps(payload) return self._mapi.sendRecv(msg) @@ -998,7 +996,7 @@ def dataBatchUpdate(self, file): else: payload.extend(records) - msg = auth.RecordUpdateBatchRequest() + msg = sdms.RecordUpdateBatchRequest() msg.records = jsonlib.dumps(payload) return self._mapi.sendRecv(msg) @@ -1029,7 +1027,7 @@ def collectionView(self, coll_id, context=None): ------ Exception : On invalid options or communication / server error. 
""" - msg = auth.CollViewRequest() + msg = sdms.CollViewRequest() msg.id = self._resolve_id(coll_id, context) # msg.id = self._resolve_coll_id( coll_id, context ) @@ -1083,7 +1081,7 @@ def collectionCreate( ------ Exception : On communication or server error """ - msg = auth.CollCreateRequest() + msg = sdms.CollCreateRequest() msg.title = title if alias: @@ -1149,7 +1147,7 @@ def collectionUpdate( ------ Exception : On communication or server error """ - msg = auth.CollUpdateRequest() + msg = sdms.CollUpdateRequest() msg.id = self._resolve_id(coll_id, context) if title is not None: @@ -1197,7 +1195,7 @@ def collectionDelete(self, coll_id, context=None): ------ Exception : On communication or server error """ - msg = auth.CollDeleteRequest() + msg = sdms.CollDeleteRequest() if isinstance(coll_id, list): for i in coll_id: @@ -1234,7 +1232,7 @@ def collectionItemsList(self, coll_id, offset=0, count=20, context=None): Exception : On communication or server error Exception : On invalid options """ - msg = auth.CollReadRequest() + msg = sdms.CollReadRequest() msg.count = count msg.offset = offset msg.id = self._resolve_id(coll_id, context) @@ -1276,7 +1274,7 @@ def collectionItemsUpdate(self, coll_id, add_ids=None, rem_ids=None, context=Non Exception : On communication or server error Exception : On invalid options """ - msg = auth.CollWriteRequest() + msg = sdms.CollWriteRequest() msg.id = self._resolve_id(coll_id, context) if isinstance(add_ids, list): @@ -1317,7 +1315,7 @@ def collectionGetParents(self, coll_id, inclusive=False, context=None): Exception : On communication or server error Exception : On invalid options """ - msg = auth.CollGetParentsRequest() + msg = sdms.CollGetParentsRequest() msg.id = self._resolve_id(coll_id, context) msg.inclusive = inclusive @@ -1348,7 +1346,7 @@ def queryList(self, offset=0, count=20): Exception : On communication or server error Exception : On invalid options """ - msg = auth.QueryListRequest() + msg = sdms.QueryListRequest() 
msg.offset = offset msg.count = count @@ -1371,7 +1369,7 @@ def queryView(self, query_id): ------ Exception : On communication or server error """ - msg = auth.QueryViewRequest() + msg = sdms.QueryViewRequest() msg.id = query_id return self._mapi.sendRecv(msg) @@ -1422,7 +1420,7 @@ def queryCreate( Exception : On communication or server error Exception : On invalid options """ - msg = auth.QueryCreateRequest() + msg = sdms.QueryCreateRequest() msg.title = title self._buildSearchRequest( @@ -1487,7 +1485,7 @@ def queryUpdate( Exception : On invalid options """ - msg = auth.QueryUpdateRequest() + msg = sdms.QueryUpdateRequest() msg.id = query_id if title is not None: @@ -1532,7 +1530,7 @@ def queryDelete(self, query_id): ------ Exception : On communication or server error """ - msg = auth.QueryDeleteRequest() + msg = sdms.QueryDeleteRequest() msg.id.append(query_id) return self._mapi.sendRecv(msg) @@ -1560,7 +1558,7 @@ def queryExec(self, query_id, offset=0, count=20): Exception : On communication or server error Exception : On invalid options """ - msg = auth.QueryExecRequest() + msg = sdms.QueryExecRequest() msg.id = query_id msg.offset = offset msg.count = count @@ -1612,7 +1610,7 @@ def queryDirect( Exception : On communication or server error Exception : On invalid options """ - msg = auth.SearchRequest() + msg = sdms.SearchRequest() self._buildSearchRequest( msg, @@ -1776,7 +1774,7 @@ def userListCollaborators(self, offset=0, count=20): Exception : On communication or server error Exception : On invalid options """ - msg = auth.UserListCollabRequest() + msg = sdms.UserListCollabRequest() msg.offset = offset msg.count = count @@ -1802,7 +1800,7 @@ def userListAll(self, offset=0, count=20): Exception : On communication or server error Exception : On invalid options """ - msg = auth.UserListAllRequest() + msg = sdms.UserListAllRequest() msg.offset = offset msg.count = count @@ -1826,7 +1824,7 @@ def userView(self, uid): Exception : On communication or server error 
Exception : On invalid options """ - msg = auth.UserViewRequest() + msg = sdms.UserViewRequest() msg.uid = uid return self._mapi.sendRecv(msg) @@ -1868,7 +1866,7 @@ def projectList(self, owned=True, admin=True, member=True, offset=0, count=20): Exception : On communication or server error Exception : On invalid options """ - msg = auth.ProjectListRequest() + msg = sdms.ProjectListRequest() msg.as_owner = owned msg.as_admin = admin msg.as_member = member @@ -1895,7 +1893,7 @@ def projectView(self, project_id): Exception : On communication or server error Exception : On invalid options """ - msg = auth.ProjectViewRequest() + msg = sdms.ProjectViewRequest() msg.id = project_id return self._mapi.sendRecv(msg) @@ -1918,7 +1916,7 @@ def projectGetRole(self, project_id): Exception : On communication or server error Exception : On invalid options """ - msg = auth.ProjectGetRoleRequest() + msg = sdms.ProjectGetRoleRequest() msg.id = project_id reply = self._mapi.sendRecv(msg) @@ -1951,7 +1949,7 @@ def sharedList(self, inc_users=None, inc_projects=None, subject=None): Exception : On communication or server error Exception : On invalid options """ - msg = auth.ACLSharedListRequest() + msg = sdms.ACLSharedListRequest() if inc_users is not None: msg.inc_users = inc_users @@ -1981,7 +1979,7 @@ def sharedUsersList( self ): Exception : On communication or server error Exception : On invalid options """ - msg = auth.ACLByUserRequest() + msg = sdms.ACLByUserRequest() return self._mapi.sendRecv( msg ) @@ -1999,7 +1997,7 @@ def sharedProjectsList( self ): Exception : On communication or server error Exception : On invalid options """ - msg = auth.ACLByProjRequest() + msg = sdms.ACLByProjRequest() return self._mapi.sendRecv( msg ) ''' @@ -2031,7 +2029,7 @@ def sharedListItems(self, owner_id, context=None, offset=None, count=None): """ # TODO add support for offset & count - msg = auth.ACLSharedListItemsRequest() + msg = sdms.ACLSharedListItemsRequest() msg.owner = owner_id.lower() if 
context is not None: msg.subject = context.lower() @@ -2081,7 +2079,7 @@ def taskList( if since is not None and (time_from is not None or time_to is not None): raise Exception("Cannot specify 'since' and 'from'/'to' ranges.") - msg = auth.TaskListRequest() + msg = sdms.TaskListRequest() if time_from is not None: ts = self.strToTimestamp(time_from) @@ -2191,12 +2189,12 @@ def taskView(self, task_id=None): Exception : On invalid options """ if task_id: - msg = auth.TaskViewRequest() + msg = sdms.TaskViewRequest() msg.task_id = task_id reply = self._mapi.sendRecv(msg) else: - msg = auth.TaskListRequest() + msg = sdms.TaskListRequest() msg.offset = 0 msg.count = 1 @@ -2221,7 +2219,7 @@ def endpointListRecent(self): ------ Exception : On communication or server error """ - msg = auth.UserGetRecentEPRequest() + msg = sdms.UserGetRecentEPRequest() return self._mapi.sendRecv(msg) @@ -2305,7 +2303,7 @@ def setupCredentials(self): "Client configuration directory and/or client key files not configured" ) - msg = auth.GenerateCredentialsRequest() + msg = sdms.GenerateCredentialsRequest() reply = self._mapi.sendRecv(msg) @@ -2352,7 +2350,7 @@ def setContext(self, item_id=None): id2 = item_id if id2[0:2] == "p/": - msg = auth.ProjectViewRequest() + msg = sdms.ProjectViewRequest() msg.id = id2 else: if id2[0:2] != "u/": @@ -2364,7 +2362,7 @@ def setContext(self, item_id=None): ) id2 = "u/" + id2 - msg = auth.UserViewRequest() + msg = sdms.UserViewRequest() msg.uid = id2 # Don't need reply - just using to throw an except if id/uid is diff --git a/python/datafed_pkg/datafed/Connection.py b/python/datafed_pkg/datafed/Connection.py index d34dd1e1c..87546f528 100644 --- a/python/datafed_pkg/datafed/Connection.py +++ b/python/datafed_pkg/datafed/Connection.py @@ -6,13 +6,11 @@ # unserialized, and custom framing is generated to efficiently convey message # type, size, and a re-association context value. 
# -# The Google protobuf library does not provide a mechanism for identifying -# message types numerically (only by string), so a build-time custom tool -# (pyproto_add_msg_idx.py) is used to generate the mappings from message -# names to message index (and vice versa) and appends this information as -# dictionaries to the compiled proto files (xxxx_pb2.py). The -# registerProtocol() method then loads uses this information to create -# consistent message type framing for python send/recv methods. +# Message type identification is derived at runtime from the Envelope proto +# message's field descriptors. Each message type has a stable field number +# in the Envelope, which serves as its wire-format type ID. This replaces +# the previous build-time pyproto_add_msg_idx.py hack that assigned type +# IDs based on message declaration order within proto files. from google.protobuf.message_factory import GetMessageClass import logging @@ -69,6 +67,10 @@ def __init__( self._msg_desc_by_type = {} self._msg_desc_by_name = {} self._msg_type_by_desc = {} + self._field_by_msg_desc = {} + + self._envelope_class = None + self._envelope_desc = None self._address = "tcp://{0}:{1}".format(server_host, server_port) # init zeromq @@ -116,19 +118,65 @@ def __del__(self): self._zmq_ctxt.destroy() ## - # @brief Register a protobuf module + # @brief Register message types from the Envelope proto message + # + # This method derives message type mappings at runtime by inspecting the + # Envelope message's field descriptors. Each field in the Envelope that + # wraps a message type has a stable field number, which becomes the + # message type ID used in wire framing. This replaces the old + # registerProtocol() approach that relied on build-time generated + # _msg_name_to_type / _msg_type_to_name dicts. 
+ # + # @param envelope_module - The compiled envelope_pb2 module + # @param envelope_class_name - Name of the envelope message (default: "Envelope") + # + def registerEnvelope(self, envelope_module, envelope_class_name="Envelope"): + envelope_class = getattr(envelope_module, envelope_class_name) + envelope_desc = envelope_class.DESCRIPTOR + + # Store for envelope wrapping/unwrapping + self._envelope_class = envelope_class + self._envelope_desc = envelope_desc + + for field in envelope_desc.fields: + if field.message_type is None: + # Skip non-message fields (e.g. scalars) if any exist + continue + + msg_type = field.number + desc = field.message_type + + self._msg_desc_by_type[msg_type] = desc + self._msg_desc_by_name[desc.name] = desc + self._msg_type_by_desc[desc] = msg_type + self._field_by_msg_desc[desc] = field + + self._logger.debug( + "Registered %d message types from %s", + len(self._msg_desc_by_type), + envelope_class_name, + ) + + ## + # @brief Register a protobuf module (DEPRECATED - use registerEnvelope) # # This method registers an imported protobuf module (_pb2 file) for use # with the Connection class. Registration is required for proper message # framing and serialization. # + # This relies on build-time generated _msg_name_to_type dicts appended + # to _pb2 files by pyproto_add_msg_idx.py. Prefer registerEnvelope() + # which derives mappings from envelope field numbers at runtime. 
+ # # @param msg_module - Protobuf module (imported *_pb2 module) # def registerProtocol(self, msg_module): - # Message descriptors are stored by name created by protobuf compiler - # A custom post-proc tool generates and appends _msg_name_to_type with - # defined DataFed-sepcific numer message types - + import warnings + warnings.warn( + "registerProtocol() is deprecated, use registerEnvelope() instead", + DeprecationWarning, + stacklevel=2, + ) for name, desc in sorted(msg_module.DESCRIPTOR.message_types_by_name.items()): msg_t = msg_module._msg_name_to_type[name] self._msg_desc_by_type[msg_t] = desc @@ -138,15 +186,15 @@ def registerProtocol(self, msg_module): ## # @brief Receive a message # - # Receive a protobuf message with timeout. This method automatically - # parses and creates a new protobuf message class based on received - # framing. The new message object, the message name (defined in the - # associated proto file), and re-association context are returned as - # a tuple. On timeout, (None,None,None) is returned. + # Receive a protobuf message with timeout. The wire payload is an + # Envelope message; this method deserializes the Envelope and extracts + # the inner message via the oneof payload field. The inner message + # object, its name, and re-association context are returned as a tuple. + # On timeout, (None, None, None) is returned. # # @param timeout - Timeout in milliseconds - # @return Tuple of message, message type, and re-association context - # @retval (object,str,int) or (None,None,None) on timeout + # @return Tuple of message, message name, and re-association context + # @retval (object, str, int) or (None, None, None) on timeout # @exception Exception: if unregistered message type is received. 
# def recv(self, a_timeout=1000): @@ -180,38 +228,46 @@ def recv(self, a_timeout=1000): # client self._socket.recv_string(0) - # receive custom frame header and unpack + # Receive frame: 8 bytes = uint32 size + uint16 msg_type + uint16 context frame_data = self._socket.recv(0) - frame_values = struct.unpack(">LBBH", frame_data) - msg_type = (frame_values[1] << 8) | frame_values[2] - - # find message descriptor based on type (descriptor index) + frame_values = struct.unpack(">LHH", frame_data) + body_size = frame_values[0] + msg_type = frame_values[1] + ctxt = frame_values[2] - if not (msg_type in self._msg_desc_by_type): + if msg_type not in self._msg_desc_by_type: raise Exception( "received unregistered message type: {}".format(msg_type) ) - desc = self._msg_desc_by_type[msg_type] + data = self._socket.recv(0) - if frame_values[0] > 0: - # Create message by parsing content - data = self._socket.recv(0) - reply = GetMessageClass(desc)() - reply.ParseFromString(data) + if body_size > 0: + # Deserialize as Envelope + envelope = self._envelope_class() + envelope.ParseFromString(data) + + # Extract inner message from the oneof + payload_field = envelope.WhichOneof("payload") + if payload_field is None: + raise Exception("Received Envelope with no payload set") + reply = getattr(envelope, payload_field) else: - # No content, just create message instance - data = self._socket.recv(0) + # Zero-size body: create empty message instance from type + desc = self._msg_desc_by_type[msg_type] reply = GetMessageClass(desc)() - return reply, desc.name, frame_values[3] + return reply, reply.DESCRIPTOR.name, ctxt else: return None, None, None ## # @brief Send a message # - # Serializes and sends framing and message payload over connection. + # Wraps the inner message in an Envelope, serializes it, and sends + # framing and payload over the connection. The frame header carries the + # message type (Envelope field number) for efficient routing on the + # server side. 
# # @param message - The protobuf message object to be sent # @param ctxt - Reply re-association value (int) @@ -219,9 +275,15 @@ def recv(self, a_timeout=1000): # def send(self, message, ctxt): # Find msg type by descriptor look-up - if not (message.DESCRIPTOR in self._msg_type_by_desc): + if message.DESCRIPTOR not in self._msg_type_by_desc: raise Exception("Attempt to send unregistered message type.") + msg_type = self._msg_type_by_desc[message.DESCRIPTOR] + field = self._field_by_msg_desc[message.DESCRIPTOR] + + # Wrap inner message in Envelope + envelope = self._envelope_class() + getattr(envelope, field.name).CopyFrom(message) # Initial Null frame self._socket.send_string("BEGIN_DATAFED", zmq.SNDMORE) @@ -235,12 +297,12 @@ def send(self, message, ctxt): self._socket.send_string(self._pub_key, zmq.SNDMORE) self._socket.send_string("no_user", zmq.SNDMORE) - # Serialize - data = message.SerializeToString() + # Serialize the Envelope (not the inner message) + data = envelope.SerializeToString() data_sz = len(data) - # Build the message frame, to match C-struct MessageFrame - frame = struct.pack(">LBBH", data_sz, msg_type >> 8, msg_type & 0xFF, ctxt) + # Build the message frame: uint32 size + uint16 msg_type + uint16 context + frame = struct.pack(">LHH", data_sz, msg_type, ctxt) if data_sz > 0: # Send frame and payload diff --git a/python/datafed_pkg/datafed/MessageLib.py b/python/datafed_pkg/datafed/MessageLib.py index 62354c0eb..f0964e7d7 100644 --- a/python/datafed_pkg/datafed/MessageLib.py +++ b/python/datafed_pkg/datafed/MessageLib.py @@ -12,9 +12,7 @@ import zmq -from . import Version_pb2 -from . import SDMS_Anon_pb2 as anon -from . import SDMS_Auth_pb2 as auth +from . import envelope_pb2 as proto from . import Connection from . 
import VERSION @@ -166,8 +164,7 @@ def __init__( server_host, server_port, _server_pub_key, _client_pub_key, _client_priv_key ) - self._conn.registerProtocol(anon) - self._conn.registerProtocol(auth) + self._conn.registerEnvelope(proto) # Make a request to pypi package_name = "datafed" # Replace with the package name you want to check @@ -191,14 +188,14 @@ def __init__( self.new_client_avail = latest_version_on_pypi # Check for compatible protocol versions - reply, mt = self.sendRecv(anon.VersionRequest(), 10000) + reply, mt = self.sendRecv(proto.VersionRequest(), 10000) if reply is None: raise Exception( "Timeout waiting for server connection. Make sure" "the right ports are open." ) - if reply.api_major != Version_pb2.DATAFED_COMMON_PROTOCOL_API_MAJOR: + if reply.api_major != VERSION.DATAFED_COMMON_PROTOCOL_API_MAJOR: error_msg = ( "Incompatible server api detected {}.{}.{}, you are running " "{}.{}.{} consider " @@ -206,9 +203,9 @@ def __init__( reply.api_major, reply.api_minor, reply.api_patch, - Version_pb2.DATAFED_COMMON_PROTOCOL_API_MAJOR, - Version_pb2.DATAFED_COMMON_PROTOCOL_API_MINOR, - Version_pb2.DATAFED_COMMON_PROTOCOL_API_PATCH, + VERSION.DATAFED_COMMON_PROTOCOL_API_MAJOR, + VERSION.DATAFED_COMMON_PROTOCOL_API_MINOR, + VERSION.DATAFED_COMMON_PROTOCOL_API_PATCH, ) ) if self.new_client_avail: @@ -223,7 +220,7 @@ def __init__( self.manualAuthByToken(client_token) else: # Check if server authenticated based on keys - reply, mt = self.sendRecv(anon.GetAuthStatusRequest(), 10000) + reply, mt = self.sendRecv(proto.GetAuthStatusRequest(), 10000) self._auth = reply.auth self._uid = reply.uid @@ -263,7 +260,7 @@ def getAuthStatus(self): # @exception Exception: On communication timeout or authentication failure. 
# def manualAuthByPassword(self, uid, password): - msg = anon.AuthenticateByPasswordRequest() + msg = proto.AuthenticateByPasswordRequest() msg.uid = uid msg.password = password a, b = self.sendRecv(msg) @@ -272,7 +269,7 @@ def manualAuthByPassword(self, uid, password): self._conn.reset() # Test auth status - reply, mt = self.sendRecv(anon.GetAuthStatusRequest()) + reply, mt = self.sendRecv(proto.GetAuthStatusRequest()) if not reply.auth: raise Exception("Password authentication failed.") @@ -280,7 +277,7 @@ def manualAuthByPassword(self, uid, password): self._uid = reply.uid def manualAuthByToken(self, token): - msg = anon.AuthenticateByTokenRequest() + msg = proto.AuthenticateByTokenRequest() msg.token = token self.sendRecv(msg) @@ -288,7 +285,7 @@ def manualAuthByToken(self, token): self._conn.reset() # Test auth status - reply, mt = self.sendRecv(anon.GetAuthStatusRequest()) + reply, mt = self.sendRecv(proto.GetAuthStatusRequest()) if not reply.auth: raise Exception("Token authentication failed") @@ -332,7 +329,7 @@ def getDefaultTimeout(self): def getDailyMessage(self): # Get daily message, if set - reply, mt = self.sendRecv(anon.DailyMessageRequest(), 10000) + reply, mt = self.sendRecv(proto.DailyMessageRequest(), 10000) if reply is None: raise Exception("Timeout waiting for server connection.") diff --git a/python/datafed_pkg/datafed/VERSION.py.in b/python/datafed_pkg/datafed/VERSION.py.in index fc9c6a3b3..03ae7c5f5 100644 --- a/python/datafed_pkg/datafed/VERSION.py.in +++ b/python/datafed_pkg/datafed/VERSION.py.in @@ -1 +1,9 @@ __version__="@DATAFED_PYTHON_CLIENT_MAJOR@.@DATAFED_PYTHON_CLIENT_MINOR@.@DATAFED_PYTHON_CLIENT_PATCH@@DATAFED_PYTHON_CLIENT_RELEASE_TYPE@@DATAFED_PYTHON_CLIENT_PRE_RELEASE_IDENTIFER@" +DATAFED_COMMON_PROTOCOL_API_MAJOR=@DATAFED_COMMON_PROTOCOL_API_MAJOR@ +DATAFED_COMMON_PROTOCOL_API_MINOR=@DATAFED_COMMON_PROTOCOL_API_MINOR@ +DATAFED_COMMON_PROTOCOL_API_PATCH=@DATAFED_COMMON_PROTOCOL_API_PATCH@ 
+DATAFED_RELEASE_YEAR=@DATAFED_RELEASE_YEAR@ +DATAFED_RELEASE_MONTH=@DATAFED_RELEASE_MONTH@ +DATAFED_RELEASE_DAY=@DATAFED_RELEASE_DAY@ +DATAFED_RELEASE_HOUR=@DATAFED_RELEASE_HOUR@ +DATAFED_RELEASE_MINUTE=@DATAFED_RELEASE_MINUTE@ diff --git a/python/datafed_pkg/scripts/fix_proto_imports.sh b/python/datafed_pkg/scripts/fix_proto_imports.sh new file mode 100755 index 000000000..1894efa0a --- /dev/null +++ b/python/datafed_pkg/scripts/fix_proto_imports.sh @@ -0,0 +1,111 @@ +#!/bin/sh +set -e + +# What this script does +# +# protoc --python_out +# +# generates _pb2.py files with absolute imports based on the proto import +# paths. For example, if envelope.proto imports anon/auth_by_token.proto, the +# generated envelope_pb2.py will contain: +# +# from anon import auth_by_token_pb2 +# +
# This works if you run Python from the exact output directory, but breaks when +# the generated code is consumed as a Python package (which is how DataFed uses +# it). Python's package system requires relative imports for intra-package +# references: +# +# File at package level +# +# from .anon import auth_by_token_pb2 +# +# File in a subdirectory +# +# from ..anon import auth_by_token_pb2 +# +# protoc +# has no option to emit relative imports. This is a well-known, long-standing +# limitation (protocolbuffers/protobuf#1491). The script does three things: +# +# 1. Rewrites imports to be relative. It finds every _pb2.py file, determines +# whether it lives at the package root or in a subdirectory (e.g., anon/, +# auth/), and rewrites bare absolute imports (from anon import ...) to the +# correct relative form (.anon for root-level files, ..anon for files one level +# deep). +# 2. Creates __init__.py files in each subdirectory (anon/, auth/, enums/, +# messages/) so Python recognizes them as subpackages. 3. Appends re-exports to
The existing Python client +# (Connection.py) uses getattr(envelope_module, ClassName) to dynamically look +# up message classes by name on the envelope module. +# +# Under the old single-file +# proto2 layout, all message classes lived directly in envelope_pb2.py. Now +# that messages are split across subpackages, this dynamic lookup would break. +# The wildcard re-exports (from .anon.auth_by_token_pb2 import *, etc.) restore +# the flat namespace on envelope_pb2 so existing code continues to work without +# modification. + +PROTO_DIR="$1" +ROOT_DIR="${2:-$1}" + +if [ -z "$PROTO_DIR" ]; then + echo "Usage: fix_proto_imports.sh <proto_output_dir> [root_dir]" + echo " proto_output_dir: directory to find and fix _pb2.py files" + echo " root_dir: package root for computing relative depth (defaults to proto_output_dir)" + exit 1 +fi + +find "$PROTO_DIR" -name '*_pb2.py' | while read f; do + relpath=$(realpath --relative-to="$ROOT_DIR" "$f") + case "$relpath" in + */*) + sed -i \ + -e 's:^from anon import:from ..anon import:g' \ + -e 's:^from anon\.:from ..anon.:g' \ + -e 's:^from auth import:from ..auth import:g' \ + -e 's:^from auth\.:from ..auth.:g' \ + -e 's:^from enums import:from ..enums import:g' \ + -e 's:^from enums\.:from ..enums.:g' \ + -e 's:^from messages import:from ..messages import:g' \ + -e 's:^from messages\.:from ..messages.:g' \ + -e 's:^import \(.*_pb2\):from . import \1:g' \ + "$f" + ;; + *) + sed -i \ + -e 's:^from anon import:from .anon import:g' \ + -e 's:^from anon\.:from .anon.:g' \ + -e 's:^from auth import:from .auth import:g' \ + -e 's:^from auth\.:from .auth.:g' \ + -e 's:^from enums import:from .enums import:g' \ + -e 's:^from enums\.:from .enums.:g' \ + -e 's:^from messages import:from .messages import:g' \ + -e 's:^from messages\.:from .messages.:g' \ + -e 's:^import \(.*_pb2\):from . 
import \1:g' \ + "$f" + ;; + esac +done + +for subdir in anon auth enums messages; do + if [ -d "$ROOT_DIR/$subdir" ]; then + touch "$ROOT_DIR/$subdir/__init__.py" + fi +done + +# Append re-exports to envelope_pb2.py for backward compatibility +# Connection.py uses getattr(envelope_module, class_name) for dynamic dispatch +echo "" >>"$ROOT_DIR/envelope_pb2.py" +echo "# Re-export all message and enum classes for dynamic lookup" >>"$ROOT_DIR/envelope_pb2.py" + +for subdir in anon auth enums messages; do + if [ -d "$ROOT_DIR/$subdir" ]; then + for f in "$ROOT_DIR/$subdir"/*_pb2.py; do + [ -f "$f" ] || continue + module=$(basename "$f" .py) + echo "from .$subdir.$module import *" >>"$ROOT_DIR/envelope_pb2.py" + done + fi +done diff --git a/python/datafed_pkg/setup.py b/python/datafed_pkg/setup.py index 21abefdcc..55ebd76d9 100644 --- a/python/datafed_pkg/setup.py +++ b/python/datafed_pkg/setup.py @@ -22,6 +22,13 @@ long_description_content_type="text/markdown", url="https://github.com/ORNL/DataFed", packages=setuptools.find_packages(), + package_data={ + "datafed": ["*.py"], + "datafed.anon": ["*.py"], + "datafed.auth": ["*.py"], + "datafed.enums": ["*.py"], + "datafed.messages": ["*.py"], + }, setup_requires=["setuptools"], install_requires=install_requires, entry_points={"console_scripts": ["datafed = datafed.CLI:run"]}, @@ -31,3 +38,5 @@ "Operating System :: OS Independent", ], ) + + diff --git a/python/datafed_pkg/test/security.py b/python/datafed_pkg/test/security.py index d9b479c1a..a8e42c027 100755 --- a/python/datafed_pkg/test/security.py +++ b/python/datafed_pkg/test/security.py @@ -2,7 +2,7 @@ import getpass import datafed.CommandLib -import datafed.SDMS_Auth_pb2 as auth +import datafed.envelope_pb2 as sdms opts = {} @@ -15,7 +15,7 @@ api.loginByPassword(uid, password) -msg = auth.UserCreateRequest() +msg = sdms.UserCreateRequest() msg.uid = "newuser" msg.password = "temptemp" msg.name = "New User" diff --git a/python/pyproto_add_msg_idx.py 
b/python/pyproto_add_msg_idx.py deleted file mode 100755 index 233f83b33..000000000 --- a/python/pyproto_add_msg_idx.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python3 - -""" -Protobuf processing to generate message ID maps for C++, Python, and JS -""" - -import sys -import re - -print("args", sys.argv) - -pf_in = open(sys.argv[1], "r") -pf_out = open(sys.argv[2], "a") - -while True: - line = pf_in.readline() - if len(line) == 0: - sys.exit(-1) - parts = re.split(r"\W+", line.strip()) - # print( line, parts ) - try: - idx = parts.index("ID") - # print( "ID:", parts[idx+1] ) - msg_type = int(parts[idx + 1]) << 8 - break - except BaseException: - pass - -# msg_type = 0 - -by_type = [] -idx = 0 - -pf_out.write("\n_msg_name_to_type = {\n") - -while True: - line = pf_in.readline() - if len(line) == 0: - break - - if line.startswith("message "): - msg_name = line.split()[1] - by_type.append(msg_name) - # print( msg_name, msg_type ) - if idx > 0: - pf_out.write(",\n") - pf_out.write(" '{}' : {}".format(msg_name, msg_type | idx)) - idx += 1 - -pf_out.write("\n}\n\n_msg_type_to_name = {\n") - -idx = 0 -for name in by_type: - if idx > 0: - pf_out.write(",\n") - pf_out.write(" {} : '{}'".format(msg_type | idx, name)) - idx += 1 - -pf_out.write("\n}\n") - -sys.exit(0) diff --git a/repository/gridftp/globus5/authz/source/AuthzWorker.cpp b/repository/gridftp/globus5/authz/source/AuthzWorker.cpp index 234c1e166..9050bb45b 100644 --- a/repository/gridftp/globus5/authz/source/AuthzWorker.cpp +++ b/repository/gridftp/globus5/authz/source/AuthzWorker.cpp @@ -14,11 +14,8 @@ #include "common/TraceException.hpp" #include "common/Util.hpp" -// Protobuf includes -#include "common/SDMS.pb.h" -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" -#include "common/Version.pb.h" +// Proto files +#include "common/envelope.pb.h" // Standard includes #include @@ -28,9 +25,8 @@ #include #include +using namespace SDMS; using namespace std; -using namespace SDMS::Anon; -using 
namespace SDMS::Auth; namespace { @@ -507,7 +503,7 @@ int AuthzWorker::processResponse(ICommunicator::Response &response) { auto payload = std::get(response.message->getPayload()); - Anon::NackReply *nack = dynamic_cast(payload); + NackReply *nack = dynamic_cast(payload); if (!nack) { return 0; } else { @@ -581,7 +577,7 @@ int AuthzWorker::checkAuth(char *client_id, char *path, char *action) { return 0; } - auto auth_req = std::make_unique(); + auto auth_req = std::make_unique(); auth_req->set_repo(m_config->repo_id); auth_req->set_client(client_id); @@ -619,19 +615,19 @@ const char *getVersion() { const char *getAPIVersion() { static std::string ver_str = - std::to_string(DATAFED_COMMON_PROTOCOL_API_MAJOR) + "." + - std::to_string(DATAFED_COMMON_PROTOCOL_API_MINOR) + "." + - std::to_string(DATAFED_COMMON_PROTOCOL_API_PATCH); + std::to_string(protocol::version::MAJOR) + "." + + std::to_string(protocol::version::MINOR) + "." + + std::to_string(protocol::version::PATCH); return ver_str.c_str(); } const char *getReleaseVersion() { - static std::string ver_str = std::to_string(DATAFED_RELEASE_YEAR) + "." + - std::to_string(DATAFED_RELEASE_MONTH) + "." + - std::to_string(DATAFED_RELEASE_DAY) + "." + - std::to_string(DATAFED_RELEASE_HOUR) + "." + - std::to_string(DATAFED_RELEASE_MINUTE); + static std::string ver_str = std::to_string(release::YEAR) + "." + + std::to_string(release::MONTH) + "." + + std::to_string(release::DAY) + "." + + std::to_string(release::HOUR) + "." 
+ + std::to_string(release::MINUTE); return ver_str.c_str(); } diff --git a/repository/gridftp/globus5/authz/source/Version.hpp.in b/repository/gridftp/globus5/authz/source/Version.hpp.in index cdd5e35e0..f698e5b7a 100644 --- a/repository/gridftp/globus5/authz/source/Version.hpp.in +++ b/repository/gridftp/globus5/authz/source/Version.hpp.in @@ -10,6 +10,23 @@ namespace SDMS { constexpr int PATCH = @DATAFED_AUTHZ_PATCH@; } } + + namespace protocol { + namespace version { + constexpr int MAJOR = @DATAFED_COMMON_PROTOCOL_API_MAJOR@; + constexpr int MINOR = @DATAFED_COMMON_PROTOCOL_API_MINOR@; + constexpr int PATCH = @DATAFED_COMMON_PROTOCOL_API_PATCH@; + } + } + + namespace release { + constexpr int YEAR = @DATAFED_RELEASE_YEAR@; + constexpr int MONTH = @DATAFED_RELEASE_MONTH@; + constexpr int DAY = @DATAFED_RELEASE_DAY@; + constexpr int HOUR = @DATAFED_RELEASE_HOUR@; + constexpr int MINUTE = @DATAFED_RELEASE_MINUTE@; + } + } #endif // AUTHZ_VERSION_HPP diff --git a/repository/gridftp/globus5/authz/tests/unit/test_AuthzWorker.cpp b/repository/gridftp/globus5/authz/tests/unit/test_AuthzWorker.cpp index ba51d8d08..5b03f8466 100644 --- a/repository/gridftp/globus5/authz/tests/unit/test_AuthzWorker.cpp +++ b/repository/gridftp/globus5/authz/tests/unit/test_AuthzWorker.cpp @@ -15,7 +15,7 @@ #include "common/ICommunicator.hpp" #include "common/IMessage.hpp" #include "common/MessageFactory.hpp" -#include "common/SDMS_Anon.pb.h" +#include "common/envelope.pb.h" #include "common/TraceException.hpp" extern "C" { @@ -397,7 +397,7 @@ BOOST_AUTO_TEST_CASE(ProcessResponseWithValidMessage) { SDMS::MessageState::REQUEST); response.message->set(SDMS::constants::message::google::CONTEXT, context); auto auth_by_token_req = - std::make_unique(); + std::make_unique(); std::string token = "golden_chest"; auth_by_token_req->set_token(token); @@ -429,7 +429,7 @@ BOOST_AUTO_TEST_CASE(ProcessResponseWithNackReply) { response.message->set(SDMS::MessageAttribute::STATE, 
SDMS::MessageState::REQUEST); response.message->set(SDMS::constants::message::google::CONTEXT, context); - auto nack = std::make_unique(); + auto nack = std::make_unique(); response.message->setPayload(std::move(nack)); diff --git a/repository/server/Config.hpp b/repository/server/Config.hpp index 887d47996..2c954692e 100644 --- a/repository/server/Config.hpp +++ b/repository/server/Config.hpp @@ -5,11 +5,9 @@ // Common public includes #include "common/ICredentials.hpp" -// Proto includes -#include "common/SDMS.pb.h" - // Standard includes #include +#include #include #include diff --git a/repository/server/RepoServer.cpp b/repository/server/RepoServer.cpp index 36b33dbbf..915673168 100644 --- a/repository/server/RepoServer.cpp +++ b/repository/server/RepoServer.cpp @@ -15,10 +15,7 @@ #include "common/Util.hpp" // Proto includes -#include "common/SDMS.pb.h" -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" -#include "common/Version.pb.h" +#include "common/envelope.pb.h" // Standard includes #include @@ -33,8 +30,6 @@ ((_T1.tv_sec - _T0.tv_sec) + ((_T1.tv_nsec - _T0.tv_nsec) / 1.0e9)) using namespace std; -using namespace SDMS::Anon; -using namespace SDMS::Auth; namespace { std::string randomAlphaNumericCode() { @@ -212,22 +207,22 @@ void Server::checkServerVersion() { << ver_reply->api_patch() << ")"); } bool new_release_available = false; - if (ver_reply->release_year() > Version::DATAFED_RELEASE_YEAR) { + if (ver_reply->release_year() > release::YEAR) { new_release_available = true; - } else if (ver_reply->release_year() == Version::DATAFED_RELEASE_YEAR) { - if (ver_reply->release_month() > Version::DATAFED_RELEASE_MONTH) { + } else if (ver_reply->release_year() == release::YEAR) { + if (ver_reply->release_month() > release::MONTH) { new_release_available = true; } else if (ver_reply->release_month() == - Version::DATAFED_RELEASE_MONTH) { - if (ver_reply->release_day() > Version::DATAFED_RELEASE_DAY) { + release::MONTH) { + if 
(ver_reply->release_day() > release::DAY) { new_release_available = true; - } else if (ver_reply->release_day() == Version::DATAFED_RELEASE_DAY) { - if (ver_reply->release_hour() > Version::DATAFED_RELEASE_HOUR) { + } else if (ver_reply->release_day() == release::DAY) { + if (ver_reply->release_hour() > release::HOUR) { new_release_available = true; } else if (ver_reply->release_hour() == - Version::DATAFED_RELEASE_HOUR) { + release::HOUR) { if (ver_reply->release_minute() > - Version::DATAFED_RELEASE_MINUTE) { + release::MINUTE) { new_release_available = true; } } diff --git a/repository/server/RequestWorker.cpp b/repository/server/RequestWorker.cpp index 49bb9186e..8e3c00772 100644 --- a/repository/server/RequestWorker.cpp +++ b/repository/server/RequestWorker.cpp @@ -12,11 +12,8 @@ #include "common/TraceException.hpp" #include "common/Util.hpp" -// Proto includes -#include "common/SDMS.pb.h" -#include "common/SDMS_Anon.pb.h" -#include "common/SDMS_Auth.pb.h" -#include "common/Version.pb.h" +// Proto files +#include "common/envelope.pb.h" // Third party includes #include @@ -29,9 +26,6 @@ using namespace std; namespace SDMS { -using namespace SDMS::Anon; -using namespace SDMS::Auth; - namespace Repo { map RequestWorker::m_msg_handlers; @@ -180,9 +174,14 @@ void RequestWorker::wait() { } } -#define SET_MSG_HANDLER(proto_id, msg, func) \ - m_msg_handlers[m_msg_mapper->getMessageType(proto_id, #msg)] = func +#define SET_MSG_HANDLER(msg, func) \ + m_msg_handlers[m_msg_mapper->getMessageType(#msg)] = func +/** + * This method configures message handling by creating a map from message type + * (envelope field number) to handler function. Message types are identified + * by their field number in the Envelope proto message. 
+ */ void RequestWorker::setupMsgHandlers() { static std::atomic_flag lock = ATOMIC_FLAG_INIT; @@ -190,22 +189,17 @@ void RequestWorker::setupMsgHandlers() { return; try { + // Anonymous interface handlers + SET_MSG_HANDLER(VersionRequest, &RequestWorker::procVersionRequest); - uint8_t proto_id = - m_msg_mapper->getProtocolID(MessageProtocol::GOOGLE_ANONONYMOUS); - - SET_MSG_HANDLER(proto_id, VersionRequest, - &RequestWorker::procVersionRequest); - - proto_id = m_msg_mapper->getProtocolID(MessageProtocol::GOOGLE_AUTHORIZED); - - SET_MSG_HANDLER(proto_id, RepoDataDeleteRequest, + // Authenticated interface handlers + SET_MSG_HANDLER(RepoDataDeleteRequest, &RequestWorker::procDataDeleteRequest); - SET_MSG_HANDLER(proto_id, RepoDataGetSizeRequest, + SET_MSG_HANDLER(RepoDataGetSizeRequest, &RequestWorker::procDataGetSizeRequest); - SET_MSG_HANDLER(proto_id, RepoPathCreateRequest, + SET_MSG_HANDLER(RepoPathCreateRequest, &RequestWorker::procPathCreateRequest); - SET_MSG_HANDLER(proto_id, RepoPathDeleteRequest, + SET_MSG_HANDLER(RepoPathDeleteRequest, &RequestWorker::procPathDeleteRequest); } catch (TraceException &e) { DL_ERROR(m_log_context, @@ -252,6 +246,8 @@ void RequestWorker::workerThread(LogContext log_context) { timeout_on_poll); }(repo_thread_id); + ProtoBufMap proto_map; + DL_TRACE(log_context, "Listening on address " << client->address()); while (m_run) { @@ -280,7 +276,8 @@ void RequestWorker::workerThread(LogContext log_context) { uint16_t msg_type = std::get( message.get(constants::message::google::MSG_TYPE)); - DL_TRACE(message_log_context, "Received msg of type: " << msg_type); + DL_TRACE(message_log_context, "Received msg of type: " + << proto_map.toString(msg_type)); if (m_msg_handlers.count(msg_type)) { map::iterator handler = @@ -348,7 +345,7 @@ void RequestWorker::workerThread(LogContext log_context) { DL_ERROR(message_log_context, "Error: " << e.what()); \ auto msg_reply = m_msg_factory.createResponseEnvelope(*msg_request); \ auto nack = 
std::make_unique(); \ - nack->set_err_code(ID_INTERNAL_ERROR); \ + nack->set_err_code(INTERNAL_ERROR); \ nack->set_err_msg(e.what()); \ msg_reply->setPayload(std::move(nack)); \ return msg_reply; \ @@ -358,7 +355,7 @@ void RequestWorker::workerThread(LogContext log_context) { "Error unkown exception while processing message!"); \ auto msg_reply = m_msg_factory.createResponseEnvelope(*msg_request); \ auto nack = std::make_unique(); \ - nack->set_err_code(ID_INTERNAL_ERROR); \ + nack->set_err_code(INTERNAL_ERROR); \ nack->set_err_msg("Unknown exception type"); \ msg_reply->setPayload(std::move(nack)); \ return msg_reply; \ @@ -373,7 +370,7 @@ void RequestWorker::workerThread(LogContext log_context) { "Message parse failed (malformed or unregistered msg type)."); \ auto msg_reply = m_msg_factory.createResponseEnvelope(*msg_request); \ auto nack = std::make_unique(); \ - nack->set_err_code(ID_BAD_REQUEST); \ + nack->set_err_code(BAD_REQUEST); \ nack->set_err_msg( \ "Message parse failed (malformed or unregistered msg type)"); \ msg_reply->setPayload(std::move(nack)); \ @@ -388,15 +385,15 @@ RequestWorker::procVersionRequest(std::unique_ptr &&msg_request) { DL_DEBUG(message_log_context, "Version request."); - reply.set_release_year(Version::DATAFED_RELEASE_YEAR); - reply.set_release_month(Version::DATAFED_RELEASE_MONTH); - reply.set_release_day(Version::DATAFED_RELEASE_DAY); - reply.set_release_hour(Version::DATAFED_RELEASE_HOUR); - reply.set_release_minute(Version::DATAFED_RELEASE_MINUTE); + reply.set_release_year(SDMS::release::YEAR); + reply.set_release_month(SDMS::release::MONTH); + reply.set_release_day(SDMS::release::DAY); + reply.set_release_hour(SDMS::release::HOUR); + reply.set_release_minute(SDMS::release::MINUTE); - reply.set_api_major(Version::DATAFED_COMMON_PROTOCOL_API_MAJOR); - reply.set_api_minor(Version::DATAFED_COMMON_PROTOCOL_API_MINOR); - reply.set_api_patch(Version::DATAFED_COMMON_PROTOCOL_API_PATCH); + 
reply.set_api_major(SDMS::protocol::version::MAJOR); + reply.set_api_minor(SDMS::protocol::version::MINOR); + reply.set_api_patch(SDMS::protocol::version::PATCH); reply.set_component_major(SDMS::repository::version::MAJOR); reply.set_component_minor(SDMS::repository::version::MINOR); @@ -407,7 +404,7 @@ RequestWorker::procVersionRequest(std::unique_ptr &&msg_request) { std::unique_ptr RequestWorker::procDataDeleteRequest(std::unique_ptr &&msg_request) { - PROC_MSG_BEGIN(Auth::RepoDataDeleteRequest, Anon::AckReply) + PROC_MSG_BEGIN(RepoDataDeleteRequest, AckReply) if (request->loc_size()) { @@ -427,7 +424,7 @@ RequestWorker::procDataDeleteRequest(std::unique_ptr &&msg_request) { std::unique_ptr RequestWorker::procDataGetSizeRequest(std::unique_ptr &&msg_request) { - PROC_MSG_BEGIN(Auth::RepoDataGetSizeRequest, Auth::RepoDataSizeReply) + PROC_MSG_BEGIN(RepoDataGetSizeRequest, RepoDataSizeReply) DL_DEBUG(message_log_context, "Data get size."); @@ -462,7 +459,7 @@ RequestWorker::procDataGetSizeRequest(std::unique_ptr &&msg_request) { std::unique_ptr RequestWorker::procPathCreateRequest(std::unique_ptr &&msg_request) { - PROC_MSG_BEGIN(Auth::RepoPathCreateRequest, Anon::AckReply) + PROC_MSG_BEGIN(RepoPathCreateRequest, AckReply) std::string local_path = createSanitizedPath(request->path()); @@ -480,7 +477,7 @@ RequestWorker::procPathCreateRequest(std::unique_ptr &&msg_request) { std::unique_ptr RequestWorker::procPathDeleteRequest(std::unique_ptr &&msg_request) { - PROC_MSG_BEGIN(Auth::RepoPathDeleteRequest, Anon::AckReply) + PROC_MSG_BEGIN(RepoPathDeleteRequest, AckReply) DL_DEBUG(message_log_context, "Relative path delete request: " << request->path()); diff --git a/repository/server/Version.hpp.in b/repository/server/Version.hpp.in index 322bb4c5c..617a414e9 100644 --- a/repository/server/Version.hpp.in +++ b/repository/server/Version.hpp.in @@ -10,6 +10,22 @@ namespace SDMS { constexpr int PATCH = @DATAFED_REPO_PATCH@; } } + + namespace protocol { + namespace 
version { + constexpr int MAJOR = @DATAFED_COMMON_PROTOCOL_API_MAJOR@; + constexpr int MINOR = @DATAFED_COMMON_PROTOCOL_API_MINOR@; + constexpr int PATCH = @DATAFED_COMMON_PROTOCOL_API_PATCH@; + } + } + + namespace release { + constexpr int YEAR = @DATAFED_RELEASE_YEAR@; + constexpr int MONTH = @DATAFED_RELEASE_MONTH@; + constexpr int DAY = @DATAFED_RELEASE_DAY@; + constexpr int HOUR = @DATAFED_RELEASE_HOUR@; + constexpr int MINUTE = @DATAFED_RELEASE_MINUTE@; + } } #endif // REPO_VERSION_HPP diff --git a/repository/server/main.cpp b/repository/server/main.cpp index a5c00d1f0..0e5d2e287 100644 --- a/repository/server/main.cpp +++ b/repository/server/main.cpp @@ -9,9 +9,6 @@ #include "common/TraceException.hpp" #include "common/Util.hpp" -// Protocol includes -#include "common/Version.pb.h" - // Third party includes #include @@ -78,13 +75,13 @@ int main(int a_argc, char **a_argv) { } if (opt_map.count("version")) { - cout << "Release Version: " << DATAFED_RELEASE_YEAR << "." - << DATAFED_RELEASE_MONTH << "." << DATAFED_RELEASE_DAY << "." - << DATAFED_RELEASE_HOUR << "." << DATAFED_RELEASE_MINUTE + cout << "Release Version: " << release::YEAR << "." + << release::MONTH << "." << release::DAY << "." + << release::HOUR << "." << release::MINUTE << std::endl; - cout << "Messaging API: " << DATAFED_COMMON_PROTOCOL_API_MAJOR << "." - << DATAFED_COMMON_PROTOCOL_API_MINOR << "." - << DATAFED_COMMON_PROTOCOL_API_PATCH << endl; + cout << "Messaging API: " << protocol::version::MAJOR << "." + << protocol::version::MINOR << "." + << protocol::version::PATCH << endl; cout << "Repo Server: " << repository::version::MAJOR << "." << repository::version::MINOR << "." 
<< repository::version::PATCH << endl; diff --git a/tests/end-to-end/test_api_query.py b/tests/end-to-end/test_api_query.py index cf1894a09..174d0d4c6 100755 --- a/tests/end-to-end/test_api_query.py +++ b/tests/end-to-end/test_api_query.py @@ -191,6 +191,7 @@ def test_query_create_delete(self): if model.alias.startswith("adamantium"): material = model.alias time.sleep(self._timeout) + count = count + 1 print(f"Query found {material}") diff --git a/tests/end-to-end/test_api_record.py b/tests/end-to-end/test_api_record.py index 71fae9999..296975e36 100755 --- a/tests/end-to-end/test_api_record.py +++ b/tests/end-to-end/test_api_record.py @@ -176,8 +176,13 @@ def test_record_create_delete(self): # May not work depending on traffic esnet_uuid = "ece400da-0182-4777-91d6-27a1808f8371" + endpoint_uuid = os.environ.get('DATAFED_TEST_GLOBUS_ENDPOINT', esnet_uuid) + test_file_path_on_endpoint = os.environ.get('DATAFED_TEST_GLOBUS_ENDPOINT_FILE', "/1M.dat") - put_task = self._df_api.dataPut(new_alias, esnet_uuid + "/1M.dat") + if not test_file_path_on_endpoint.startswith("/"): + test_file_path_on_endpoint = "/" + test_file_path_on_endpoint + + put_task = self._df_api.dataPut(new_alias, endpoint_uuid + test_file_path_on_endpoint) task_id = put_task[0].task.id diff --git a/web/datafed-ws.js b/web/datafed-ws.js index d4373c707..e12386156 100755 --- a/web/datafed-ws.js +++ b/web/datafed-ws.js @@ -15,7 +15,7 @@ if (process.argv.length != 3) { throw "Invalid arguments, usage: datafed-ws config-file"; } -import web_version from "./version.js"; +import version from "./version.js"; import express from "express"; // For REST api import session from "express-session"; import sanitizeHtml from "sanitize-html"; @@ -55,6 +55,7 @@ var g_host, g_test, g_msg_by_id = {}, g_msg_by_name = {}, + g_envelope_type, g_core_sock = zmq.socket("dealer"), g_core_serv_addr, g_globus_auth, @@ -64,7 +65,7 @@ var g_host, g_ctx_next = 0, g_client_id, g_client_secret, - g_ready_start = 4, + g_ready_start = 3, 
g_version, g_ver_release_year, g_ver_release_month, @@ -145,6 +146,29 @@ class Logger { const logger = new Logger(LogLevel.INFO); +g_ver_release_year = version.RELEASE_YEAR; +g_ver_release_month = version.RELEASE_MONTH; +g_ver_release_day = version.RELEASE_DAY; +g_ver_release_hour = version.RELEASE_HOUR; +g_ver_release_minute = version.RELEASE_MINUTE; + +g_ver_api_major = version.MAJOR; +g_ver_api_minor = version.MINOR; +g_ver_api_patch = version.PATCH; + +g_version = + g_ver_release_year + + "." + + g_ver_release_month + + "." + + g_ver_release_day + + "." + + g_ver_release_hour + + "." + + g_ver_release_minute; + +if (--g_ready_start == 0) startServer(); + function getCurrentLineNumber() { const stackTrace = new Error().stack; const lineMatches = stackTrace.match(/:\d+:\d+/g); @@ -192,46 +216,46 @@ function startServer() { "ERROR: No reply from core server", ); } else if ( - reply.api_major != g_ver_api_major || - reply.api_minor < g_ver_api_minor || - reply.api_minor > g_ver_api_minor + 9 + reply.apiMajor != g_ver_api_major || + reply.apiMinor < g_ver_api_minor || + reply.apiMinor > g_ver_api_minor + 9 ) { logger.error( startServer.name, getCurrentLineNumber(), "ERROR: Incompatible api version detected (" + - reply.api_major + + reply.apiMajor + "." + - reply.api_minor + + reply.apiMinor + "." + - reply.api_patch + + reply.apiPatch + ")", ); } else { var warning_msg = "WARNING: A newer web server may be available the latest release version is: (" + - reply.release_year + + reply.releaseYear + "." + - reply.release_month + + reply.releaseMonth + "." + - reply.release_day + + reply.releaseDay + "." + - reply.release_hour + + reply.releaseHour + "." 
+ - reply.release_minute; - if (reply.release_year > g_ver_release_year) { + reply.releaseMinute; + if (reply.releaseYear > g_ver_release_year) { logger.warning(startServer.name, getCurrentLineNumber(), warning_msg); - } else if (reply.release_year == g_ver_release_year) { - if (reply.release_month > g_ver_release_month) { + } else if (reply.releaseYear == g_ver_release_year) { + if (reply.releaseMonth > g_ver_release_month) { logger.warning(startServer.name, getCurrentLineNumber(), warning_msg); - } else if (reply.release_month == g_ver_release_month) { - if (reply.release_day > g_ver_release_day) { + } else if (reply.releaseMonth == g_ver_release_month) { + if (reply.releaseDay > g_ver_release_day) { logger.warning(startServer.name, getCurrentLineNumber(), warning_msg); - } else if (reply.release_day == g_ver_release_day) { - if (reply.release_hour > g_ver_release_hour) { + } else if (reply.releaseDay == g_ver_release_day) { + if (reply.releaseHour > g_ver_release_hour) { logger.warning(startServer.name, getCurrentLineNumber(), warning_msg); - } else if (reply.release_hour == g_ver_release_hour) { - if (reply.release_minute > g_ver_release_minute) { + } else if (reply.releaseHour == g_ver_release_hour) { + if (reply.releaseMinute > g_ver_release_minute) { logger.warning( startServer.name, getCurrentLineNumber(), @@ -312,8 +336,31 @@ function storeCollectionId(req, res, next) { req.session.collection_id = req.query.collection_id; // TODO: assuming collection is specifically mapped and not HA/other variants req.session.collection_type = "mapped"; + logger.info( + "storeCollectionId", + getCurrentLineNumber(), + "DEBUG: Storing Collection ID: " + req.query.collection_id + " to session.", + ); + req.session.save((err) => { + if (err) { + logger.error( + "storeCollectionId", + getCurrentLineNumber(), + "DEBUG: Session save error:", + err, + ); + } else { + logger.info( + "storeCollectionId", + getCurrentLineNumber(), + "DEBUG: Session saved successfully.", + ); + } + 
next(); + }); + } else { + next(); } - next(); } app.use(cookieParser(g_session_secret)); @@ -389,12 +436,21 @@ app.get("/ui/main", (a_req, a_resp) => { const nonce = crypto.randomBytes(16).toString("base64"); a_resp.locals.nonce = nonce; a_resp.setHeader("Content-Security-Policy", `script-src 'nonce-${nonce}'`); + + // Extract restore_state from session if present + let restore_state = null; + if (a_req.session.restore_state) { + restore_state = JSON.stringify(a_req.session.restore_state); + delete a_req.session.restore_state; + } + a_resp.render("main", { nonce: a_resp.locals.nonce, user_uid: a_req.session.uid, theme: theme, version: g_version, test_mode: g_test, + restore_state: restore_state, ...g_google_analytics, }); } else { @@ -600,6 +656,16 @@ the registration page. ); } let username = reply.user[0]?.uid?.replace(/^u\//, ""); + if (!username) { + logger.error( + "/ui/authn", + getCurrentLineNumber(), + "Error: User identity found but UID is missing or invalid.", + reply.user, + ); + a_resp.redirect("/ui/error"); + return; + } logger.info( "/ui/authn", getCurrentLineNumber(), @@ -619,12 +685,32 @@ the registration page. 
a_req.session.uid = username; a_req.session.reg = true; + if (a_req.query.state) { + try { + const state_obj = JSON.parse(a_req.query.state); + // Validate state structure to prevent arbitrary session pollution + if ( + state_obj.endpoint_browser || + state_obj.restore_state + ) { + a_req.session.restore_state = state_obj; + } + } catch (e) { + // State was not JSON or valid, ignore + logger.warning( + "/ui/authn", + getCurrentLineNumber(), + "Failed to parse state parameter: " + e, + ); + } + } + let redirect_path = "/ui/main"; // Note: context/optional params for arbitrary input const token_context = { // passed values are mutable - resource_server: client_token.data.resource_sever, + resource_server: client_token.data.resource_server, collection_id: a_req.session.collection_id, scope: xfr_token.scope, }; @@ -641,15 +727,30 @@ the registration page. xfr_token.refresh_token, xfr_token.expires_in, optional_data, + (err) => { + if (err) { + redirect_path = "/ui/error"; + logger.error( + "/ui/authn", + getCurrentLineNumber(), + "setAccessToken Failed: " + err, + ); + delete a_req.session.collection_id; + } + // TODO Account may be disable from SDMS (active = false) + a_resp.redirect(redirect_path); + }, ); } catch (err) { redirect_path = "/ui/error"; - logger.error("/ui/authn", getCurrentLineNumber(), err); + logger.error( + "/ui/authn", + getCurrentLineNumber(), + "Exception in token handling: " + err, + ); delete a_req.session.collection_id; + a_resp.redirect(redirect_path); } - - // TODO Account may be disable from SDMS (active = false) - a_resp.redirect(redirect_path); } }, ); @@ -751,6 +852,19 @@ app.get("/api/usr/register", (a_req, a_resp) => { a_req.session.acc_tok, a_req.session.ref_tok, a_req.session.acc_tok_ttl, + {}, + (err) => { + if (err) { + logger.error("/api/usr/register", getCurrentLineNumber(), err); + a_resp.status(500).send("Registration failed during token set"); + return; + } + + // Set session as registered user + a_req.session.reg = true; + + 
a_resp.send(reply); + }, ); } catch (err) { logger.error("/api/usr/register", getCurrentLineNumber(), err); @@ -765,11 +879,6 @@ app.get("/api/usr/register", (a_req, a_resp) => { delete a_req.session.ref_tok; delete a_req.session.uuids; } - - // Set session as registered user - a_req.session.reg = true; - - a_resp.send(reply); } }, ); @@ -911,12 +1020,6 @@ app.get("/api/prj/list", (a_req, a_resp) => { }); }); -app.post("/api/prj/search", (a_req, a_resp) => { - sendMessage("ProjectSearchRequest", a_req.body, a_req, a_resp, function (reply) { - a_resp.send(reply.item ? reply.item : []); - }); -}); - app.get("/api/grp/create", (a_req, a_resp) => { var params = { group: { @@ -1005,7 +1108,7 @@ app.post("/api/query/create", (a_req, a_resp) => { }); app.post("/api/query/update", (a_req, a_resp) => { - var params = { id: a_req.query.id }; + var params = { id: a_req.query.id, replaceQuery: true }; if (a_req.query.title) params.title = a_req.query.title; if (a_req.body) params.query = a_req.body; @@ -1108,23 +1211,6 @@ app.get("/api/dat/lock", (a_req, a_resp) => { ); }); -app.get("/api/dat/lock/toggle", (a_req, a_resp) => { - sendMessage("RecordLockToggleRequest", { id: a_req.query.id }, a_req, a_resp, function (reply) { - a_resp.send(reply); - }); -}); - -app.get("/api/dat/copy", (a_req, a_resp) => { - var params = { - sourceId: a_req.query.src, - destId: a_req.query.dst, - }; - - sendMessage("DataCopyRequest", params, a_req, a_resp, function (reply) { - a_resp.send(reply); - }); -}); - app.get("/api/dat/delete", (a_req, a_resp) => { sendMessage( "RecordDeleteRequest", @@ -1211,18 +1297,6 @@ app.get("/api/dat/put", (a_req, a_resp) => { }); }); -app.get("/api/dat/dep/get", (a_req, a_resp) => { - sendMessage( - "RecordGetDependenciesRequest", - { id: a_req.query.ids }, - a_req, - a_resp, - function (reply) { - a_resp.send(reply); - }, - ); -}); - app.get("/api/dat/dep/graph/get", (a_req, a_resp) => { sendMessage( "RecordGetDependencyGraphRequest", @@ -1569,14 +1643,20 @@ 
app.get("/api/col/published/list", (a_req, a_resp) => { }); }); -app.post("/api/cat/search", (a_req, a_resp) => { - sendMessage("CatalogSearchRequest", a_req.body, a_req, a_resp, function (reply) { - a_resp.send(reply); - }); -}); - app.get("/api/globus/consent_url", storeCollectionId, (a_req, a_resp) => { - const { requested_scopes, state, refresh_tokens, query_params } = a_req.query; + let { requested_scopes, state, refresh_tokens, query_params } = a_req.query; + + if (typeof query_params === "string") { + try { + query_params = JSON.parse(query_params); + } catch (e) { + logger.error( + "/api/globus/consent_url", + getCurrentLineNumber(), + "Failed to parse query_params: " + e, + ); + } + } const consent_url = generateConsentURL( g_oauth_credentials.clientId, @@ -1590,12 +1670,6 @@ app.get("/api/globus/consent_url", storeCollectionId, (a_req, a_resp) => { a_resp.json({ consent_url }); }); -app.post("/api/col/pub/search/data", (a_req, a_resp) => { - sendMessage("RecordSearchPublishedRequest", a_req.body, a_req, a_resp, function (reply) { - a_resp.send(reply); - }); -}); - app.get("/api/repo/list", (a_req, a_resp) => { var params = {}; if (a_req.query.all) params.all = a_req.query.all; @@ -1772,18 +1846,6 @@ app.get("/api/top/list/topics", (a_req, a_resp) => { }); }); -app.get("/api/top/list/coll", (a_req, a_resp) => { - var par = { topicId: a_req.query.id }; - if (a_req.query.offset != undefined && a_req.query.count != undefined) { - par.offset = a_req.query.offset; - par.count = a_req.query.count; - } - - sendMessage("TopicListCollectionsRequest", par, a_req, a_resp, function (reply) { - a_resp.json(reply); - }); -}); - app.get("/api/top/view", (a_req, a_resp) => { sendMessage("TopicViewRequest", { id: a_req.query.id }, a_req, a_resp, function (reply) { a_resp.json(reply); @@ -2002,10 +2064,18 @@ app.get("/ui/theme/save", (a_req, a_resp) => { * @param {string} a_ref_tok - Refresh token for access token * @param {number} a_expires_sec - Time until expiration of 
access token * @param {OptionalData} [token_optional_params] - Optional params for DataFed to process access token accordingly + * @param {RequestCallback} [a_cb] - Optional callback function * * @throws Error - When a reply is not received from sendMessageDirect */ -function setAccessToken(a_uid, a_acc_tok, a_ref_tok, a_expires_sec, token_optional_params = {}) { +function setAccessToken( + a_uid, + a_acc_tok, + a_ref_tok, + a_expires_sec, + token_optional_params = {}, + a_cb = null, +) { logger.info( setAccessToken.name, getCurrentLineNumber(), @@ -2019,8 +2089,13 @@ function setAccessToken(a_uid, a_acc_tok, a_ref_tok, a_expires_sec, token_option // Should be an AckReply if (!reply) { logger.error("setAccessToken", getCurrentLineNumber(), "failed."); + if (a_cb) { + a_cb(new Error("setAccessToken failed")); + return; + } throw new Error("setAccessToken failed"); } + if (a_cb) a_cb(null, reply); }); } @@ -2065,15 +2140,17 @@ function sendMessage(a_msg_name, a_msg_data, a_req, a_resp, a_cb, a_anon) { a_resp.setHeader("Content-Type", "application/json"); allocRequestContext(a_resp, function (ctx) { - var msg = g_msg_by_name[a_msg_name]; - if (!msg) throw "Invalid message type: " + a_msg_name; + var msg_info = g_msg_by_name[a_msg_name]; + if (!msg_info) throw "Invalid message type: " + a_msg_name; - var msg_buf = msg.encode(a_msg_data).finish(); + // Wrap inner message data in an Envelope (matches C++ sendBody wrapInEnvelope) + var envelope_data = {}; + envelope_data[msg_info.field_name] = a_msg_data; + var msg_buf = g_envelope_type.encode(envelope_data).finish(); var frame = Buffer.alloc(8); frame.writeUInt32BE(msg_buf.length, 0); - frame.writeUInt8(msg._pid, 4); - frame.writeUInt8(msg._mid, 5); + frame.writeUInt16BE(msg_info.field_id, 4); frame.writeUInt16BE(ctx, 6); g_ctx[ctx] = function (a_reply) { @@ -2121,7 +2198,10 @@ function sendMessage(a_msg_name, a_msg_data, a_req, a_resp, a_cb, a_anon) { sendMessage.name, getCurrentLineNumber(), "MsgType is: " + - 
msg._msg_type + + msg_info.field_id + + " (" + + a_msg_name + + ")" + " Writing ctx to frame, " + ctx + " buffer size " + @@ -2142,7 +2222,10 @@ function sendMessage(a_msg_name, a_msg_data, a_req, a_resp, a_cb, a_anon) { sendMessage.name, getCurrentLineNumber(), "MsgType is: " + - msg._msg_type + + msg_info.field_id + + " (" + + a_msg_name + + ")" + " Writing ctx to frame, " + ctx + " buffer size " + @@ -2154,17 +2237,19 @@ function sendMessage(a_msg_name, a_msg_data, a_req, a_resp, a_cb, a_anon) { } function sendMessageDirect(a_msg_name, a_client, a_msg_data, a_cb) { - var msg = g_msg_by_name[a_msg_name]; - if (!msg) throw "Invalid message type: " + a_msg_name; + var msg_info = g_msg_by_name[a_msg_name]; + if (!msg_info) throw "Invalid message type: " + a_msg_name; allocRequestContext(null, function (ctx) { - var msg_buf = msg.encode(a_msg_data).finish(); + // Wrap inner message data in an Envelope (matches C++ sendBody wrapInEnvelope) + var envelope_data = {}; + envelope_data[msg_info.field_name] = a_msg_data; + var msg_buf = g_envelope_type.encode(envelope_data).finish(); var frame = Buffer.alloc(8); // A protobuf message doesn't have to have a payload frame.writeUInt32BE(msg_buf.length, 0); - frame.writeUInt8(msg._pid, 4); - frame.writeUInt8(msg._mid, 5); + frame.writeUInt16BE(msg_info.field_id, 4); frame.writeUInt16BE(ctx, 6); g_ctx[ctx] = a_cb; @@ -2187,7 +2272,10 @@ function sendMessageDirect(a_msg_name, a_client, a_msg_data, a_cb) { sendMessageDirect.name, getCurrentLineNumber(), "MsgType is: " + - msg._msg_type + + msg_info.field_id + + " (" + + a_msg_name + + ")" + " Direct Writing ctx to frame, " + ctx + " buffer size " + @@ -2208,7 +2296,10 @@ function sendMessageDirect(a_msg_name, a_client, a_msg_data, a_cb) { sendMessageDirect.name, getCurrentLineNumber(), "MsgType is: " + - msg._msg_type + + msg_info.field_id + + " (" + + a_msg_name + + ")" + " Direct Writing ctx to frame, " + ctx + " buffer size " + @@ -2219,71 +2310,65 @@ function 
sendMessageDirect(a_msg_name, a_client, a_msg_data, a_cb) { }); } -function processProtoFile(msg) { - //var mlist = msg.parent.order; - var i, - msg_list = []; - for (i in msg.parent.nested) msg_list.push(msg.parent.nested[i]); - - //msg_list.sort(); - - var pid = msg.values.ID; +/** + * Processes the proto3 Envelope message to build message type maps. + * + * Instead of the old proto2 approach that derived message types from Protocol enum IDs + * and file ordering (pid << 8 | mid), this uses the envelope's oneof field numbers + * as stable message type identifiers. + * + * Each map entry stores: + * - type: the protobufjs Type (for encode/decode of the inner message) + * - field_name: the envelope oneof field name (e.g. "version_request") + * - field_id: the envelope field number (used as msg_type in the frame) + * + * @param {protobuf.Root} root - The loaded protobuf root containing SDMS.Envelope + */ +function processEnvelope(root) { + g_envelope_type = root.lookupType("SDMS.Envelope"); + var payloadOneof = g_envelope_type.oneofs.payload; - for (i = 1; i < msg_list.length; i++) { - msg = msg_list[i]; - msg._pid = pid; - msg._mid = i - 1; - msg._msg_type = (pid << 8) | (i - 1); + if (!payloadOneof) throw "Missing 'payload' oneof in SDMS.Envelope"; - g_msg_by_id[msg._msg_type] = msg; - g_msg_by_name[msg.name] = msg; - } -} + payloadOneof.fieldsArray.forEach(function (field) { + var msgType = field.resolvedType; + if (!msgType) { + logger.warning( + processEnvelope.name, + getCurrentLineNumber(), + "Unresolved type for envelope field: " + field.name, + ); + return; + } -protobuf.load("Version.proto", function (err, root) { - if (err) throw err; + var entry = { + type: msgType, // protobufjs Type for encode/decode + field_name: field.name, // envelope oneof field name + field_id: field.id, // envelope field number = msg_type in frame + }; - var msg = root.lookupEnum("Version"); - if (!msg) throw "Missing Version enum in Version.Anon proto file"; - - 
g_ver_release_year = msg.values.DATAFED_RELEASE_YEAR; - g_ver_release_month = msg.values.DATAFED_RELEASE_MONTH; - g_ver_release_day = msg.values.DATAFED_RELEASE_DAY; - g_ver_release_hour = msg.values.DATAFED_RELEASE_HOUR; - g_ver_release_minute = msg.values.DATAFED_RELEASE_MINUTE; - - g_version = - g_ver_release_year + - "." + - g_ver_release_month + - "." + - g_ver_release_day + - "." + - g_ver_release_hour + - "." + - g_ver_release_minute; - - logger.info("protobuf.load", getCurrentLineNumber(), "Running Version: " + g_version); - if (--g_ready_start == 0) startServer(); -}); + g_msg_by_id[field.id] = entry; + g_msg_by_name[msgType.name] = entry; + }); -protobuf.load("SDMS_Anon.proto", function (err, root) { - if (err) throw err; + logger.info( + processEnvelope.name, + getCurrentLineNumber(), + "Loaded " + Object.keys(g_msg_by_id).length + " message types from envelope", + ); +} - var msg = root.lookupEnum("SDMS.Anon.Protocol"); - if (!msg) throw "Missing Protocol enum in SDMS.Anon proto file"; +var protobufRoot = new protobuf.Root(); - processProtoFile(msg); - if (--g_ready_start == 0) startServer(); -}); +protobufRoot.resolvePath = function (origin, target) { + return "proto3/" + target; +}; -protobuf.load("SDMS_Auth.proto", function (err, root) { +protobufRoot.load("envelope.proto", function (err, root) { if (err) throw err; - var msg = root.lookupEnum("SDMS.Auth.Protocol"); - if (!msg) throw "Missing Protocol enum in SDMS.Auth proto file"; - - processProtoFile(msg); + root.resolveAll(); + processEnvelope(root); if (--g_ready_start == 0) startServer(); }); @@ -2304,20 +2389,34 @@ g_core_sock.on( var mtype = (frame.readUInt8(4) << 8) | frame.readUInt8(5); var ctx = frame.readUInt16BE(6); - var msg_class = g_msg_by_id[mtype]; + var msg_info = g_msg_by_id[mtype]; var msg; + var msg_name = msg_info ? 
msg_info.type.name : "unknown(" + mtype + ")"; - if (msg_class) { + if (msg_info) { // Only try to decode if there is a payload if (msg_buf && msg_buf.length) { try { - // This is unserializing the protocol message - msg = msg_class.decode(msg_buf); + // Decode as Envelope (matches C++ receiveBody unwrapFromEnvelope) + var envelope = g_envelope_type.decode(msg_buf); + var which_field = envelope.payload; // oneof discriminator: field name that is set + if (which_field) { + msg = envelope[which_field]; + msg_name = which_field; + } else { + logger.warning( + "g_core_sock.on", + getCurrentLineNumber(), + "Envelope decoded but no payload field set, correlation_id: " + + correlation_id, + ); + msg = msg_info.type.create({}); + } if (!msg) { logger.error( "g_core_sock.on", getCurrentLineNumber(), - "ERROR: msg decode failed: no reason, correlation_id: " + + "ERROR: envelope decode produced null msg, correlation_id: " + correlation_id, ); } @@ -2325,11 +2424,15 @@ g_core_sock.on( logger.error( "g_core_sock.on", getCurrentLineNumber(), - "ERROR: msg decode failed: " + err + " correlation_id: " + correlation_id, + "ERROR: envelope decode failed: " + + err + + " correlation_id: " + + correlation_id, ); } } else { - msg = msg_class; + // No payload body - create empty message instance + msg = msg_info.type.create({}); } } else { logger.error( @@ -2345,10 +2448,29 @@ g_core_sock.on( logger.info( "g_core_sock.on", getCurrentLineNumber(), - "freed ctx: " + ctx + " for msg: " + msg_class.name, + "freed ctx: " + ctx + " for msg: " + msg_name, correlation_id, ); g_ctx_next = ctx; + + // Convert protobufjs message to plain object with default values + if (msg) { + var resolve_type = msg_info ? 
msg_info.type : null; + if (which_field) { + var actual_entry = Object.values(g_msg_by_id).find( + (e) => e.field_name === which_field, + ); + if (actual_entry) resolve_type = actual_entry.type; + } + if (resolve_type) { + msg = resolve_type.toObject(msg, { + defaults: true, + longs: String, + enums: String, + }); + } + } + f(msg); } else { g_ctx[ctx] = null; @@ -2360,7 +2482,7 @@ g_core_sock.on( " - msg type: " + mtype + ", name: " + - msg_class.name + + msg_name + " correlation_id: " + correlation_id, ); diff --git a/web/docker/Dockerfile b/web/docker/Dockerfile index 5fafb6b08..15cbbc2b1 100644 --- a/web/docker/Dockerfile +++ b/web/docker/Dockerfile @@ -39,7 +39,7 @@ COPY ./scripts/generate_ws_config.sh ${BUILD_DIR}/scripts/ COPY ./scripts/install_ws.sh ${BUILD_DIR}/scripts/ COPY ./scripts/export_dependency_version.sh ${BUILD_DIR}/scripts/ COPY ./cmake ${BUILD_DIR}/cmake -COPY ./common/proto ${BUILD_DIR}/common/proto +COPY ./common/proto3 ${BUILD_DIR}/common/proto3 COPY ./web ${BUILD_DIR}/web RUN ${DATAFED_DEPENDENCIES_ROOT}/scripts/generate_dependencies_config.sh && \ @@ -83,6 +83,7 @@ WORKDIR ${DATAFED_DIR} USER datafed +RUN mkdir -p ${DATAFED_DEFAULT_LOG_PATH}; chown root:datafed ${DATAFED_DEFAULT_LOG_PATH}; chmod -R g+rw ${DATAFED_DEFAULT_LOG_PATH} COPY --from=ws-build --chown=datafed:root ${DATAFED_DEPENDENCIES_ROOT}/scripts/ {DATAFED_DEPENDENCIES_ROOT}/scripts/ COPY --chown=datafed:root ./scripts/generate_datafed.sh ${BUILD_DIR}/scripts/generate_datafed.sh @@ -97,7 +98,7 @@ COPY --from=ws-build --chown=datafed:root ${DATAFED_DEPENDENCIES_ROOT}/scripts $ COPY --from=ws-build --chown=datafed:root ${DATAFED_INSTALL_PATH}/web ${DATAFED_INSTALL_PATH}/web COPY --from=ws-build --chown=datafed:root /usr/bin/curl /usr/bin/curl -WORKDIR ${BUILD_DIR} +WORKDIR ${BUILD_DIR}/web USER root diff --git a/web/services/auth/ConsentHandler.js b/web/services/auth/ConsentHandler.js index 9cee4b3ae..e80f0e81e 100644 --- a/web/services/auth/ConsentHandler.js +++ 
b/web/services/auth/ConsentHandler.js @@ -24,9 +24,13 @@ export const generateConsentURL = ( query_params, state, ) => { - const scopes = requested_scopes || ["openid", "profile", "email", "urn:globus:auth:scope:transfer.api.globus.org:all"]; + const scopes = Array.isArray(requested_scopes) + ? requested_scopes + : typeof requested_scopes === "string" + ? [requested_scopes] + : ["openid", "profile", "email", "urn:globus:auth:scope:transfer.api.globus.org:all"]; - if (refresh_tokens) { + if (refresh_tokens && !scopes.includes("offline_access")) { scopes.push("offline_access"); } diff --git a/web/services/auth/TokenHandler.js b/web/services/auth/TokenHandler.js index ac678be3e..370b6181d 100644 --- a/web/services/auth/TokenHandler.js +++ b/web/services/auth/TokenHandler.js @@ -167,6 +167,14 @@ export default class OAuthTokenHandler { break; } case AccessTokenType.GLOBUS_DEFAULT: { + if (token_context.collection_id) { + const { collection_id, scope } = token_context; + if (!scope) { + throw new Error("Transfer token received without scope context"); + } + optional_data.type = AccessTokenType.GLOBUS_TRANSFER; + optional_data.other = collection_id + "|" + scope; + } break; } default: { diff --git a/web/static/api.js b/web/static/api.js index 3adf056f5..72af70138 100644 --- a/web/static/api.js +++ b/web/static/api.js @@ -184,17 +184,6 @@ export function dataPutCheck(a_id, a_cb) { _asyncGet("/api/dat/put?id=" + encodeURIComponent(a_id) + "&check=true", null, a_cb); } -export function dataGetDeps(a_ids, a_cb) { - _asyncGet("/api/dat/dep/get?ids=" + encodeURIComponent(a_ids), null, function (ok, data) { - if (ok) { - a_cb(data); - } else { - util.setStatusText("Get Dependencies Error: " + data, true); - a_cb(); - } - }); -} - export function dataGetDepGraph(a_id, a_cb) { _asyncGet("/api/dat/dep/graph/get?id=" + encodeURIComponent(a_id), null, function (ok, data) { if (ok) { @@ -240,25 +229,10 @@ export function sendDataDelete(a_ids, a_cb) { _asyncGet("/api/dat/delete?ids=" 
+ encodeURIComponent(JSON.stringify(a_ids)), null, a_cb); } -export function copyData(a_src_id, a_dst_id, a_cb) { - _asyncGet( - "/api/dat/copy?src=" + - encodeURIComponent(a_src_id) + - "&dst=" + - encodeURIComponent(a_dst_id), - null, - a_cb, - ); -} - export function dataSearch(a_query, a_callback) { _asyncPost("/api/dat/search", a_query, a_callback); } -export function dataPubSearch(a_query, a_cb) { - _asyncPost("/api/col/pub/search/data", a_query, a_cb); -} - export function sendDataLock(a_ids, a_lock, a_cb) { _asyncGet( "/api/dat/lock?lock=" + a_lock + "&ids=" + encodeURIComponent(JSON.stringify(a_ids)), @@ -334,13 +308,6 @@ export function collDelete(a_ids, a_cb) { _asyncGet("/api/col/delete?ids=" + encodeURIComponent(JSON.stringify(a_ids)), null, a_cb); } -export function catalogSearch(a_query, a_cb) { - _asyncPost("/api/cat/search", a_query, a_cb); - /*_asyncPost( "/api/col/pub/search", a_query, function( ok, data ){ - setTimeout( function(){ a_cb( ok, data ); }, 2000 ); - });*/ -} - export function projList_url(a_owned, a_admin, a_member, a_sort, a_offset, a_count) { return ( "/api/prj/list?owner=" + @@ -852,25 +819,6 @@ export function topicListTopics(a_id, a_offset, a_count, a_cb) { if (!a_cb) return; _asyncGet(topicListTopics_url(a_id, a_offset, a_count), null, a_cb); - /*_asyncGet( topicListTopics_url( a_id, a_offset, a_count ), null, function( ok, data ){ - setTimeout( function(){ a_cb( ok, data ); }, 2000 ); - });*/ -} - -export function topicListColl_url(a_id, a_offset, a_count) { - return ( - "/api/top/list/coll?id=" + - a_id + - (a_offset != undefined && a_count != undefined - ? 
"&offset=" + a_offset + "&count=" + a_count - : "") - ); -} - -export function topicListColl(a_id, a_offset, a_count, a_cb) { - if (!a_cb) return; - - _asyncGet(topicListColl_url(a_id, a_offset, a_count), null, a_cb); } export function topicSearch_url(a_phrase) { diff --git a/web/static/components/endpoint-browse/index.js b/web/static/components/endpoint-browse/index.js index 17910b2ce..3cbd4c601 100644 --- a/web/static/components/endpoint-browse/index.js +++ b/web/static/components/endpoint-browse/index.js @@ -302,15 +302,74 @@ class EndpointBrowser { let title; // Generate consent URL for consent required errors if (error instanceof ApiError) { - if (error.code === "ConsentRequired" || error.data?.needs_consent === true) { + // Check for explicit consent requirement OR permission denied with auth parameters (530) + if ( + error.code === "ConsentRequired" || + error.data?.needs_consent === true || + (error.data?.code === "permission_denied" && + error.data?.authorization_parameters?.session_required_single_domain) + ) { const data = await new Promise((resolve) => { + // Extract query params from authorization_parameters if available + const queryParams = {}; + if (error.data?.authorization_parameters?.session_required_single_domain) { + queryParams.session_required_single_domain = + error.data.authorization_parameters.session_required_single_domain; + } + + // Serialize current state for restoration after consent flow + const stateObj = { + endpoint_browser: { + endpoint: this.props.endpoint.rawData, + path: this.state.path, + mode: this.props.mode, + }, + }; + + // Check if "New Data Record" dialog is open and save its state + const new_data_dlg = $("#d_new_edit"); + const transfer_dlg_content = $("#records").closest(".ui-dialog-content"); + + if (new_data_dlg.length && new_data_dlg.dialog("isOpen")) { + const metadata_editor = ace.edit(new_data_dlg.find("#md")[0]); + stateObj.parent_dialog = { + type: "d_new_edit", + mode: 0, // DLG_DATA_MODE_NEW + data: { + 
title: new_data_dlg.find("#title").val(), + alias: new_data_dlg.find("#alias").val(), + desc: new_data_dlg.find("#desc").val(), + metadata: metadata_editor ? metadata_editor.getValue() : "", + // Use parentId for the collection + parentId: new_data_dlg.find("#coll").val(), + }, + }; + } else if ( + transfer_dlg_content.length && + transfer_dlg_content.dialog("isOpen") + ) { + const controller = transfer_dlg_content.data("controller"); + if (controller) { + stateObj.parent_dialog = { + type: "transfer", + mode: controller.model.mode, + records: controller.ids, + }; + } + } + + const state = JSON.stringify(stateObj); + api.getGlobusConsentURL( (_, data) => resolve(data), this.props.endpoint.id, error.data.required_scopes, + false, // refresh_tokens + queryParams, + state, ); }); - title = `Consent Required: Please provide consent.`; + title = `Consent/Login Required: Please login with required identity.`; } else { title = `Error: ${ error.data.message || "Unknown API error" diff --git a/web/static/components/transfer/transfer-ui-manager.js b/web/static/components/transfer/transfer-ui-manager.js index d290b5af8..4d80f58ff 100644 --- a/web/static/components/transfer/transfer-ui-manager.js +++ b/web/static/components/transfer/transfer-ui-manager.js @@ -222,6 +222,8 @@ export class TransferUIManager { createDialog(labels) { this.state.frame = $(document.createElement("div")); this.state.frame.html(getDialogTemplate(labels, this.#controller.model.mode)); + // Attach controller to frame for state persistence + this.state.frame.data("controller", this.#controller); return this.state.frame; } diff --git a/web/static/dlg_data_new_edit.js b/web/static/dlg_data_new_edit.js index f354cf6f8..ef38d5d32 100644 --- a/web/static/dlg_data_new_edit.js +++ b/web/static/dlg_data_new_edit.js @@ -16,7 +16,7 @@ const DLG_DATA_BTN_LABEL = ["Create", "Update", "Create"]; export function show(a_mode, a_data, a_parent, a_upd_perms, a_cb) { var ele = document.createElement("div"); - ele.id = 
(a_data ? a_data.id.replace("/", "_") : "d_new") + "_edit"; + ele.id = (a_data && a_data.id ? a_data.id.replace("/", "_") : "d_new") + "_edit"; var frame = $(ele), dlg_inst, jsoned, @@ -147,7 +147,6 @@ export function show(a_mode, a_data, a_parent, a_upd_perms, a_cb) { } api.metadataValidate(sch_id, jsoned.getValue(), function (ok, data) { - //console.log("val res:", ok, data ); if (ok) { if (data.errors) { $("#md_err_msg", frame).text(data.errors).show(); diff --git a/web/static/main.js b/web/static/main.js index 4bf2964bb..7a4e4dd49 100644 --- a/web/static/main.js +++ b/web/static/main.js @@ -61,6 +61,28 @@ $(document).ready(function () { browser_tab.init(); + if (tmpl_data.restore_state) { + const parentState = tmpl_data.restore_state.parent_dialog; + + if (parentState) { + if (parentState.type === "d_new_edit") { + import("/dlg_data_new_edit.js").then((module) => { + const { mode, data } = parentState; + module.show(mode, data, data.parentId); + }); + } else if (parentState.type === "transfer") { + import("/components/transfer/index.js").then((module) => { + const { mode, records } = parentState; + // Re-open transfer dialog + module.transferDialog.show(mode, records, () => { + // Default callback if needed, usually this refreshes view + // but we might not have context. 
+ }); + }); + } + } + } + util.setStatusText("DataFed Ready"); } else { dialogs.dlgAlert("System Error", "Unable to access user record"); diff --git a/web/static/util.js b/web/static/util.js index ab19b6973..6cff31350 100644 --- a/web/static/util.js +++ b/web/static/util.js @@ -51,14 +51,11 @@ export function getUpdatedValue(a_new_val, a_old_obj, a_new_obj, a_field) { export function getUpdatedValueJSON(a_new_val, a_old_obj, a_new_obj, a_field) { var tmp = a_new_val.trim(), old = a_old_obj[a_field]; - if (old === undefined && tmp.length) { + if ((!old || old === undefined) && tmp.length) { a_new_obj[a_field] = tmp; } else if (tmp.length) { - // Must compare values - have to restringify both b/c formats may differ with same content - // TODO - This should be a deep compare due to possibly inconsistent object arrangement var oldjs = JSON.stringify(JSON.parse(old)), newjs = JSON.stringify(JSON.parse(tmp)); - if (oldjs != newjs) { a_new_obj[a_field] = tmp; } diff --git a/web/version.js.in b/web/version.js.in index 69ec65aca..ca3eb3b97 100644 --- a/web/version.js.in +++ b/web/version.js.in @@ -2,5 +2,10 @@ const MAJOR = @DATAFED_WEB_MAJOR@; const MINOR = @DATAFED_WEB_MINOR@; const PATCH = @DATAFED_WEB_PATCH@; +const RELEASE_YEAR = @DATAFED_RELEASE_YEAR@; +const RELEASE_MONTH = @DATAFED_RELEASE_MONTH@; +const RELEASE_DAY = @DATAFED_RELEASE_DAY@; +const RELEASE_HOUR = @DATAFED_RELEASE_HOUR@; +const RELEASE_MINUTE = @DATAFED_RELEASE_MINUTE@; -export default { MAJOR, MINOR, PATCH }; +export default { MAJOR, MINOR, PATCH, RELEASE_YEAR, RELEASE_MONTH, RELEASE_DAY, RELEASE_HOUR, RELEASE_MINUTE } diff --git a/web/views/main.ect b/web/views/main.ect index 0368e5d79..35894c412 100644 --- a/web/views/main.ect +++ b/web/views/main.ect @@ -7,7 +7,8 @@ { "test_mode" : "<%- @test_mode %>", "user_uid" : "<%- @user_uid %>", - "theme" : "<%- @theme %>" + "theme" : "<%- @theme %>", + "restore_state" : <%- @restore_state || 'null' %> }